| problem_id | source | task_type | in_source_id | prompt | golden_diff | verification_info | num_tokens | num_tokens_diff |
|---|---|---|---|---|---|---|---|---|
| string (18-22 chars) | string (1 class) | string (1 class) | string (13-58 chars) | string (1.1k-10.2k chars) | string (151-4.94k chars) | string (582-21k chars) | int64 (271-2.05k) | int64 (47-1.02k) |
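Each row below is one sample from the dataset, rendered in full. For orientation, a minimal sketch of loading it with the Hugging Face `datasets` library; the repo id is taken from the `source` column and the `"train"` split name is an assumption:

```python
# Sketch: load the dataset and peek at one row's scalar fields.
# Repo id comes from the `source` column; split name is assumed.
from datasets import load_dataset

ds = load_dataset("rasdani/github-patches", split="train")
row = ds[0]
print(row["problem_id"], row["in_source_id"], row["num_tokens"])
```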
gh_patches_debug_7175 | rasdani/github-patches | git_diff | RedHatInsights__insights-core-2743 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove defunct entry_points
These scripts no longer exist. We should remove the entry_points.
* [insights.tools.generate_api_config](https://github.com/RedHatInsights/insights-core/blob/master/setup.py#L23)
* [insights.tools.perf](https://github.com/RedHatInsights/insights-core/blob/master/setup.py#L24)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import os
2 import sys
3 from setuptools import setup, find_packages
4
5 __here__ = os.path.dirname(os.path.abspath(__file__))
6
7 package_info = dict.fromkeys(["RELEASE", "COMMIT", "VERSION", "NAME"])
8
9 for name in package_info:
10 with open(os.path.join(__here__, "insights", name)) as f:
11 package_info[name] = f.read().strip()
12
13 entry_points = {
14 'console_scripts': [
15 'insights-collect = insights.collect:main',
16 'insights-run = insights:main',
17 'insights = insights.command_parser:main',
18 'insights-cat = insights.tools.cat:main',
19 'insights-dupkeycheck = insights.tools.dupkeycheck:main',
20 'insights-inspect = insights.tools.insights_inspect:main',
21 'insights-info = insights.tools.query:main',
22 'insights-ocpshell= insights.ocpshell:main',
23 'gen_api = insights.tools.generate_api_config:main',
24 'insights-perf = insights.tools.perf:main',
25 'client = insights.client:run',
26 'mangle = insights.util.mangle:main'
27 ]
28 }
29
30 runtime = set([
31 'six',
32 'requests',
33 'redis',
34 'cachecontrol',
35 'cachecontrol[redis]',
36 'cachecontrol[filecache]',
37 'defusedxml',
38 'lockfile',
39 'jinja2',
40 ])
41
42 if (sys.version_info < (2, 7)):
43 runtime.add('pyyaml>=3.10,<=3.13')
44 else:
45 runtime.add('pyyaml')
46
47
48 def maybe_require(pkg):
49 try:
50 __import__(pkg)
51 except ImportError:
52 runtime.add(pkg)
53
54
55 maybe_require("importlib")
56 maybe_require("argparse")
57
58
59 client = set([
60 'requests'
61 ])
62
63 develop = set([
64 'futures==3.0.5',
65 'wheel',
66 ])
67
68 docs = set([
69 'Sphinx<=3.0.2',
70 'nbsphinx',
71 'sphinx_rtd_theme',
72 'ipython',
73 'colorama',
74 'jinja2',
75 'Pygments'
76 ])
77
78 testing = set([
79 'coverage==4.3.4',
80 'pytest==3.0.6',
81 'pytest-cov==2.4.0',
82 'mock==2.0.0',
83 ])
84
85 cluster = set([
86 'ansible',
87 'pandas',
88 'colorama',
89 ])
90
91 openshift = set([
92 'openshift'
93 ])
94
95 linting = set([
96 'flake8==2.6.2',
97 ])
98
99 optional = set([
100 'python-cjson',
101 'python-logstash',
102 'python-statsd',
103 'watchdog',
104 ])
105
106 if __name__ == "__main__":
107 # allows for runtime modification of rpm name
108 name = os.environ.get("INSIGHTS_CORE_NAME", package_info["NAME"])
109
110 setup(
111 name=name,
112 version=package_info["VERSION"],
113 description="Insights Core is a data collection and analysis framework",
114 long_description=open("README.rst").read(),
115 url="https://github.com/redhatinsights/insights-core",
116 author="Red Hat, Inc.",
117 author_email="[email protected]",
118 packages=find_packages(),
119 install_requires=list(runtime),
120 package_data={'': ['LICENSE']},
121 license='Apache 2.0',
122 extras_require={
123 'develop': list(runtime | develop | client | docs | linting | testing | cluster),
124 'develop26': list(runtime | develop | client | linting | testing | cluster),
125 'client': list(runtime | client),
126 'client-develop': list(runtime | develop | client | linting | testing),
127 'cluster': list(runtime | cluster),
128 'openshift': list(runtime | openshift),
129 'optional': list(optional),
130 'docs': list(docs),
131 'linting': list(linting | client),
132 'testing': list(testing | client)
133 },
134 classifiers=[
135 'Development Status :: 5 - Production/Stable',
136 'Intended Audience :: Developers',
137 'Natural Language :: English',
138 'License :: OSI Approved :: Apache Software License',
139 'Programming Language :: Python',
140 'Programming Language :: Python :: 2.6',
141 'Programming Language :: Python :: 2.7',
142 'Programming Language :: Python :: 3.3',
143 'Programming Language :: Python :: 3.4',
144 'Programming Language :: Python :: 3.5',
145 'Programming Language :: Python :: 3.6'
146 ],
147 entry_points=entry_points,
148 include_package_data=True
149 )
150
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -20,8 +20,6 @@
'insights-inspect = insights.tools.insights_inspect:main',
'insights-info = insights.tools.query:main',
'insights-ocpshell= insights.ocpshell:main',
- 'gen_api = insights.tools.generate_api_config:main',
- 'insights-perf = insights.tools.perf:main',
'client = insights.client:run',
'mangle = insights.util.mangle:main'
]
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -20,8 +20,6 @@\n 'insights-inspect = insights.tools.insights_inspect:main',\n 'insights-info = insights.tools.query:main',\n 'insights-ocpshell= insights.ocpshell:main',\n- 'gen_api = insights.tools.generate_api_config:main',\n- 'insights-perf = insights.tools.perf:main',\n 'client = insights.client:run',\n 'mangle = insights.util.mangle:main'\n ]\n", "issue": "Remove defunct entry_points\nThese scripts no longer exist. We should remove the entry_points.\r\n\r\n* [insights.tools.generate_api_config](https://github.com/RedHatInsights/insights-core/blob/master/setup.py#L23)\r\n* [insights.tools.perf](https://github.com/RedHatInsights/insights-core/blob/master/setup.py#L24)\n", "before_files": [{"content": "import os\nimport sys\nfrom setuptools import setup, find_packages\n\n__here__ = os.path.dirname(os.path.abspath(__file__))\n\npackage_info = dict.fromkeys([\"RELEASE\", \"COMMIT\", \"VERSION\", \"NAME\"])\n\nfor name in package_info:\n with open(os.path.join(__here__, \"insights\", name)) as f:\n package_info[name] = f.read().strip()\n\nentry_points = {\n 'console_scripts': [\n 'insights-collect = insights.collect:main',\n 'insights-run = insights:main',\n 'insights = insights.command_parser:main',\n 'insights-cat = insights.tools.cat:main',\n 'insights-dupkeycheck = insights.tools.dupkeycheck:main',\n 'insights-inspect = insights.tools.insights_inspect:main',\n 'insights-info = insights.tools.query:main',\n 'insights-ocpshell= insights.ocpshell:main',\n 'gen_api = insights.tools.generate_api_config:main',\n 'insights-perf = insights.tools.perf:main',\n 'client = insights.client:run',\n 'mangle = insights.util.mangle:main'\n ]\n}\n\nruntime = set([\n 'six',\n 'requests',\n 'redis',\n 'cachecontrol',\n 'cachecontrol[redis]',\n 'cachecontrol[filecache]',\n 'defusedxml',\n 'lockfile',\n 'jinja2',\n])\n\nif (sys.version_info < (2, 7)):\n runtime.add('pyyaml>=3.10,<=3.13')\nelse:\n runtime.add('pyyaml')\n\n\ndef maybe_require(pkg):\n try:\n __import__(pkg)\n except ImportError:\n runtime.add(pkg)\n\n\nmaybe_require(\"importlib\")\nmaybe_require(\"argparse\")\n\n\nclient = set([\n 'requests'\n])\n\ndevelop = set([\n 'futures==3.0.5',\n 'wheel',\n])\n\ndocs = set([\n 'Sphinx<=3.0.2',\n 'nbsphinx',\n 'sphinx_rtd_theme',\n 'ipython',\n 'colorama',\n 'jinja2',\n 'Pygments'\n])\n\ntesting = set([\n 'coverage==4.3.4',\n 'pytest==3.0.6',\n 'pytest-cov==2.4.0',\n 'mock==2.0.0',\n])\n\ncluster = set([\n 'ansible',\n 'pandas',\n 'colorama',\n])\n\nopenshift = set([\n 'openshift'\n])\n\nlinting = set([\n 'flake8==2.6.2',\n])\n\noptional = set([\n 'python-cjson',\n 'python-logstash',\n 'python-statsd',\n 'watchdog',\n])\n\nif __name__ == \"__main__\":\n # allows for runtime modification of rpm name\n name = os.environ.get(\"INSIGHTS_CORE_NAME\", package_info[\"NAME\"])\n\n setup(\n name=name,\n version=package_info[\"VERSION\"],\n description=\"Insights Core is a data collection and analysis framework\",\n long_description=open(\"README.rst\").read(),\n url=\"https://github.com/redhatinsights/insights-core\",\n author=\"Red Hat, Inc.\",\n author_email=\"[email protected]\",\n packages=find_packages(),\n install_requires=list(runtime),\n package_data={'': ['LICENSE']},\n license='Apache 2.0',\n extras_require={\n 'develop': list(runtime | develop | client | docs | linting | testing | cluster),\n 'develop26': list(runtime | develop | client | linting | testing | cluster),\n 'client': list(runtime | client),\n 
'client-develop': list(runtime | develop | client | linting | testing),\n 'cluster': list(runtime | cluster),\n 'openshift': list(runtime | openshift),\n 'optional': list(optional),\n 'docs': list(docs),\n 'linting': list(linting | client),\n 'testing': list(testing | client)\n },\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6'\n ],\n entry_points=entry_points,\n include_package_data=True\n )\n", "path": "setup.py"}], "after_files": [{"content": "import os\nimport sys\nfrom setuptools import setup, find_packages\n\n__here__ = os.path.dirname(os.path.abspath(__file__))\n\npackage_info = dict.fromkeys([\"RELEASE\", \"COMMIT\", \"VERSION\", \"NAME\"])\n\nfor name in package_info:\n with open(os.path.join(__here__, \"insights\", name)) as f:\n package_info[name] = f.read().strip()\n\nentry_points = {\n 'console_scripts': [\n 'insights-collect = insights.collect:main',\n 'insights-run = insights:main',\n 'insights = insights.command_parser:main',\n 'insights-cat = insights.tools.cat:main',\n 'insights-dupkeycheck = insights.tools.dupkeycheck:main',\n 'insights-inspect = insights.tools.insights_inspect:main',\n 'insights-info = insights.tools.query:main',\n 'insights-ocpshell= insights.ocpshell:main',\n 'client = insights.client:run',\n 'mangle = insights.util.mangle:main'\n ]\n}\n\nruntime = set([\n 'six',\n 'requests',\n 'redis',\n 'cachecontrol',\n 'cachecontrol[redis]',\n 'cachecontrol[filecache]',\n 'defusedxml',\n 'lockfile',\n 'jinja2',\n])\n\nif (sys.version_info < (2, 7)):\n runtime.add('pyyaml>=3.10,<=3.13')\nelse:\n runtime.add('pyyaml')\n\n\ndef maybe_require(pkg):\n try:\n __import__(pkg)\n except ImportError:\n runtime.add(pkg)\n\n\nmaybe_require(\"importlib\")\nmaybe_require(\"argparse\")\n\n\nclient = set([\n 'requests'\n])\n\ndevelop = set([\n 'futures==3.0.5',\n 'wheel',\n])\n\ndocs = set([\n 'Sphinx<=3.0.2',\n 'nbsphinx',\n 'sphinx_rtd_theme',\n 'ipython',\n 'colorama',\n 'jinja2',\n 'Pygments'\n])\n\ntesting = set([\n 'coverage==4.3.4',\n 'pytest==3.0.6',\n 'pytest-cov==2.4.0',\n 'mock==2.0.0',\n])\n\ncluster = set([\n 'ansible',\n 'pandas',\n 'colorama',\n])\n\nopenshift = set([\n 'openshift'\n])\n\nlinting = set([\n 'flake8==2.6.2',\n])\n\noptional = set([\n 'python-cjson',\n 'python-logstash',\n 'python-statsd',\n 'watchdog',\n])\n\nif __name__ == \"__main__\":\n # allows for runtime modification of rpm name\n name = os.environ.get(\"INSIGHTS_CORE_NAME\", package_info[\"NAME\"])\n\n setup(\n name=name,\n version=package_info[\"VERSION\"],\n description=\"Insights Core is a data collection and analysis framework\",\n long_description=open(\"README.rst\").read(),\n url=\"https://github.com/redhatinsights/insights-core\",\n author=\"Red Hat, Inc.\",\n author_email=\"[email protected]\",\n packages=find_packages(),\n install_requires=list(runtime),\n package_data={'': ['LICENSE']},\n license='Apache 2.0',\n extras_require={\n 'develop': list(runtime | develop | client | docs | linting | testing | cluster),\n 'develop26': list(runtime | develop | client | linting | testing | cluster),\n 'client': list(runtime | client),\n 'client-develop': 
list(runtime | develop | client | linting | testing),\n 'cluster': list(runtime | cluster),\n 'openshift': list(runtime | openshift),\n 'optional': list(optional),\n 'docs': list(docs),\n 'linting': list(linting | client),\n 'testing': list(testing | client)\n },\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6'\n ],\n entry_points=entry_points,\n include_package_data=True\n )\n", "path": "setup.py"}]} | 1,680 | 128 |
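One way to sanity-check a patch like the one above is to list the registered console scripts after installing the patched package; a minimal sketch, assuming Python 3.10+ (for the `group=` keyword) and that the patched insights-core is installed in the current environment:

```python
# Confirm the defunct scripts are gone and the surviving ones remain.
from importlib.metadata import entry_points

names = {ep.name for ep in entry_points(group="console_scripts")}
assert "gen_api" not in names and "insights-perf" not in names
assert {"insights", "insights-info", "mangle"} <= names
```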
gh_patches_debug_26911 | rasdani/github-patches | git_diff | pyca__cryptography-1424 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Need zope.interface.verify.verifyObject for ABCs.
Every object that claims to provide some interface should then be verified against that interface in a unit test.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cryptography/utils.py`
Content:
```
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
10 # implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 from __future__ import absolute_import, division, print_function
15
16 import sys
17
18
19 DeprecatedIn06 = DeprecationWarning
20
21
22 def register_interface(iface):
23 def register_decorator(klass):
24 iface.register(klass)
25 return klass
26 return register_decorator
27
28
29 def read_only_property(name):
30 return property(lambda self: getattr(self, name))
31
32
33 def bit_length(x):
34 if sys.version_info >= (2, 7):
35 return x.bit_length()
36 else:
37 return len(bin(x)) - (2 + (x <= 0))
38
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/cryptography/utils.py b/cryptography/utils.py
--- a/cryptography/utils.py
+++ b/cryptography/utils.py
@@ -13,6 +13,8 @@
from __future__ import absolute_import, division, print_function
+import abc
+import inspect
import sys
@@ -21,6 +23,7 @@
def register_interface(iface):
def register_decorator(klass):
+ verify_interface(iface, klass)
iface.register(klass)
return klass
return register_decorator
@@ -30,6 +33,30 @@
return property(lambda self: getattr(self, name))
+class InterfaceNotImplemented(Exception):
+ pass
+
+
+def verify_interface(iface, klass):
+ for method in iface.__abstractmethods__:
+ if not hasattr(klass, method):
+ raise InterfaceNotImplemented(
+ "{0} is missing a {1!r} method".format(klass, method)
+ )
+ if isinstance(getattr(iface, method), abc.abstractproperty):
+ # Can't properly verify these yet.
+ continue
+ spec = inspect.getargspec(getattr(iface, method))
+ actual = inspect.getargspec(getattr(klass, method))
+ if spec != actual:
+ raise InterfaceNotImplemented(
+ "{0}.{1}'s signature differs from the expected. Expected: "
+ "{2!r}. Received: {3!r}".format(
+ klass, method, spec, actual
+ )
+ )
+
+
def bit_length(x):
if sys.version_info >= (2, 7):
return x.bit_length()
| {"golden_diff": "diff --git a/cryptography/utils.py b/cryptography/utils.py\n--- a/cryptography/utils.py\n+++ b/cryptography/utils.py\n@@ -13,6 +13,8 @@\n \n from __future__ import absolute_import, division, print_function\n \n+import abc\n+import inspect\n import sys\n \n \n@@ -21,6 +23,7 @@\n \n def register_interface(iface):\n def register_decorator(klass):\n+ verify_interface(iface, klass)\n iface.register(klass)\n return klass\n return register_decorator\n@@ -30,6 +33,30 @@\n return property(lambda self: getattr(self, name))\n \n \n+class InterfaceNotImplemented(Exception):\n+ pass\n+\n+\n+def verify_interface(iface, klass):\n+ for method in iface.__abstractmethods__:\n+ if not hasattr(klass, method):\n+ raise InterfaceNotImplemented(\n+ \"{0} is missing a {1!r} method\".format(klass, method)\n+ )\n+ if isinstance(getattr(iface, method), abc.abstractproperty):\n+ # Can't properly verify these yet.\n+ continue\n+ spec = inspect.getargspec(getattr(iface, method))\n+ actual = inspect.getargspec(getattr(klass, method))\n+ if spec != actual:\n+ raise InterfaceNotImplemented(\n+ \"{0}.{1}'s signature differs from the expected. Expected: \"\n+ \"{2!r}. Received: {3!r}\".format(\n+ klass, method, spec, actual\n+ )\n+ )\n+\n+\n def bit_length(x):\n if sys.version_info >= (2, 7):\n return x.bit_length()\n", "issue": "Need zope.interface.verify.verifyObject for ABCs.\nEvery object that claims to provide some interface should then use it in a unit test.\n\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n# implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport sys\n\n\nDeprecatedIn06 = DeprecationWarning\n\n\ndef register_interface(iface):\n def register_decorator(klass):\n iface.register(klass)\n return klass\n return register_decorator\n\n\ndef read_only_property(name):\n return property(lambda self: getattr(self, name))\n\n\ndef bit_length(x):\n if sys.version_info >= (2, 7):\n return x.bit_length()\n else:\n return len(bin(x)) - (2 + (x <= 0))\n", "path": "cryptography/utils.py"}], "after_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n# implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport abc\nimport inspect\nimport sys\n\n\nDeprecatedIn06 = DeprecationWarning\n\n\ndef register_interface(iface):\n def register_decorator(klass):\n verify_interface(iface, klass)\n iface.register(klass)\n return klass\n return register_decorator\n\n\ndef read_only_property(name):\n return property(lambda self: 
getattr(self, name))\n\n\nclass InterfaceNotImplemented(Exception):\n pass\n\n\ndef verify_interface(iface, klass):\n for method in iface.__abstractmethods__:\n if not hasattr(klass, method):\n raise InterfaceNotImplemented(\n \"{0} is missing a {1!r} method\".format(klass, method)\n )\n if isinstance(getattr(iface, method), abc.abstractproperty):\n # Can't properly verify these yet.\n continue\n spec = inspect.getargspec(getattr(iface, method))\n actual = inspect.getargspec(getattr(klass, method))\n if spec != actual:\n raise InterfaceNotImplemented(\n \"{0}.{1}'s signature differs from the expected. Expected: \"\n \"{2!r}. Received: {3!r}\".format(\n klass, method, spec, actual\n )\n )\n\n\ndef bit_length(x):\n if sys.version_info >= (2, 7):\n return x.bit_length()\n else:\n return len(bin(x)) - (2 + (x <= 0))\n", "path": "cryptography/utils.py"}]} | 595 | 357 |
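To illustrate what the added `verify_interface` buys: registering a class whose method signature deviates from the ABC now fails loudly at decoration time. A minimal sketch, assuming the patched `cryptography.utils` is importable and a Python version where `inspect.getargspec` still exists (it was removed in 3.11); the `HashContext`/`BadHash` names are made up for the example:

```python
import abc

import six  # the project targeted Python 2 and 3 at the time, hence six

from cryptography.utils import InterfaceNotImplemented, register_interface


@six.add_metaclass(abc.ABCMeta)
class HashContext(object):
    @abc.abstractmethod
    def update(self, data):
        """Feed bytes into the hash."""


try:
    @register_interface(HashContext)
    class BadHash(object):
        def update(self):  # wrong signature: missing `data`
            pass
except InterfaceNotImplemented as exc:
    print(exc)  # "...signature differs from the expected..."
```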
gh_patches_debug_3588 | rasdani/github-patches | git_diff | akvo__akvo-rsr-3753 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Show only relevant updates in typeahead on Akvo pages
Currently, all updates can be searched for in the partner site updates typeahead, rather than only the updates relevant to that partner site.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `akvo/rest/views/typeahead.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 """Akvo RSR is covered by the GNU Affero General Public License.
4 See more details in the license.txt file located at the root folder of the
5 Akvo RSR module. For additional details on the GNU license please
6 see < http://www.gnu.org/licenses/agpl.html >.
7 """
8
9 from django.conf import settings
10 from rest_framework.decorators import api_view
11 from rest_framework.response import Response
12
13 from akvo.codelists.models import Country, Version
14 from akvo.rest.serializers import (TypeaheadCountrySerializer,
15 TypeaheadOrganisationSerializer,
16 TypeaheadProjectSerializer,
17 TypeaheadProjectUpdateSerializer,
18 TypeaheadKeywordSerializer,)
19 from akvo.rsr.models import Organisation, Project, ProjectUpdate
20 from akvo.rsr.views.project import _project_directory_coll
21
22
23 def rejig(queryset, serializer):
24 """Rearrange & add queryset count to the response data."""
25 return {
26 'count': queryset.count(),
27 'results': serializer.data
28 }
29
30
31 @api_view(['GET'])
32 def typeahead_country(request):
33 iati_version = Version.objects.get(code=settings.IATI_VERSION)
34 countries = Country.objects.filter(version=iati_version)
35 return Response(
36 rejig(countries, TypeaheadCountrySerializer(countries, many=True))
37 )
38
39
40 @api_view(['GET'])
41 def typeahead_organisation(request):
42 page = request.rsr_page
43 if request.GET.get('partners', '0') == '1' and page:
44 organisations = page.partners()
45 else:
46 # Project editor - all organizations
47 organisations = Organisation.objects.all()
48
49 organisations = organisations.values('id', 'name', 'long_name')
50
51 return Response(
52 rejig(organisations, TypeaheadOrganisationSerializer(organisations,
53 many=True))
54 )
55
56
57 @api_view(['GET'])
58 def typeahead_user_organisations(request):
59 user = request.user
60 is_admin = user.is_active and (user.is_superuser or user.is_admin)
61 organisations = user.approved_organisations() if not is_admin else Organisation.objects.all()
62 return Response(
63 rejig(organisations, TypeaheadOrganisationSerializer(organisations,
64 many=True))
65 )
66
67
68 @api_view(['GET'])
69 def typeahead_keyword(request):
70 page = request.rsr_page
71 keywords = page.keywords.all() if page else None
72 if keywords:
73 return Response(
74 rejig(keywords, TypeaheadKeywordSerializer(keywords, many=True))
75 )
76 # No keywords on rsr.akvo.org
77 return Response({})
78
79
80 @api_view(['GET'])
81 def typeahead_project(request):
82 """Return the typeaheads for projects.
83
84 Without any query parameters, it returns the info for all the projects in
85 the current context -- changes depending on whether we are on a partner
86 site, or the RSR site.
87
88 If a published query parameter is passed, only projects that have been
89 published are returned.
90
91 NOTE: The unauthenticated user gets information about all the projects when
92 using this API endpoint. More permission checking will need to be added,
93 if the amount of data being returned is changed.
94
95 """
96 if request.GET.get('published', '0') == '0':
97 # Project editor - organization projects, all
98 page = request.rsr_page
99 projects = page.all_projects() if page else Project.objects.all()
100 else:
101 # Search bar - organization projects, published
102 projects = _project_directory_coll(request)
103
104 projects = projects.exclude(title='')
105 return Response(
106 rejig(projects, TypeaheadProjectSerializer(projects, many=True))
107 )
108
109
110 @api_view(['GET'])
111 def typeahead_user_projects(request):
112 user = request.user
113 is_admin = user.is_active and (user.is_superuser or user.is_admin)
114 if is_admin:
115 projects = Project.objects.all()
116 else:
117 projects = user.approved_organisations().all_projects()
118 projects = projects.exclude(title='')
119 return Response(
120 rejig(projects, TypeaheadProjectSerializer(projects, many=True))
121 )
122
123
124 @api_view(['GET'])
125 def typeahead_impact_projects(request):
126 user = request.user
127 projects = Project.objects.all() if user.is_admin or user.is_superuser else user.my_projects()
128 projects = projects.published().filter(is_impact_project=True).order_by('title')
129
130 return Response(
131 rejig(projects, TypeaheadProjectSerializer(projects, many=True))
132 )
133
134
135 @api_view(['GET'])
136 def typeahead_projectupdate(request):
137 updates = ProjectUpdate.objects.all()
138 return Response(
139 rejig(updates, TypeaheadProjectUpdateSerializer(updates, many=True))
140 )
141
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/akvo/rest/views/typeahead.py b/akvo/rest/views/typeahead.py
--- a/akvo/rest/views/typeahead.py
+++ b/akvo/rest/views/typeahead.py
@@ -134,7 +134,8 @@
@api_view(['GET'])
def typeahead_projectupdate(request):
- updates = ProjectUpdate.objects.all()
+ page = request.rsr_page
+ updates = page.updates() if page else ProjectUpdate.objects.all()
return Response(
rejig(updates, TypeaheadProjectUpdateSerializer(updates, many=True))
)
| {"golden_diff": "diff --git a/akvo/rest/views/typeahead.py b/akvo/rest/views/typeahead.py\n--- a/akvo/rest/views/typeahead.py\n+++ b/akvo/rest/views/typeahead.py\n@@ -134,7 +134,8 @@\n \n @api_view(['GET'])\n def typeahead_projectupdate(request):\n- updates = ProjectUpdate.objects.all()\n+ page = request.rsr_page\n+ updates = page.updates() if page else ProjectUpdate.objects.all()\n return Response(\n rejig(updates, TypeaheadProjectUpdateSerializer(updates, many=True))\n )\n", "issue": "Show only relevant updates in typeahead on Akvo pages\nCurrently, all updates can be searched for on partner site updates typeahead. \n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Akvo RSR is covered by the GNU Affero General Public License.\nSee more details in the license.txt file located at the root folder of the\nAkvo RSR module. For additional details on the GNU license please\nsee < http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nfrom django.conf import settings\nfrom rest_framework.decorators import api_view\nfrom rest_framework.response import Response\n\nfrom akvo.codelists.models import Country, Version\nfrom akvo.rest.serializers import (TypeaheadCountrySerializer,\n TypeaheadOrganisationSerializer,\n TypeaheadProjectSerializer,\n TypeaheadProjectUpdateSerializer,\n TypeaheadKeywordSerializer,)\nfrom akvo.rsr.models import Organisation, Project, ProjectUpdate\nfrom akvo.rsr.views.project import _project_directory_coll\n\n\ndef rejig(queryset, serializer):\n \"\"\"Rearrange & add queryset count to the response data.\"\"\"\n return {\n 'count': queryset.count(),\n 'results': serializer.data\n }\n\n\n@api_view(['GET'])\ndef typeahead_country(request):\n iati_version = Version.objects.get(code=settings.IATI_VERSION)\n countries = Country.objects.filter(version=iati_version)\n return Response(\n rejig(countries, TypeaheadCountrySerializer(countries, many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_organisation(request):\n page = request.rsr_page\n if request.GET.get('partners', '0') == '1' and page:\n organisations = page.partners()\n else:\n # Project editor - all organizations\n organisations = Organisation.objects.all()\n\n organisations = organisations.values('id', 'name', 'long_name')\n\n return Response(\n rejig(organisations, TypeaheadOrganisationSerializer(organisations,\n many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_user_organisations(request):\n user = request.user\n is_admin = user.is_active and (user.is_superuser or user.is_admin)\n organisations = user.approved_organisations() if not is_admin else Organisation.objects.all()\n return Response(\n rejig(organisations, TypeaheadOrganisationSerializer(organisations,\n many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_keyword(request):\n page = request.rsr_page\n keywords = page.keywords.all() if page else None\n if keywords:\n return Response(\n rejig(keywords, TypeaheadKeywordSerializer(keywords, many=True))\n )\n # No keywords on rsr.akvo.org\n return Response({})\n\n\n@api_view(['GET'])\ndef typeahead_project(request):\n \"\"\"Return the typeaheads for projects.\n\n Without any query parameters, it returns the info for all the projects in\n the current context -- changes depending on whether we are on a partner\n site, or the RSR site.\n\n If a published query parameter is passed, only projects that have been\n published are returned.\n\n NOTE: The unauthenticated user gets information about all the projects when\n using this API endpoint. 
More permission checking will need to be added,\n if the amount of data being returned is changed.\n\n \"\"\"\n if request.GET.get('published', '0') == '0':\n # Project editor - organization projects, all\n page = request.rsr_page\n projects = page.all_projects() if page else Project.objects.all()\n else:\n # Search bar - organization projects, published\n projects = _project_directory_coll(request)\n\n projects = projects.exclude(title='')\n return Response(\n rejig(projects, TypeaheadProjectSerializer(projects, many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_user_projects(request):\n user = request.user\n is_admin = user.is_active and (user.is_superuser or user.is_admin)\n if is_admin:\n projects = Project.objects.all()\n else:\n projects = user.approved_organisations().all_projects()\n projects = projects.exclude(title='')\n return Response(\n rejig(projects, TypeaheadProjectSerializer(projects, many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_impact_projects(request):\n user = request.user\n projects = Project.objects.all() if user.is_admin or user.is_superuser else user.my_projects()\n projects = projects.published().filter(is_impact_project=True).order_by('title')\n\n return Response(\n rejig(projects, TypeaheadProjectSerializer(projects, many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_projectupdate(request):\n updates = ProjectUpdate.objects.all()\n return Response(\n rejig(updates, TypeaheadProjectUpdateSerializer(updates, many=True))\n )\n", "path": "akvo/rest/views/typeahead.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Akvo RSR is covered by the GNU Affero General Public License.\nSee more details in the license.txt file located at the root folder of the\nAkvo RSR module. For additional details on the GNU license please\nsee < http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nfrom django.conf import settings\nfrom rest_framework.decorators import api_view\nfrom rest_framework.response import Response\n\nfrom akvo.codelists.models import Country, Version\nfrom akvo.rest.serializers import (TypeaheadCountrySerializer,\n TypeaheadOrganisationSerializer,\n TypeaheadProjectSerializer,\n TypeaheadProjectUpdateSerializer,\n TypeaheadKeywordSerializer,)\nfrom akvo.rsr.models import Organisation, Project, ProjectUpdate\nfrom akvo.rsr.views.project import _project_directory_coll\n\n\ndef rejig(queryset, serializer):\n \"\"\"Rearrange & add queryset count to the response data.\"\"\"\n return {\n 'count': queryset.count(),\n 'results': serializer.data\n }\n\n\n@api_view(['GET'])\ndef typeahead_country(request):\n iati_version = Version.objects.get(code=settings.IATI_VERSION)\n countries = Country.objects.filter(version=iati_version)\n return Response(\n rejig(countries, TypeaheadCountrySerializer(countries, many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_organisation(request):\n page = request.rsr_page\n if request.GET.get('partners', '0') == '1' and page:\n organisations = page.partners()\n else:\n # Project editor - all organizations\n organisations = Organisation.objects.all()\n\n organisations = organisations.values('id', 'name', 'long_name')\n\n return Response(\n rejig(organisations, TypeaheadOrganisationSerializer(organisations,\n many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_user_organisations(request):\n user = request.user\n is_admin = user.is_active and (user.is_superuser or user.is_admin)\n organisations = user.approved_organisations() if not is_admin else Organisation.objects.all()\n return Response(\n 
rejig(organisations, TypeaheadOrganisationSerializer(organisations,\n many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_keyword(request):\n page = request.rsr_page\n keywords = page.keywords.all() if page else None\n if keywords:\n return Response(\n rejig(keywords, TypeaheadKeywordSerializer(keywords, many=True))\n )\n # No keywords on rsr.akvo.org\n return Response({})\n\n\n@api_view(['GET'])\ndef typeahead_project(request):\n \"\"\"Return the typeaheads for projects.\n\n Without any query parameters, it returns the info for all the projects in\n the current context -- changes depending on whether we are on a partner\n site, or the RSR site.\n\n If a published query parameter is passed, only projects that have been\n published are returned.\n\n NOTE: The unauthenticated user gets information about all the projects when\n using this API endpoint. More permission checking will need to be added,\n if the amount of data being returned is changed.\n\n \"\"\"\n if request.GET.get('published', '0') == '0':\n # Project editor - organization projects, all\n page = request.rsr_page\n projects = page.all_projects() if page else Project.objects.all()\n else:\n # Search bar - organization projects, published\n projects = _project_directory_coll(request)\n\n projects = projects.exclude(title='')\n return Response(\n rejig(projects, TypeaheadProjectSerializer(projects, many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_user_projects(request):\n user = request.user\n is_admin = user.is_active and (user.is_superuser or user.is_admin)\n if is_admin:\n projects = Project.objects.all()\n else:\n projects = user.approved_organisations().all_projects()\n projects = projects.exclude(title='')\n return Response(\n rejig(projects, TypeaheadProjectSerializer(projects, many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_impact_projects(request):\n user = request.user\n projects = Project.objects.all() if user.is_admin or user.is_superuser else user.my_projects()\n projects = projects.published().filter(is_impact_project=True).order_by('title')\n\n return Response(\n rejig(projects, TypeaheadProjectSerializer(projects, many=True))\n )\n\n\n@api_view(['GET'])\ndef typeahead_projectupdate(request):\n page = request.rsr_page\n updates = page.updates() if page else ProjectUpdate.objects.all()\n return Response(\n rejig(updates, TypeaheadProjectUpdateSerializer(updates, many=True))\n )\n", "path": "akvo/rest/views/typeahead.py"}]} | 1,605 | 129 |
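The effect of the patch above is easiest to see at the queryset level; a hypothetical shell sketch, where `page` stands in for the partner site that RSR's middleware attaches to the request as `rsr_page` (the `PartnerSite` model name is an assumption based on this row's context):

```python
from akvo.rsr.models import PartnerSite, ProjectUpdate

page = PartnerSite.objects.first()
ProjectUpdate.objects.count()  # before: the typeahead searched every update in RSR
page.updates().count()         # after: only updates belonging to this partner site
```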
gh_patches_debug_31114 | rasdani/github-patches | git_diff | bridgecrewio__checkov-2154 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CKV_AWS_174 incorrect reporting
CKV_AWS_174 is being triggered in our Terraform code even though we have the viewer certificate set to use TLSv1.2. Snippet of our code here:
```hcl
viewer_certificate {
  acm_certificate_arn      = aws_acm_certificate.cert.arn
  ssl_support_method       = "sni-only"
  minimum_protocol_version = "TLSv1.2_2019"
}
```
Steps to reproduce the behavior:
Running Checkov on our Terraform code
**Expected behavior**
This check should pass
**Additional context**
It looks to me like the issue is in the code between lines 17 and 19. I don't think, based on the Terraform documentation and the if statements, that it would ever pass when using an ACM certificate
https://github.com/bridgecrewio/checkov/blob/master/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkov/terraform/checks/resource/aws/CloudfrontTLS12.py`
Content:
```
1 from checkov.common.models.enums import CheckCategories, CheckResult
2 from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck
3
4
5 class CloudFrontTLS12(BaseResourceValueCheck):
6 def __init__(self):
7 name = "Verify CloudFront Distribution Viewer Certificate is using TLS v1.2"
8 id = "CKV_AWS_174"
9 supported_resources = ["aws_cloudfront_distribution"]
10 categories = [CheckCategories.ENCRYPTION]
11 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
12
13 def scan_resource_conf(self, conf):
14 if "viewer_certificate" in conf.keys():
15 # check if cloudfront_default_certificate is true then this could use less than tls 1.2
16 viewer_certificate = conf["viewer_certificate"][0]
17 if 'cloudfront_default_certificate' in viewer_certificate:
18 #is not using the default certificate
19 if viewer_certificate["cloudfront_default_certificate"] is not True:
20 #these protocol versions
21 if "minimum_protocol_version" in viewer_certificate:
22 protocol=viewer_certificate["minimum_protocol_version"][0]
23 if protocol in ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']:
24 return CheckResult.PASSED
25
26 #No cert specified so using default which can be less that tls 1.2
27 return CheckResult.FAILED
28
29 def get_inspected_key(self):
30
31 return "viewer_certificate/[0]/minimum_protocol_version"
32
33 def get_expected_values(self):
34 return ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']
35
36
37 check = CloudFrontTLS12()
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py b/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py
--- a/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py
+++ b/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py
@@ -12,18 +12,18 @@
def scan_resource_conf(self, conf):
if "viewer_certificate" in conf.keys():
- # check if cloudfront_default_certificate is true then this could use less than tls 1.2
viewer_certificate = conf["viewer_certificate"][0]
- if 'cloudfront_default_certificate' in viewer_certificate:
- #is not using the default certificate
- if viewer_certificate["cloudfront_default_certificate"] is not True:
- #these protocol versions
- if "minimum_protocol_version" in viewer_certificate:
- protocol=viewer_certificate["minimum_protocol_version"][0]
- if protocol in ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']:
- return CheckResult.PASSED
-
- #No cert specified so using default which can be less that tls 1.2
+ # check if cloudfront_default_certificate is true then this could use less than tls 1.2
+ if ("cloudfront_default_certificate" in viewer_certificate and viewer_certificate
+ ["cloudfront_default_certificate"][0] is not True) or (
+ 'minimum_protocol_version' in viewer_certificate):
+ # is not using the default certificate
+ if 'minimum_protocol_version' in viewer_certificate:
+ protocol = viewer_certificate["minimum_protocol_version"][0]
+ # these protocol versions
+ if protocol in ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']:
+ return CheckResult.PASSED
+ # No cert specified so using default which can be less that tls 1.2
return CheckResult.FAILED
def get_inspected_key(self):
@@ -34,4 +34,4 @@
return ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']
-check = CloudFrontTLS12()
\ No newline at end of file
+check = CloudFrontTLS12()
| {"golden_diff": "diff --git a/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py b/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py\n--- a/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py\n+++ b/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py\n@@ -12,18 +12,18 @@\n \n def scan_resource_conf(self, conf):\n if \"viewer_certificate\" in conf.keys():\n- # check if cloudfront_default_certificate is true then this could use less than tls 1.2\n viewer_certificate = conf[\"viewer_certificate\"][0]\n- if 'cloudfront_default_certificate' in viewer_certificate:\n- #is not using the default certificate\n- if viewer_certificate[\"cloudfront_default_certificate\"] is not True:\n- #these protocol versions\n- if \"minimum_protocol_version\" in viewer_certificate:\n- protocol=viewer_certificate[\"minimum_protocol_version\"][0]\n- if protocol in ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']:\n- return CheckResult.PASSED\n-\n- #No cert specified so using default which can be less that tls 1.2\n+ # check if cloudfront_default_certificate is true then this could use less than tls 1.2\n+ if (\"cloudfront_default_certificate\" in viewer_certificate and viewer_certificate\n+ [\"cloudfront_default_certificate\"][0] is not True) or (\n+ 'minimum_protocol_version' in viewer_certificate):\n+ # is not using the default certificate\n+ if 'minimum_protocol_version' in viewer_certificate:\n+ protocol = viewer_certificate[\"minimum_protocol_version\"][0]\n+ # these protocol versions\n+ if protocol in ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']:\n+ return CheckResult.PASSED\n+ # No cert specified so using default which can be less that tls 1.2\n return CheckResult.FAILED\n \n def get_inspected_key(self):\n@@ -34,4 +34,4 @@\n return ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']\n \n \n-check = CloudFrontTLS12()\n\\ No newline at end of file\n+check = CloudFrontTLS12()\n", "issue": "CKV_AWS_174 incorrect reporting\nCKV_AWS_174 is being triggered in our terraform code even though we have the viewer certificate set to use TLSv.1.2. Snippet of our code here:\r\n\r\nviewer_certificate {\r\n acm_certificate_arn = aws_acm_certificate.cert.arn\r\n ssl_support_method = \"sni-only\"\r\n minimum_protocol_version = \"TLSv1.2_2019\" \r\n}\r\n\r\n\r\nSteps to reproduce the behavior:\r\nRunning checkov on our terraform code\r\n\r\n**Expected behavior**\r\nThis check should be passed\r\n\r\n\r\n\r\n**Additional context**\r\nIt looks to me like the issue is in the code between lines 17 and 19. 
I dont think based on the terraform documentation and the if statements that it would ever pass if using an acm certificate\r\n\r\nhttps://github.com/bridgecrewio/checkov/blob/master/checkov/terraform/checks/resource/aws/CloudfrontTLS12.py\r\n\n", "before_files": [{"content": "from checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n\n\nclass CloudFrontTLS12(BaseResourceValueCheck):\n def __init__(self):\n name = \"Verify CloudFront Distribution Viewer Certificate is using TLS v1.2\"\n id = \"CKV_AWS_174\"\n supported_resources = [\"aws_cloudfront_distribution\"]\n categories = [CheckCategories.ENCRYPTION]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf):\n if \"viewer_certificate\" in conf.keys():\n # check if cloudfront_default_certificate is true then this could use less than tls 1.2\n viewer_certificate = conf[\"viewer_certificate\"][0]\n if 'cloudfront_default_certificate' in viewer_certificate:\n #is not using the default certificate\n if viewer_certificate[\"cloudfront_default_certificate\"] is not True:\n #these protocol versions\n if \"minimum_protocol_version\" in viewer_certificate:\n protocol=viewer_certificate[\"minimum_protocol_version\"][0]\n if protocol in ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']:\n return CheckResult.PASSED\n\n #No cert specified so using default which can be less that tls 1.2\n return CheckResult.FAILED\n\n def get_inspected_key(self):\n\n return \"viewer_certificate/[0]/minimum_protocol_version\"\n\n def get_expected_values(self):\n return ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']\n\n\ncheck = CloudFrontTLS12()", "path": "checkov/terraform/checks/resource/aws/CloudfrontTLS12.py"}], "after_files": [{"content": "from checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n\n\nclass CloudFrontTLS12(BaseResourceValueCheck):\n def __init__(self):\n name = \"Verify CloudFront Distribution Viewer Certificate is using TLS v1.2\"\n id = \"CKV_AWS_174\"\n supported_resources = [\"aws_cloudfront_distribution\"]\n categories = [CheckCategories.ENCRYPTION]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf):\n if \"viewer_certificate\" in conf.keys():\n viewer_certificate = conf[\"viewer_certificate\"][0]\n # check if cloudfront_default_certificate is true then this could use less than tls 1.2\n if (\"cloudfront_default_certificate\" in viewer_certificate and viewer_certificate\n [\"cloudfront_default_certificate\"][0] is not True) or (\n 'minimum_protocol_version' in viewer_certificate):\n # is not using the default certificate\n if 'minimum_protocol_version' in viewer_certificate:\n protocol = viewer_certificate[\"minimum_protocol_version\"][0]\n # these protocol versions\n if protocol in ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']:\n return CheckResult.PASSED\n # No cert specified so using default which can be less that tls 1.2\n return CheckResult.FAILED\n\n def get_inspected_key(self):\n\n return \"viewer_certificate/[0]/minimum_protocol_version\"\n\n def get_expected_values(self):\n return ['TLSv1.2_2018', 'TLSv1.2_2019', 'TLSv1.2_2021']\n\n\ncheck = CloudFrontTLS12()\n", "path": "checkov/terraform/checks/resource/aws/CloudfrontTLS12.py"}]} | 930 | 551 |
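To see the fixed check pass on the reporter's configuration, it can be invoked directly with a parsed-conf dict; checkov's HCL parser wraps scalar values in single-element lists, which the sketch below mimics:

```python
# Exercise the patched check with a conf shaped like checkov's parser output.
from checkov.common.models.enums import CheckResult
from checkov.terraform.checks.resource.aws.CloudfrontTLS12 import check

conf = {
    "viewer_certificate": [{
        "acm_certificate_arn": ["aws_acm_certificate.cert.arn"],
        "ssl_support_method": ["sni-only"],
        "minimum_protocol_version": ["TLSv1.2_2019"],
    }]
}
assert check.scan_resource_conf(conf) == CheckResult.PASSED
```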
gh_patches_debug_5802 | rasdani/github-patches | git_diff | akvo__akvo-rsr-4094 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Investigate creation of duplicate user accounts with differently cased emails
- [ ] Verify that lookups using email are using `__iexact` or something like that.
- [ ] Figure out a plan for existing duplicates
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `akvo/rest/views/utils.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # Akvo RSR is covered by the GNU Affero General Public License.
4
5 # See more details in the license.txt file located at the root folder of the Akvo RSR module.
6 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
7
8 from django.conf import settings
9 from django.contrib.auth import get_user_model
10 from django.core.cache import cache
11 from django.utils.cache import get_cache_key, _generate_cache_header_key
12 from django.db import IntegrityError
13
14
15 def get_cached_data(request, key_prefix, data, serializer):
16 """Function to get serialized data from the cache based on the request."""
17 cache_header_key = _generate_cache_header_key(key_prefix, request)
18 if cache.get(cache_header_key) is None:
19 cache.set(cache_header_key, [], None)
20
21 cache_key = get_cache_key(request, key_prefix)
22 cached_data = cache.get(cache_key, None)
23 cache_used = True
24 if not cached_data and data is not None:
25 cache_used = False
26 cached_data = serializer(data, many=True).data
27 cache.set(cache_key, cached_data)
28
29 return cached_data, cache_used
30
31
32 def set_cached_data(request, key_prefix, data):
33 """Function to save data to the cache based on the request."""
34
35 cache_header_key = _generate_cache_header_key(key_prefix, request)
36 if cache.get(cache_header_key) is None:
37 cache.set(cache_header_key, [], None)
38
39 cache_key = get_cache_key(request, key_prefix)
40 cache.set(cache_key, data)
41
42
43 def get_qs_elements_for_page(qs, request, count):
44 """Return queryset elements to be shown on the current page"""
45 limit = int_or_none(request.GET.get('limit')) or settings.PROJECT_DIRECTORY_PAGE_SIZES[0]
46 limit = min(limit, settings.PROJECT_DIRECTORY_PAGE_SIZES[-1])
47 max_page_number = 1 + int(count / limit)
48 page_number = min(max_page_number, int_or_none(request.GET.get('page')) or 1)
49 start = (page_number - 1) * limit
50 end = page_number * limit
51 return qs[start:end]
52
53
54 def int_or_none(value):
55 """Return int or None given a value."""
56 try:
57 return int(value)
58 except Exception:
59 return None
60
61
62 def create_invited_user(email):
63 User = get_user_model()
64 # Check if the user already exists, based on the email address
65 try:
66 invited_user = User.objects.get(email=email)
67 except User.DoesNotExist:
68 try:
69 invited_user = User.objects.create_user(username=email, email=email)
70 except IntegrityError:
71 return None
72 return invited_user
73
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/akvo/rest/views/utils.py b/akvo/rest/views/utils.py
--- a/akvo/rest/views/utils.py
+++ b/akvo/rest/views/utils.py
@@ -63,7 +63,7 @@
User = get_user_model()
# Check if the user already exists, based on the email address
try:
- invited_user = User.objects.get(email=email)
+ invited_user = User.objects.get(email__iexact=email)
except User.DoesNotExist:
try:
invited_user = User.objects.create_user(username=email, email=email)
| {"golden_diff": "diff --git a/akvo/rest/views/utils.py b/akvo/rest/views/utils.py\n--- a/akvo/rest/views/utils.py\n+++ b/akvo/rest/views/utils.py\n@@ -63,7 +63,7 @@\n User = get_user_model()\n # Check if the user already exists, based on the email address\n try:\n- invited_user = User.objects.get(email=email)\n+ invited_user = User.objects.get(email__iexact=email)\n except User.DoesNotExist:\n try:\n invited_user = User.objects.create_user(username=email, email=email)\n", "issue": "Investigate creation of duplicate user accounts with differently cased emails\n- [ ] Verify that lookups using email are using `__iexact` or something like that. \n- [ ] Figure out a plan for existing duplicates\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom django.conf import settings\nfrom django.contrib.auth import get_user_model\nfrom django.core.cache import cache\nfrom django.utils.cache import get_cache_key, _generate_cache_header_key\nfrom django.db import IntegrityError\n\n\ndef get_cached_data(request, key_prefix, data, serializer):\n \"\"\"Function to get serialized data from the cache based on the request.\"\"\"\n cache_header_key = _generate_cache_header_key(key_prefix, request)\n if cache.get(cache_header_key) is None:\n cache.set(cache_header_key, [], None)\n\n cache_key = get_cache_key(request, key_prefix)\n cached_data = cache.get(cache_key, None)\n cache_used = True\n if not cached_data and data is not None:\n cache_used = False\n cached_data = serializer(data, many=True).data\n cache.set(cache_key, cached_data)\n\n return cached_data, cache_used\n\n\ndef set_cached_data(request, key_prefix, data):\n \"\"\"Function to save data to the cache based on the request.\"\"\"\n\n cache_header_key = _generate_cache_header_key(key_prefix, request)\n if cache.get(cache_header_key) is None:\n cache.set(cache_header_key, [], None)\n\n cache_key = get_cache_key(request, key_prefix)\n cache.set(cache_key, data)\n\n\ndef get_qs_elements_for_page(qs, request, count):\n \"\"\"Return queryset elements to be shown on the current page\"\"\"\n limit = int_or_none(request.GET.get('limit')) or settings.PROJECT_DIRECTORY_PAGE_SIZES[0]\n limit = min(limit, settings.PROJECT_DIRECTORY_PAGE_SIZES[-1])\n max_page_number = 1 + int(count / limit)\n page_number = min(max_page_number, int_or_none(request.GET.get('page')) or 1)\n start = (page_number - 1) * limit\n end = page_number * limit\n return qs[start:end]\n\n\ndef int_or_none(value):\n \"\"\"Return int or None given a value.\"\"\"\n try:\n return int(value)\n except Exception:\n return None\n\n\ndef create_invited_user(email):\n User = get_user_model()\n # Check if the user already exists, based on the email address\n try:\n invited_user = User.objects.get(email=email)\n except User.DoesNotExist:\n try:\n invited_user = User.objects.create_user(username=email, email=email)\n except IntegrityError:\n return None\n return invited_user\n", "path": "akvo/rest/views/utils.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom 
django.conf import settings\nfrom django.contrib.auth import get_user_model\nfrom django.core.cache import cache\nfrom django.utils.cache import get_cache_key, _generate_cache_header_key\nfrom django.db import IntegrityError\n\n\ndef get_cached_data(request, key_prefix, data, serializer):\n \"\"\"Function to get serialized data from the cache based on the request.\"\"\"\n cache_header_key = _generate_cache_header_key(key_prefix, request)\n if cache.get(cache_header_key) is None:\n cache.set(cache_header_key, [], None)\n\n cache_key = get_cache_key(request, key_prefix)\n cached_data = cache.get(cache_key, None)\n cache_used = True\n if not cached_data and data is not None:\n cache_used = False\n cached_data = serializer(data, many=True).data\n cache.set(cache_key, cached_data)\n\n return cached_data, cache_used\n\n\ndef set_cached_data(request, key_prefix, data):\n \"\"\"Function to save data to the cache based on the request.\"\"\"\n\n cache_header_key = _generate_cache_header_key(key_prefix, request)\n if cache.get(cache_header_key) is None:\n cache.set(cache_header_key, [], None)\n\n cache_key = get_cache_key(request, key_prefix)\n cache.set(cache_key, data)\n\n\ndef get_qs_elements_for_page(qs, request, count):\n \"\"\"Return queryset elements to be shown on the current page\"\"\"\n limit = int_or_none(request.GET.get('limit')) or settings.PROJECT_DIRECTORY_PAGE_SIZES[0]\n limit = min(limit, settings.PROJECT_DIRECTORY_PAGE_SIZES[-1])\n max_page_number = 1 + int(count / limit)\n page_number = min(max_page_number, int_or_none(request.GET.get('page')) or 1)\n start = (page_number - 1) * limit\n end = page_number * limit\n return qs[start:end]\n\n\ndef int_or_none(value):\n \"\"\"Return int or None given a value.\"\"\"\n try:\n return int(value)\n except Exception:\n return None\n\n\ndef create_invited_user(email):\n User = get_user_model()\n # Check if the user already exists, based on the email address\n try:\n invited_user = User.objects.get(email__iexact=email)\n except User.DoesNotExist:\n try:\n invited_user = User.objects.create_user(username=email, email=email)\n except IntegrityError:\n return None\n return invited_user\n", "path": "akvo/rest/views/utils.py"}]} | 1,028 | 124 |
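For the first checklist item in this row's issue, the behavioral difference is easy to demonstrate; a minimal sketch using Django's default user model for illustration, assuming a database whose collation is case-sensitive (e.g. PostgreSQL's default):

```python
from django.contrib.auth import get_user_model

User = get_user_model()
User.objects.create_user(username="[email protected]", email="[email protected]")

# Exact match misses the existing row, so the old code created a duplicate:
User.objects.filter(email="[email protected]").exists()  # False
# The patched lookup finds it regardless of casing:
User.objects.filter(email__iexact="[email protected]").exists()  # True
```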
gh_patches_debug_7429 | rasdani/github-patches | git_diff | cloudtools__troposphere-457 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Elasticsearch Domain DomainName shouldn't be required
According to the CF documentation, `DomainName` isn't required: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `troposphere/elasticsearch.py`
Content:
```
1 # Copyright (c) 2012-2015, Mark Peek <[email protected]>
2 # All rights reserved.
3 #
4 # See LICENSE file for full license.
5
6 from . import AWSProperty, AWSObject
7 from .validators import boolean, integer, integer_range, positive_integer
8
9 VALID_VOLUME_TYPES = ('standard', 'gp2', 'io1')
10
11 try:
12 from awacs.aws import Policy
13 policytypes = (dict, Policy)
14 except ImportError:
15 policytypes = dict,
16
17
18 def validate_volume_type(volume_type):
19 """Validate VolumeType for ElasticsearchDomain"""
20 if volume_type not in VALID_VOLUME_TYPES:
21 raise ValueError("Elasticsearch Domain VolumeType must be one of: %s" %
22 ", ".join(VALID_VOLUME_TYPES))
23 return volume_type
24
25
26 class EBSOptions(AWSProperty):
27 props = {
28 'EBSEnabled': (boolean, False),
29 'Iops': (positive_integer, False),
30 'VolumeSize': (integer, False),
31 'VolumeType': (validate_volume_type, False)
32 }
33
34 def validate(self):
35 volume_type = self.properties.get('VolumeType')
36 iops = self.properties.get('Iops')
37 if volume_type == 'io1' and not iops:
38 raise ValueError("Must specify Iops if VolumeType is 'io1'.")
39
40
41 class ElasticsearchClusterConfig(AWSProperty):
42 props = {
43 'DedicatedMasterCount': (integer, False),
44 'DedicatedMasterEnabled': (boolean, False),
45 'DedicatedMasterType': (basestring, False),
46 'InstanceCount': (integer, False),
47 'InstanceType': (basestring, False),
48 'ZoneAwarenessEnabled': (boolean, False)
49 }
50
51
52 class SnapshotOptions(AWSProperty):
53 props = {
54 'AutomatedSnapshotStartHour': (integer_range(0, 23), False)
55 }
56
57
58 class ElasticsearchDomain(AWSObject):
59 resource_type = "AWS::Elasticsearch::Domain"
60
61 props = {
62 'AccessPolicies': (policytypes, False),
63 'AdvancedOptions': (dict, False),
64 'DomainName': (basestring, True),
65 'EBSOptions': (EBSOptions, False),
66 'ElasticsearchClusterConfig': (ElasticsearchClusterConfig, False),
67 'SnapshotOptions': (SnapshotOptions, False),
68 'Tags': (list, False)
69 }
70
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/troposphere/elasticsearch.py b/troposphere/elasticsearch.py
--- a/troposphere/elasticsearch.py
+++ b/troposphere/elasticsearch.py
@@ -61,7 +61,7 @@
props = {
'AccessPolicies': (policytypes, False),
'AdvancedOptions': (dict, False),
- 'DomainName': (basestring, True),
+ 'DomainName': (basestring, False),
'EBSOptions': (EBSOptions, False),
'ElasticsearchClusterConfig': (ElasticsearchClusterConfig, False),
'SnapshotOptions': (SnapshotOptions, False),
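A quick way to see the effect of the patch: with `DomainName` optional, a domain can be declared by title alone and serialization should still validate. This is a hedged sketch based on troposphere's usual template API, not a test taken from the repository:

```python
from troposphere import Template
from troposphere.elasticsearch import ElasticsearchDomain

t = Template()
# No DomainName property set: CloudFormation generates a name itself.
t.add_resource(ElasticsearchDomain("MyESDomain"))
print(t.to_json())  # validates after the patch; previously failed the required-property check
```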
| {"golden_diff": "diff --git a/troposphere/elasticsearch.py b/troposphere/elasticsearch.py\n--- a/troposphere/elasticsearch.py\n+++ b/troposphere/elasticsearch.py\n@@ -61,7 +61,7 @@\n props = {\n 'AccessPolicies': (policytypes, False),\n 'AdvancedOptions': (dict, False),\n- 'DomainName': (basestring, True),\n+ 'DomainName': (basestring, False),\n 'EBSOptions': (EBSOptions, False),\n 'ElasticsearchClusterConfig': (ElasticsearchClusterConfig, False),\n 'SnapshotOptions': (SnapshotOptions, False),\n", "issue": "Elasticsearch Domain DomainName shouldn't be required\nAccording to the CF documentation, `DomainName` isn't required: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticsearch-domain.html\n\n", "before_files": [{"content": "# Copyright (c) 2012-2015, Mark Peek <[email protected]>\n# All rights reserved.\n#\n# See LICENSE file for full license.\n\nfrom . import AWSProperty, AWSObject\nfrom .validators import boolean, integer, integer_range, positive_integer\n\nVALID_VOLUME_TYPES = ('standard', 'gp2', 'io1')\n\ntry:\n from awacs.aws import Policy\n policytypes = (dict, Policy)\nexcept ImportError:\n policytypes = dict,\n\n\ndef validate_volume_type(volume_type):\n \"\"\"Validate VolumeType for ElasticsearchDomain\"\"\"\n if volume_type not in VALID_VOLUME_TYPES:\n raise ValueError(\"Elasticsearch Domain VolumeType must be one of: %s\" %\n \", \".join(VALID_VOLUME_TYPES))\n return volume_type\n\n\nclass EBSOptions(AWSProperty):\n props = {\n 'EBSEnabled': (boolean, False),\n 'Iops': (positive_integer, False),\n 'VolumeSize': (integer, False),\n 'VolumeType': (validate_volume_type, False)\n }\n\n def validate(self):\n volume_type = self.properties.get('VolumeType')\n iops = self.properties.get('Iops')\n if volume_type == 'io1' and not iops:\n raise ValueError(\"Must specify Iops if VolumeType is 'io1'.\")\n\n\nclass ElasticsearchClusterConfig(AWSProperty):\n props = {\n 'DedicatedMasterCount': (integer, False),\n 'DedicatedMasterEnabled': (boolean, False),\n 'DedicatedMasterType': (basestring, False),\n 'InstanceCount': (integer, False),\n 'InstanceType': (basestring, False),\n 'ZoneAwarenessEnabled': (boolean, False)\n }\n\n\nclass SnapshotOptions(AWSProperty):\n props = {\n 'AutomatedSnapshotStartHour': (integer_range(0, 23), False)\n }\n\n\nclass ElasticsearchDomain(AWSObject):\n resource_type = \"AWS::Elasticsearch::Domain\"\n\n props = {\n 'AccessPolicies': (policytypes, False),\n 'AdvancedOptions': (dict, False),\n 'DomainName': (basestring, True),\n 'EBSOptions': (EBSOptions, False),\n 'ElasticsearchClusterConfig': (ElasticsearchClusterConfig, False),\n 'SnapshotOptions': (SnapshotOptions, False),\n 'Tags': (list, False)\n }\n", "path": "troposphere/elasticsearch.py"}], "after_files": [{"content": "# Copyright (c) 2012-2015, Mark Peek <[email protected]>\n# All rights reserved.\n#\n# See LICENSE file for full license.\n\nfrom . 
import AWSProperty, AWSObject\nfrom .validators import boolean, integer, integer_range, positive_integer\n\nVALID_VOLUME_TYPES = ('standard', 'gp2', 'io1')\n\ntry:\n from awacs.aws import Policy\n policytypes = (dict, Policy)\nexcept ImportError:\n policytypes = dict,\n\n\ndef validate_volume_type(volume_type):\n \"\"\"Validate VolumeType for ElasticsearchDomain\"\"\"\n if volume_type not in VALID_VOLUME_TYPES:\n raise ValueError(\"Elasticsearch Domain VolumeType must be one of: %s\" %\n \", \".join(VALID_VOLUME_TYPES))\n return volume_type\n\n\nclass EBSOptions(AWSProperty):\n props = {\n 'EBSEnabled': (boolean, False),\n 'Iops': (positive_integer, False),\n 'VolumeSize': (integer, False),\n 'VolumeType': (validate_volume_type, False)\n }\n\n def validate(self):\n volume_type = self.properties.get('VolumeType')\n iops = self.properties.get('Iops')\n if volume_type == 'io1' and not iops:\n raise ValueError(\"Must specify Iops if VolumeType is 'io1'.\")\n\n\nclass ElasticsearchClusterConfig(AWSProperty):\n props = {\n 'DedicatedMasterCount': (integer, False),\n 'DedicatedMasterEnabled': (boolean, False),\n 'DedicatedMasterType': (basestring, False),\n 'InstanceCount': (integer, False),\n 'InstanceType': (basestring, False),\n 'ZoneAwarenessEnabled': (boolean, False)\n }\n\n\nclass SnapshotOptions(AWSProperty):\n props = {\n 'AutomatedSnapshotStartHour': (integer_range(0, 23), False)\n }\n\n\nclass ElasticsearchDomain(AWSObject):\n resource_type = \"AWS::Elasticsearch::Domain\"\n\n props = {\n 'AccessPolicies': (policytypes, False),\n 'AdvancedOptions': (dict, False),\n 'DomainName': (basestring, False),\n 'EBSOptions': (EBSOptions, False),\n 'ElasticsearchClusterConfig': (ElasticsearchClusterConfig, False),\n 'SnapshotOptions': (SnapshotOptions, False),\n 'Tags': (list, False)\n }\n", "path": "troposphere/elasticsearch.py"}]} | 960 | 139 |
gh_patches_debug_6167 | rasdani/github-patches | git_diff | mesonbuild__meson-2462 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
MSI installed meson fails to rerun in visual studio
Initially, I ran `meson build` from the source code directory `xxx` to create the build directory.
Later, if any `meson.build` files are modified, Visual Studio fails to rerun Meson with the backtrace below. Meson was installed with the MSI package. Regeneration works with Ninja as the backend, and it also works when Meson isn't installed via MSI.
It seems like `mesonscript` in `regen_checker` holds an invalid value when Meson is installed from the MSI.
```
>meson.exe : error : unrecognized arguments: --internal regenerate C:\Users\niklas\Documents\git\xxx C:\Users\niklas\Documents\git\xxx
1> Traceback (most recent call last):
1> File "C:\Users\niklas\AppData\Local\Programs\Python\Python36-32\lib\site-packages\cx_Freeze\initscripts\__startup__.py", line 14, in run
1> module.run()
1> File "C:\Users\niklas\AppData\Local\Programs\Python\Python36-32\lib\site-packages\cx_Freeze\initscripts\Console.py", line 26, in run
1> exec(code, m.__dict__)
1> File "meson.py", line 37, in <module>
1> File "meson.py", line 34, in main
1> File "mesonbuild\mesonmain.py", line 311, in run
1> File "mesonbuild\mesonmain.py", line 278, in run_script_command
1> File "mesonbuild\scripts\regen_checker.py", line 56, in run
1> File "mesonbuild\scripts\regen_checker.py", line 42, in regen
1> File "C:\Users\niklas\AppData\Local\Programs\Python\Python36-32\lib\subprocess.py", line 291, in check_call
1> raise CalledProcessError(retcode, cmd)
1> subprocess.CalledProcessError: Command '['C:\\Program Files\\Meson\\meson.exe', 'C:\\Users\\niklas\\Documents\\git\\xxx\\meson', '--internal', 'regenerate', 'C:\\Users\\niklas\\Documents\\git\\xxx\\build', 'C:\\Users\\niklas\\Documents\\git\\xxx', '--backend=vs2015']' returned non-zero exit status 2.
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mesonbuild/scripts/regen_checker.py`
Content:
```
1 # Copyright 2015-2016 The Meson development team
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6
7 # http://www.apache.org/licenses/LICENSE-2.0
8
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import sys, os
16 import pickle, subprocess
17
18 # This could also be used for XCode.
19
20 def need_regen(regeninfo, regen_timestamp):
21 for i in regeninfo.depfiles:
22 curfile = os.path.join(regeninfo.build_dir, i)
23 curtime = os.stat(curfile).st_mtime
24 if curtime > regen_timestamp:
25 return True
26 # The timestamp file gets automatically deleted by MSBuild during a 'Clean' build.
27 # We must make sure to recreate it, even if we do not regenerate the solution.
28 # Otherwise, Visual Studio will always consider the REGEN project out of date.
29 print("Everything is up-to-date, regeneration of build files is not needed.")
30 from ..backend.vs2010backend import Vs2010Backend
31 Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)
32 return False
33
34 def regen(regeninfo, mesonscript, backend):
35 cmd = [sys.executable,
36 mesonscript,
37 '--internal',
38 'regenerate',
39 regeninfo.build_dir,
40 regeninfo.source_dir,
41 '--backend=' + backend]
42 subprocess.check_call(cmd)
43
44 def run(args):
45 private_dir = args[0]
46 dumpfile = os.path.join(private_dir, 'regeninfo.dump')
47 coredata = os.path.join(private_dir, 'coredata.dat')
48 with open(dumpfile, 'rb') as f:
49 regeninfo = pickle.load(f)
50 with open(coredata, 'rb') as f:
51 coredata = pickle.load(f)
52 mesonscript = coredata.meson_script_launcher
53 backend = coredata.get_builtin_option('backend')
54 regen_timestamp = os.stat(dumpfile).st_mtime
55 if need_regen(regeninfo, regen_timestamp):
56 regen(regeninfo, mesonscript, backend)
57 sys.exit(0)
58
59 if __name__ == '__main__':
60 run(sys.argv[1:])
61
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mesonbuild/scripts/regen_checker.py b/mesonbuild/scripts/regen_checker.py
--- a/mesonbuild/scripts/regen_checker.py
+++ b/mesonbuild/scripts/regen_checker.py
@@ -32,9 +32,11 @@
return False
def regen(regeninfo, mesonscript, backend):
- cmd = [sys.executable,
- mesonscript,
- '--internal',
+ if sys.executable.lower().endswith('meson.exe'):
+ cmd_exe = [sys.executable]
+ else:
+ cmd_exe = [sys.executable, mesonscript]
+ cmd = cmd_exe + ['--internal',
'regenerate',
regeninfo.build_dir,
regeninfo.source_dir,
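The core of the fix is a dispatch on `sys.executable`: the frozen `meson.exe` already embeds the entry point, so passing the script path again triggers the "unrecognized arguments" error, while a plain interpreter still needs the script. A hedged, standalone sketch of that command construction:

```python
import sys

def build_regen_cmd(executable, mesonscript, build_dir, source_dir, backend):
    # Frozen Windows binary: the exe is interpreter and script in one,
    # so the script path must not be appended a second time.
    if executable.lower().endswith('meson.exe'):
        cmd_exe = [executable]
    else:
        cmd_exe = [executable, mesonscript]
    return cmd_exe + ['--internal', 'regenerate', build_dir, source_dir,
                      '--backend=' + backend]

print(build_regen_cmd(r'C:\Program Files\Meson\meson.exe', 'meson.py',
                      r'C:\src\build', r'C:\src', 'vs2015'))
print(build_regen_cmd(sys.executable, 'meson.py', 'build', 'src', 'ninja'))
```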
| {"golden_diff": "diff --git a/mesonbuild/scripts/regen_checker.py b/mesonbuild/scripts/regen_checker.py\n--- a/mesonbuild/scripts/regen_checker.py\n+++ b/mesonbuild/scripts/regen_checker.py\n@@ -32,9 +32,11 @@\n return False\n \n def regen(regeninfo, mesonscript, backend):\n- cmd = [sys.executable,\n- mesonscript,\n- '--internal',\n+ if sys.executable.lower().endswith('meson.exe'):\n+ cmd_exe = [sys.executable]\n+ else:\n+ cmd_exe = [sys.executable, mesonscript]\n+ cmd = cmd_exe + ['--internal',\n 'regenerate',\n regeninfo.build_dir,\n regeninfo.source_dir,\n", "issue": "MSI installed meson fails to rerun in visual studio\nInitially, I ran `meson build` from the source code directory `xxx` to create the build directory.\r\nLater, if any `meson.build` files are modified, Visual studio fails to rerun Meson with the backtrace below. Meson is installed with MSI. It works with ninja as backend. It also works if meson isn't installed with MSI.\r\n\r\nIt seems like `mesonscript` in `regen_checker` is invalid when meson is installed with MSI.\r\n\r\n```\r\n>meson.exe : error : unrecognized arguments: --internal regenerate C:\\Users\\niklas\\Documents\\git\\xxx C:\\Users\\niklas\\Documents\\git\\xxx\r\n1> Traceback (most recent call last):\r\n1> File \"C:\\Users\\niklas\\AppData\\Local\\Programs\\Python\\Python36-32\\lib\\site-packages\\cx_Freeze\\initscripts\\__startup__.py\", line 14, in run\r\n1> module.run()\r\n1> File \"C:\\Users\\niklas\\AppData\\Local\\Programs\\Python\\Python36-32\\lib\\site-packages\\cx_Freeze\\initscripts\\Console.py\", line 26, in run\r\n1> exec(code, m.__dict__)\r\n1> File \"meson.py\", line 37, in <module>\r\n1> File \"meson.py\", line 34, in main\r\n1> File \"mesonbuild\\mesonmain.py\", line 311, in run\r\n1> File \"mesonbuild\\mesonmain.py\", line 278, in run_script_command\r\n1> File \"mesonbuild\\scripts\\regen_checker.py\", line 56, in run\r\n1> File \"mesonbuild\\scripts\\regen_checker.py\", line 42, in regen\r\n1> File \"C:\\Users\\niklas\\AppData\\Local\\Programs\\Python\\Python36-32\\lib\\subprocess.py\", line 291, in check_call\r\n1> raise CalledProcessError(retcode, cmd)\r\n1> subprocess.CalledProcessError: Command '['C:\\\\Program Files\\\\Meson\\\\meson.exe', 'C:\\\\Users\\\\niklas\\\\Documents\\\\git\\\\xxx\\\\meson', '--internal', 'regenerate', 'C:\\\\Users\\\\niklas\\\\Documents\\\\git\\\\xxx\\\\build', 'C:\\\\Users\\\\niklas\\\\Documents\\\\git\\\\xxx', '--backend=vs2015']' returned non-zero exit status 2.\r\n```\n", "before_files": [{"content": "# Copyright 2015-2016 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport sys, os\nimport pickle, subprocess\n\n# This could also be used for XCode.\n\ndef need_regen(regeninfo, regen_timestamp):\n for i in regeninfo.depfiles:\n curfile = os.path.join(regeninfo.build_dir, i)\n curtime = os.stat(curfile).st_mtime\n if curtime > regen_timestamp:\n return True\n # The timestamp file gets automatically deleted by MSBuild during a 'Clean' build.\n # We must make sure to recreate it, 
even if we do not regenerate the solution.\n # Otherwise, Visual Studio will always consider the REGEN project out of date.\n print(\"Everything is up-to-date, regeneration of build files is not needed.\")\n from ..backend.vs2010backend import Vs2010Backend\n Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)\n return False\n\ndef regen(regeninfo, mesonscript, backend):\n cmd = [sys.executable,\n mesonscript,\n '--internal',\n 'regenerate',\n regeninfo.build_dir,\n regeninfo.source_dir,\n '--backend=' + backend]\n subprocess.check_call(cmd)\n\ndef run(args):\n private_dir = args[0]\n dumpfile = os.path.join(private_dir, 'regeninfo.dump')\n coredata = os.path.join(private_dir, 'coredata.dat')\n with open(dumpfile, 'rb') as f:\n regeninfo = pickle.load(f)\n with open(coredata, 'rb') as f:\n coredata = pickle.load(f)\n mesonscript = coredata.meson_script_launcher\n backend = coredata.get_builtin_option('backend')\n regen_timestamp = os.stat(dumpfile).st_mtime\n if need_regen(regeninfo, regen_timestamp):\n regen(regeninfo, mesonscript, backend)\n sys.exit(0)\n\nif __name__ == '__main__':\n run(sys.argv[1:])\n", "path": "mesonbuild/scripts/regen_checker.py"}], "after_files": [{"content": "# Copyright 2015-2016 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport sys, os\nimport pickle, subprocess\n\n# This could also be used for XCode.\n\ndef need_regen(regeninfo, regen_timestamp):\n for i in regeninfo.depfiles:\n curfile = os.path.join(regeninfo.build_dir, i)\n curtime = os.stat(curfile).st_mtime\n if curtime > regen_timestamp:\n return True\n # The timestamp file gets automatically deleted by MSBuild during a 'Clean' build.\n # We must make sure to recreate it, even if we do not regenerate the solution.\n # Otherwise, Visual Studio will always consider the REGEN project out of date.\n print(\"Everything is up-to-date, regeneration of build files is not needed.\")\n from ..backend.vs2010backend import Vs2010Backend\n Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)\n return False\n\ndef regen(regeninfo, mesonscript, backend):\n if sys.executable.lower().endswith('meson.exe'):\n cmd_exe = [sys.executable]\n else:\n cmd_exe = [sys.executable, mesonscript]\n cmd = cmd_exe + ['--internal',\n 'regenerate',\n regeninfo.build_dir,\n regeninfo.source_dir,\n '--backend=' + backend]\n subprocess.check_call(cmd)\n\ndef run(args):\n private_dir = args[0]\n dumpfile = os.path.join(private_dir, 'regeninfo.dump')\n coredata = os.path.join(private_dir, 'coredata.dat')\n with open(dumpfile, 'rb') as f:\n regeninfo = pickle.load(f)\n with open(coredata, 'rb') as f:\n coredata = pickle.load(f)\n mesonscript = coredata.meson_script_launcher\n backend = coredata.get_builtin_option('backend')\n regen_timestamp = os.stat(dumpfile).st_mtime\n if need_regen(regeninfo, regen_timestamp):\n regen(regeninfo, mesonscript, backend)\n sys.exit(0)\n\nif __name__ == '__main__':\n run(sys.argv[1:])\n", "path": "mesonbuild/scripts/regen_checker.py"}]} | 1,510 | 167 |
gh_patches_debug_5400 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-2874 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Spider tgifridays is broken
During the global build at 2021-05-26-14-42-23, spider **tgifridays** failed with **0 features** and **0 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/tgifridays.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/tgifridays.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/tgifridays.geojson))
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/tgifridays.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 import datetime
3 import re
4 import json
5
6 import scrapy
7 from locations.items import GeojsonPointItem
8 from locations.hours import OpeningHours
9
10
11 DAY_MAPPING = {
12 'Monday': 'Mo',
13 'Tuesday': 'Tu',
14 'Wednesday': 'We',
15 'Thursday': 'Th',
16 'Friday': 'Fr',
17 'Saturday': 'Sa',
18 'Sunday': 'Su'
19 }
20
21
22 class TGIFridaySpider(scrapy.Spider):
23 download_delay = 0.2
24 name = "tgifridays"
25 item_attributes = { 'brand': "TGI Friday's" }
26 allowed_domains = ["tgifridays.com"]
27 start_urls = (
28 'https://locations.tgifridays.com/sitemap.xml',
29 )
30
31 def parse_hours(self, hours):
32 opening_hours = OpeningHours()
33
34 for hour in hours:
35 if hour["opens"] == "Closed":
36 continue
37 elif hour["closes"] == "Closed":
38 continue
39 else:
40 opening_hours.add_range(
41 day=hour["dayOfWeek"].replace('http://schema.org/', '')[:2],
42 open_time=hour["opens"],
43 close_time=hour["closes"],
44 time_format='%I:%M%p',
45 )
46
47 return opening_hours.as_opening_hours()
48
49 def parse_store(self, response):
50 # The JSON blob has an extra "}\r\n" at the end
51 data = json.loads(response.xpath('//script[@type="application/ld+json"]/text()').extract_first()[:-3])
52
53 properties = {
54 'addr_full': data['address']['streetAddress'],
55 'phone': data['telephone'],
56 'city': data['address']['addressLocality'],
57 'state': data['address']['addressRegion'],
58 'postcode': data['address']['postalCode'],
59 'country': data['address']['addressCountry'],
60 'ref': data['@id'],
61 'website': data['url'],
62 'lat': data['geo']['latitude'],
63 'lon': data['geo']['longitude'],
64 'name': data['name'],
65 }
66
67 hours = self.parse_hours(data.get("openingHoursSpecification", []))
68 if hours:
69 properties["opening_hours"] = hours
70
71 yield GeojsonPointItem(**properties)
72
73 def parse(self, response):
74 response.selector.remove_namespaces()
75 city_urls = response.xpath('//url/loc/text()').extract()
76 for path in city_urls:
77 if path.count('/') == 5:
78 yield scrapy.Request(
79 path.strip(),
80 callback=self.parse_store,
81 )
82
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/locations/spiders/tgifridays.py b/locations/spiders/tgifridays.py
--- a/locations/spiders/tgifridays.py
+++ b/locations/spiders/tgifridays.py
@@ -32,9 +32,9 @@
opening_hours = OpeningHours()
for hour in hours:
- if hour["opens"] == "Closed":
+ if hour["opens"] in ("Closed", ""):
continue
- elif hour["closes"] == "Closed":
+ elif hour["closes"] in ("Closed", ""):
continue
else:
opening_hours.add_range(
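The patch suggests the hours feed began emitting empty strings where it used to say "Closed", and the widened guard skips both. A hedged, self-contained sketch of the parsing loop with stub data (the real spider feeds a `locations.hours.OpeningHours` object instead of printing):

```python
hours = [
    {"dayOfWeek": "http://schema.org/Monday", "opens": "11:00AM", "closes": "10:00PM"},
    {"dayOfWeek": "http://schema.org/Sunday", "opens": "", "closes": ""},  # new failure mode
    {"dayOfWeek": "http://schema.org/Saturday", "opens": "Closed", "closes": "Closed"},
]

for hour in hours:
    # Widened guard from the patch: treat "" like "Closed" instead of
    # passing an unparseable time string further down the pipeline.
    if hour["opens"] in ("Closed", "") or hour["closes"] in ("Closed", ""):
        continue
    day = hour["dayOfWeek"].replace("http://schema.org/", "")[:2]
    print(day, hour["opens"], "-", hour["closes"])  # Mo 11:00AM - 10:00PM
```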
| {"golden_diff": "diff --git a/locations/spiders/tgifridays.py b/locations/spiders/tgifridays.py\n--- a/locations/spiders/tgifridays.py\n+++ b/locations/spiders/tgifridays.py\n@@ -32,9 +32,9 @@\n opening_hours = OpeningHours()\n \n for hour in hours:\n- if hour[\"opens\"] == \"Closed\":\n+ if hour[\"opens\"] in (\"Closed\", \"\"):\n continue\n- elif hour[\"closes\"] == \"Closed\":\n+ elif hour[\"closes\"] in (\"Closed\", \"\"):\n continue\n else:\n opening_hours.add_range(\n", "issue": "Spider tgifridays is broken\nDuring the global build at 2021-05-26-14-42-23, spider **tgifridays** failed with **0 features** and **0 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/tgifridays.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/tgifridays.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/tgifridays.geojson))\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport datetime\nimport re\nimport json\n\nimport scrapy\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\n\n\nDAY_MAPPING = {\n 'Monday': 'Mo',\n 'Tuesday': 'Tu',\n 'Wednesday': 'We',\n 'Thursday': 'Th',\n 'Friday': 'Fr',\n 'Saturday': 'Sa',\n 'Sunday': 'Su'\n}\n\n\nclass TGIFridaySpider(scrapy.Spider):\n download_delay = 0.2\n name = \"tgifridays\"\n item_attributes = { 'brand': \"TGI Friday's\" }\n allowed_domains = [\"tgifridays.com\"]\n start_urls = (\n 'https://locations.tgifridays.com/sitemap.xml',\n )\n\n def parse_hours(self, hours):\n opening_hours = OpeningHours()\n\n for hour in hours:\n if hour[\"opens\"] == \"Closed\":\n continue\n elif hour[\"closes\"] == \"Closed\":\n continue\n else:\n opening_hours.add_range(\n day=hour[\"dayOfWeek\"].replace('http://schema.org/', '')[:2],\n open_time=hour[\"opens\"],\n close_time=hour[\"closes\"],\n time_format='%I:%M%p',\n )\n\n return opening_hours.as_opening_hours()\n\n def parse_store(self, response):\n # The JSON blob has an extra \"}\\r\\n\" at the end\n data = json.loads(response.xpath('//script[@type=\"application/ld+json\"]/text()').extract_first()[:-3])\n\n properties = {\n 'addr_full': data['address']['streetAddress'],\n 'phone': data['telephone'],\n 'city': data['address']['addressLocality'],\n 'state': data['address']['addressRegion'],\n 'postcode': data['address']['postalCode'],\n 'country': data['address']['addressCountry'],\n 'ref': data['@id'],\n 'website': data['url'],\n 'lat': data['geo']['latitude'],\n 'lon': data['geo']['longitude'],\n 'name': data['name'],\n }\n\n hours = self.parse_hours(data.get(\"openingHoursSpecification\", []))\n if hours:\n properties[\"opening_hours\"] = hours\n\n yield GeojsonPointItem(**properties)\n\n def parse(self, response):\n response.selector.remove_namespaces()\n city_urls = response.xpath('//url/loc/text()').extract()\n for path in city_urls:\n if path.count('/') == 5:\n yield scrapy.Request(\n path.strip(),\n callback=self.parse_store,\n )\n", "path": "locations/spiders/tgifridays.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nimport datetime\nimport re\nimport json\n\nimport scrapy\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\n\n\nDAY_MAPPING = {\n 'Monday': 'Mo',\n 'Tuesday': 'Tu',\n 'Wednesday': 'We',\n 'Thursday': 'Th',\n 'Friday': 'Fr',\n 'Saturday': 'Sa',\n 'Sunday': 'Su'\n}\n\n\nclass TGIFridaySpider(scrapy.Spider):\n download_delay = 0.2\n name = \"tgifridays\"\n 
item_attributes = { 'brand': \"TGI Friday's\" }\n allowed_domains = [\"tgifridays.com\"]\n start_urls = (\n 'https://locations.tgifridays.com/sitemap.xml',\n )\n\n def parse_hours(self, hours):\n opening_hours = OpeningHours()\n\n for hour in hours:\n if hour[\"opens\"] in (\"Closed\", \"\"):\n continue\n elif hour[\"closes\"] in (\"Closed\", \"\"):\n continue\n else:\n opening_hours.add_range(\n day=hour[\"dayOfWeek\"].replace('http://schema.org/', '')[:2],\n open_time=hour[\"opens\"],\n close_time=hour[\"closes\"],\n time_format='%I:%M%p',\n )\n\n return opening_hours.as_opening_hours()\n\n def parse_store(self, response):\n # The JSON blob has an extra \"}\\r\\n\" at the end\n data = json.loads(response.xpath('//script[@type=\"application/ld+json\"]/text()').extract_first()[:-3])\n\n properties = {\n 'addr_full': data['address']['streetAddress'],\n 'phone': data['telephone'],\n 'city': data['address']['addressLocality'],\n 'state': data['address']['addressRegion'],\n 'postcode': data['address']['postalCode'],\n 'country': data['address']['addressCountry'],\n 'ref': data['@id'],\n 'website': data['url'],\n 'lat': data['geo']['latitude'],\n 'lon': data['geo']['longitude'],\n 'name': data['name'],\n }\n\n hours = self.parse_hours(data.get(\"openingHoursSpecification\", []))\n if hours:\n properties[\"opening_hours\"] = hours\n\n yield GeojsonPointItem(**properties)\n\n def parse(self, response):\n response.selector.remove_namespaces()\n city_urls = response.xpath('//url/loc/text()').extract()\n for path in city_urls:\n if path.count('/') == 5:\n yield scrapy.Request(\n path.strip(),\n callback=self.parse_store,\n )\n", "path": "locations/spiders/tgifridays.py"}]} | 1,164 | 139 |
gh_patches_debug_30186 | rasdani/github-patches | git_diff | sunpy__sunpy-5968 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
IRIS SJI maps call undefined header in self.wavelength
### Describe the bug
When creating an SJI map by feeding the data and header separately into `Map()` and then plotting it, an error is raised because the map's `waveunit` and `wavelength` properties reference an undefined variable `header` instead of `self.meta`.
### To Reproduce
import glob
from astropy.io import fits
from sunpy.map import Map
data_file = glob.glob('IRIS/*_SJI_2832_*fits')
data_file.sort()
hdul = fits.open(data_file[0])
header = hdul[0].header
data = hdul[0].data
Map(data[0], header).plot()
### What happened?
---------------------------------------------------------------------------
NameError Traceback (most recent call last)
/tmp/ipykernel_73554/1651218312.py in <module>
7 data = hdul[0].data
8
----> 9 Map(data[0], header).plot()
~/SunEnvironment/lib64/python3.8/site-packages/astropy/units/decorators.py in wrapper(*func_args, **func_kwargs)
251 # Call the original function with any equivalencies in force.
252 with add_enabled_equivalencies(self.equivalencies):
--> 253 return_ = wrapped_function(*func_args, **func_kwargs)
254
255 valid_empty = (inspect.Signature.empty, None)
~/SunEnvironment/lib64/python3.8/site-packages/sunpy/map/mapbase.py in plot(self, annotate, axes, title, autoalign, clip_interval, **imshow_kwargs)
2406 plot_settings_title = plot_settings.pop('title')
2407 else:
-> 2408 plot_settings_title = self.latex_name
2409
2410 # Anything left in plot_settings is given to imshow
~/SunEnvironment/lib64/python3.8/site-packages/sunpy/map/mapbase.py in latex_name(self)
735 def latex_name(self):
736 """LaTeX formatted description of the Map."""
--> 737 if isinstance(self.measurement, u.Quantity):
738 return self._base_name().format(measurement=self.measurement._repr_latex_())
739 else:
~/SunEnvironment/lib64/python3.8/site-packages/sunpy/map/mapbase.py in measurement(self)
898 defaults to dimensionless units.
899 """
--> 900 return self.wavelength
901
902 @property
~/SunEnvironment/lib64/python3.8/site-packages/sunpy/map/sources/iris.py in wavelength(self)
61 Taken from WAVELNTH, or if not present TWAVE1.
62 """
---> 63 return header.get('wavelnth', header.get('twave1')) * self.waveunit
64
65 @classmethod
NameError: name 'header' is not defined
### Expected behavior
_No response_
### Screenshots

### System Details
sunpy.__version__ : 3.1.3
astropy.__version__: 4.3.1
### Installation method
pip
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sunpy/map/sources/iris.py`
Content:
```
1
2 import astropy.units as u
3
4 from sunpy.map.mapbase import GenericMap, SpatialPair
5
6 __all__ = ['SJIMap']
7
8
9 class SJIMap(GenericMap):
10 """
11 A 2D IRIS Slit Jaw Imager Map.
12
13 The Interface Region Imaging Spectrograph (IRIS) small explorer spacecraft
14 provides simultaneous spectra and images of the photosphere, chromosphere,
15 transition region, and corona with 0.33 to 0.4 arcsec spatial resolution,
16 2-second temporal resolution and 1 km/s velocity resolution over a
17 field-of- view of up to 175 arcsec by 175 arcsec. IRIS consists of a 19-cm
18 UV telescope that feeds a slit-based dual-bandpass imaging spectrograph.
19
20 Slit-jaw images in four different passbands (C ii 1330, Si iv 1400,
21 Mg ii k 2796 and Mg ii wing 2830 A) can be taken simultaneously with
22 spectral rasters that sample regions up to 130 arcsec by 175 arcsec at a
23 variety of spatial samplings (from 0.33 arcsec and up).
24 IRIS is sensitive to emission from plasma at temperatures between
25 5000 K and 10 MK.
26
27 IRIS was launched into a Sun-synchronous orbit on 27 June 2013.
28
29 .. warning::
30
31 This object can only handle level 1 SJI files.
32
33 References
34 ----------
35 * `IRIS Mission Page <https://iris.lmsal.com>`_
36 * `IRIS Analysis Guide <https://iris.lmsal.com/itn26/itn26.pdf>`_
37 * `IRIS Instrument Paper <https://doi.org/10.1007/s11207-014-0485-y>`_
38 """
39 @property
40 def detector(self):
41 return "SJI"
42
43 @property
44 def spatial_units(self):
45 """
46 If not present in CUNIT{1,2} keywords, defaults to arcsec.
47 """
48 return SpatialPair(u.Unit(self.meta.get('cunit1', 'arcsec')),
49 u.Unit(self.meta.get('cunit2', 'arcsec')))
50
51 @property
52 def waveunit(self):
53 """
54 Taken from WAVEUNIT, or if not present defaults to Angstrom.
55 """
56 return u.Unit(header.get('waveunit', "Angstrom"))
57
58 @property
59 def wavelength(self):
60 """
61 Taken from WAVELNTH, or if not present TWAVE1.
62 """
63 return header.get('wavelnth', header.get('twave1')) * self.waveunit
64
65 @classmethod
66 def is_datasource_for(cls, data, header, **kwargs):
67 """Determines if header corresponds to an IRIS SJI image"""
68 tele = str(header.get('TELESCOP', '')).startswith('IRIS')
69 obs = str(header.get('INSTRUME', '')).startswith('SJI')
70 level = header.get('lvl_num') == 1
71 return tele and obs
72
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sunpy/map/sources/iris.py b/sunpy/map/sources/iris.py
--- a/sunpy/map/sources/iris.py
+++ b/sunpy/map/sources/iris.py
@@ -26,10 +26,6 @@
IRIS was launched into a Sun-synchronous orbit on 27 June 2013.
- .. warning::
-
- This object can only handle level 1 SJI files.
-
References
----------
* `IRIS Mission Page <https://iris.lmsal.com>`_
@@ -53,19 +49,27 @@
"""
Taken from WAVEUNIT, or if not present defaults to Angstrom.
"""
- return u.Unit(header.get('waveunit', "Angstrom"))
+ return u.Unit(self.meta.get('waveunit', "Angstrom"))
@property
def wavelength(self):
"""
Taken from WAVELNTH, or if not present TWAVE1.
"""
- return header.get('wavelnth', header.get('twave1')) * self.waveunit
+ return self.meta.get('wavelnth', self.meta.get('twave1')) * self.waveunit
+
+ @property
+ def unit(self):
+ unit_str = self.meta.get('bunit', None)
+ if unit_str is None:
+ return
+ # Remove "corrected" so that the unit can be parsed
+ unit_str = unit_str.lower().replace('corrected', '').strip()
+ return self._parse_fits_unit(unit_str)
@classmethod
def is_datasource_for(cls, data, header, **kwargs):
"""Determines if header corresponds to an IRIS SJI image"""
tele = str(header.get('TELESCOP', '')).startswith('IRIS')
obs = str(header.get('INSTRUME', '')).startswith('SJI')
- level = header.get('lvl_num') == 1
return tele and obs
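Underneath the sunpy specifics this is an ordinary scoping bug: `header` exists only as a parameter of `is_datasource_for`, so the properties must read `self.meta`. A minimal reproduction of the failure pattern, independent of sunpy (class name and keys are illustrative):

```python
class BrokenMap:
    def __init__(self, meta):
        self.meta = meta

    @property
    def waveunit(self):
        # Bug pattern from iris.py: 'header' was never defined in this scope.
        return header.get('waveunit', 'Angstrom')  # NameError on access

    @property
    def waveunit_fixed(self):
        return self.meta.get('waveunit', 'Angstrom')

m = BrokenMap({'waveunit': 'nm'})
print(m.waveunit_fixed)        # 'nm'
try:
    m.waveunit
except NameError as exc:
    print('reproduced:', exc)  # name 'header' is not defined
```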
| {"golden_diff": "diff --git a/sunpy/map/sources/iris.py b/sunpy/map/sources/iris.py\n--- a/sunpy/map/sources/iris.py\n+++ b/sunpy/map/sources/iris.py\n@@ -26,10 +26,6 @@\n \n IRIS was launched into a Sun-synchronous orbit on 27 June 2013.\n \n- .. warning::\n-\n- This object can only handle level 1 SJI files.\n-\n References\n ----------\n * `IRIS Mission Page <https://iris.lmsal.com>`_\n@@ -53,19 +49,27 @@\n \"\"\"\n Taken from WAVEUNIT, or if not present defaults to Angstrom.\n \"\"\"\n- return u.Unit(header.get('waveunit', \"Angstrom\"))\n+ return u.Unit(self.meta.get('waveunit', \"Angstrom\"))\n \n @property\n def wavelength(self):\n \"\"\"\n Taken from WAVELNTH, or if not present TWAVE1.\n \"\"\"\n- return header.get('wavelnth', header.get('twave1')) * self.waveunit\n+ return self.meta.get('wavelnth', self.meta.get('twave1')) * self.waveunit\n+\n+ @property\n+ def unit(self):\n+ unit_str = self.meta.get('bunit', None)\n+ if unit_str is None:\n+ return\n+ # Remove \"corrected\" so that the unit can be parsed\n+ unit_str = unit_str.lower().replace('corrected', '').strip()\n+ return self._parse_fits_unit(unit_str)\n \n @classmethod\n def is_datasource_for(cls, data, header, **kwargs):\n \"\"\"Determines if header corresponds to an IRIS SJI image\"\"\"\n tele = str(header.get('TELESCOP', '')).startswith('IRIS')\n obs = str(header.get('INSTRUME', '')).startswith('SJI')\n- level = header.get('lvl_num') == 1\n return tele and obs\n", "issue": "IRIS SJI maps call undefined header in self.wavelength\n### Describe the bug\n\nWhen creating SJI maps by feeding data and header separately into a Map() and then doing a plot, this causes an error because it calls an undefined header.\n\n### To Reproduce\n\nimport glob\r\nfrom [astropy.io](http://astropy.io/) import fits\r\nfrom [sunpy.map](http://sunpy.map/) import Map\r\n\r\ndata_file = glob.glob('IRIS/*_SJI_2832_*fits')\r\ndata_file.sort()\r\n\r\nhdul = [fits.open](http://fits.open/)(data_file[0])\r\n \r\nheader = hdul[0].header\r\ndata = hdul[0].data\r\n\r\nMap(data[0], header).plot()\n\n### What happened?\n\n---------------------------------------------------------------------------\r\nNameError Traceback (most recent call last)\r\n/tmp/ipykernel_73554/1651218312.py in <module>\r\n 7 data = hdul[0].data\r\n 8 \r\n----> 9 Map(data[0], header).plot()\r\n\r\n~/SunEnvironment/lib64/python3.8/site-packages/astropy/units/decorators.py in wrapper(*func_args, **func_kwargs)\r\n 251 # Call the original function with any equivalencies in force.\r\n 252 with add_enabled_equivalencies(self.equivalencies):\r\n--> 253 return_ = wrapped_function(*func_args, **func_kwargs)\r\n 254 \r\n 255 valid_empty = (inspect.Signature.empty, None)\r\n\r\n~/SunEnvironment/lib64/python3.8/site-packages/sunpy/map/mapbase.py in plot(self, annotate, axes, title, autoalign, clip_interval, **imshow_kwargs)\r\n 2406 plot_settings_title = plot_settings.pop('title')\r\n 2407 else:\r\n-> 2408 plot_settings_title = self.latex_name\r\n 2409 \r\n 2410 # Anything left in plot_settings is given to imshow\r\n\r\n~/SunEnvironment/lib64/python3.8/site-packages/sunpy/map/mapbase.py in latex_name(self)\r\n 735 def latex_name(self):\r\n 736 \"\"\"LaTeX formatted description of the Map.\"\"\"\r\n--> 737 if isinstance(self.measurement, u.Quantity):\r\n 738 return self._base_name().format(measurement=self.measurement._repr_latex_())\r\n 739 else:\r\n\r\n~/SunEnvironment/lib64/python3.8/site-packages/sunpy/map/mapbase.py in measurement(self)\r\n 898 defaults to dimensionless units.\r\n 899 
\"\"\"\r\n--> 900 return self.wavelength\r\n 901 \r\n 902 @property\r\n\r\n~/SunEnvironment/lib64/python3.8/site-packages/sunpy/map/sources/iris.py in wavelength(self)\r\n 61 Taken from WAVELNTH, or if not present TWAVE1.\r\n 62 \"\"\"\r\n---> 63 return header.get('wavelnth', header.get('twave1')) * self.waveunit\r\n 64 \r\n 65 @classmethod\r\n\r\nNameError: name 'header' is not defined\r\n\n\n### Expected behavior\n\n_No response_\n\n### Screenshots\n\n\r\n\n\n### System Details\n\nsunpy.__version__ : 3.1.3\r\nastropy.__version__: 4.3.1\n\n### Installation method\n\npip\n", "before_files": [{"content": "\nimport astropy.units as u\n\nfrom sunpy.map.mapbase import GenericMap, SpatialPair\n\n__all__ = ['SJIMap']\n\n\nclass SJIMap(GenericMap):\n \"\"\"\n A 2D IRIS Slit Jaw Imager Map.\n\n The Interface Region Imaging Spectrograph (IRIS) small explorer spacecraft\n provides simultaneous spectra and images of the photosphere, chromosphere,\n transition region, and corona with 0.33 to 0.4 arcsec spatial resolution,\n 2-second temporal resolution and 1 km/s velocity resolution over a\n field-of- view of up to 175 arcsec by 175 arcsec. IRIS consists of a 19-cm\n UV telescope that feeds a slit-based dual-bandpass imaging spectrograph.\n\n Slit-jaw images in four different passbands (C ii 1330, Si iv 1400,\n Mg ii k 2796 and Mg ii wing 2830 A) can be taken simultaneously with\n spectral rasters that sample regions up to 130 arcsec by 175 arcsec at a\n variety of spatial samplings (from 0.33 arcsec and up).\n IRIS is sensitive to emission from plasma at temperatures between\n 5000 K and 10 MK.\n\n IRIS was launched into a Sun-synchronous orbit on 27 June 2013.\n\n .. warning::\n\n This object can only handle level 1 SJI files.\n\n References\n ----------\n * `IRIS Mission Page <https://iris.lmsal.com>`_\n * `IRIS Analysis Guide <https://iris.lmsal.com/itn26/itn26.pdf>`_\n * `IRIS Instrument Paper <https://doi.org/10.1007/s11207-014-0485-y>`_\n \"\"\"\n @property\n def detector(self):\n return \"SJI\"\n\n @property\n def spatial_units(self):\n \"\"\"\n If not present in CUNIT{1,2} keywords, defaults to arcsec.\n \"\"\"\n return SpatialPair(u.Unit(self.meta.get('cunit1', 'arcsec')),\n u.Unit(self.meta.get('cunit2', 'arcsec')))\n\n @property\n def waveunit(self):\n \"\"\"\n Taken from WAVEUNIT, or if not present defaults to Angstrom.\n \"\"\"\n return u.Unit(header.get('waveunit', \"Angstrom\"))\n\n @property\n def wavelength(self):\n \"\"\"\n Taken from WAVELNTH, or if not present TWAVE1.\n \"\"\"\n return header.get('wavelnth', header.get('twave1')) * self.waveunit\n\n @classmethod\n def is_datasource_for(cls, data, header, **kwargs):\n \"\"\"Determines if header corresponds to an IRIS SJI image\"\"\"\n tele = str(header.get('TELESCOP', '')).startswith('IRIS')\n obs = str(header.get('INSTRUME', '')).startswith('SJI')\n level = header.get('lvl_num') == 1\n return tele and obs\n", "path": "sunpy/map/sources/iris.py"}], "after_files": [{"content": "\nimport astropy.units as u\n\nfrom sunpy.map.mapbase import GenericMap, SpatialPair\n\n__all__ = ['SJIMap']\n\n\nclass SJIMap(GenericMap):\n \"\"\"\n A 2D IRIS Slit Jaw Imager Map.\n\n The Interface Region Imaging Spectrograph (IRIS) small explorer spacecraft\n provides simultaneous spectra and images of the photosphere, chromosphere,\n transition region, and corona with 0.33 to 0.4 arcsec spatial resolution,\n 2-second temporal resolution and 1 km/s velocity resolution over a\n field-of- view of up to 175 arcsec by 175 arcsec. 
IRIS consists of a 19-cm\n UV telescope that feeds a slit-based dual-bandpass imaging spectrograph.\n\n Slit-jaw images in four different passbands (C ii 1330, Si iv 1400,\n Mg ii k 2796 and Mg ii wing 2830 A) can be taken simultaneously with\n spectral rasters that sample regions up to 130 arcsec by 175 arcsec at a\n variety of spatial samplings (from 0.33 arcsec and up).\n IRIS is sensitive to emission from plasma at temperatures between\n 5000 K and 10 MK.\n\n IRIS was launched into a Sun-synchronous orbit on 27 June 2013.\n\n References\n ----------\n * `IRIS Mission Page <https://iris.lmsal.com>`_\n * `IRIS Analysis Guide <https://iris.lmsal.com/itn26/itn26.pdf>`_\n * `IRIS Instrument Paper <https://doi.org/10.1007/s11207-014-0485-y>`_\n \"\"\"\n @property\n def detector(self):\n return \"SJI\"\n\n @property\n def spatial_units(self):\n \"\"\"\n If not present in CUNIT{1,2} keywords, defaults to arcsec.\n \"\"\"\n return SpatialPair(u.Unit(self.meta.get('cunit1', 'arcsec')),\n u.Unit(self.meta.get('cunit2', 'arcsec')))\n\n @property\n def waveunit(self):\n \"\"\"\n Taken from WAVEUNIT, or if not present defaults to Angstrom.\n \"\"\"\n return u.Unit(self.meta.get('waveunit', \"Angstrom\"))\n\n @property\n def wavelength(self):\n \"\"\"\n Taken from WAVELNTH, or if not present TWAVE1.\n \"\"\"\n return self.meta.get('wavelnth', self.meta.get('twave1')) * self.waveunit\n\n @property\n def unit(self):\n unit_str = self.meta.get('bunit', None)\n if unit_str is None:\n return\n # Remove \"corrected\" so that the unit can be parsed\n unit_str = unit_str.lower().replace('corrected', '').strip()\n return self._parse_fits_unit(unit_str)\n\n @classmethod\n def is_datasource_for(cls, data, header, **kwargs):\n \"\"\"Determines if header corresponds to an IRIS SJI image\"\"\"\n tele = str(header.get('TELESCOP', '')).startswith('IRIS')\n obs = str(header.get('INSTRUME', '')).startswith('SJI')\n return tele and obs\n", "path": "sunpy/map/sources/iris.py"}]} | 1,923 | 442 |
gh_patches_debug_20993 | rasdani/github-patches | git_diff | dask__distributed-779 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
distributed-1.15.0rc1 seems to wrongly require "futures" in a Python 3.6 installation
Collecting futures (from distributed>=1.14; extra == "complete"->dask[complete]->-r C:\Winpython\basedir36
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 import os
4 from setuptools import setup
5 import sys
6 import versioneer
7
8 requires = open('requirements.txt').read().strip().split('\n')
9
10 setup(name='distributed',
11 version=versioneer.get_version(),
12 cmdclass=versioneer.get_cmdclass(),
13 description='Distributed computing',
14 url='https://distributed.readthedocs.io/en/latest/',
15 maintainer='Matthew Rocklin',
16 maintainer_email='[email protected]',
17 license='BSD',
18 package_data={ '': ['templates/index.html'], },
19 include_package_data=True,
20 install_requires=requires,
21 packages=['distributed',
22 'distributed.bokeh',
23 'distributed.bokeh.background',
24 'distributed.bokeh.status',
25 'distributed.bokeh.tasks',
26 'distributed.bokeh.workers',
27 'distributed.cli',
28 'distributed.deploy',
29 'distributed.diagnostics',
30 'distributed.protocol',
31 'distributed.http'],
32 long_description=(open('README.md').read() if os.path.exists('README.md')
33 else ''),
34 entry_points='''
35 [console_scripts]
36 dask-ssh=distributed.cli.dask_ssh:go
37 dask-submit=distributed.cli.dask_submit:go
38 dask-remote=distributed.cli.dask_remote:go
39 dask-scheduler=distributed.cli.dask_scheduler:go
40 dask-worker=distributed.cli.dask_worker:go
41 ''',
42 zip_safe=False)
43
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -6,6 +6,18 @@
import versioneer
requires = open('requirements.txt').read().strip().split('\n')
+install_requires = []
+extras_require = {}
+for r in requires:
+ if ';' in r:
+ # requirements.txt conditional dependencies need to be reformatted for wheels
+ # to the form: `'[extra_name]:condition' : ['requirements']`
+ req, cond = r.split(';', 1)
+ cond = ':' + cond
+ cond_reqs = extras_require.setdefault(cond, [])
+ cond_reqs.append(req)
+ else:
+ install_requires.append(r)
setup(name='distributed',
version=versioneer.get_version(),
@@ -17,7 +29,8 @@
license='BSD',
package_data={ '': ['templates/index.html'], },
include_package_data=True,
- install_requires=requires,
+ install_requires=install_requires,
+ extras_require=extras_require,
packages=['distributed',
'distributed.bokeh',
'distributed.bokeh.background',
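In isolation, the setup.py change rewrites requirement lines that carry an environment marker (the part after `;`) into setuptools' conditional `extras_require` form, so wheels stop pulling `futures` into Python 3 installs. A hedged standalone demo of that transformation (the sample requirements are illustrative):

```python
requires = [
    "tornado >= 4.2",
    "futures; python_version < '3.0'",         # only needed on Python 2
    "singledispatch; python_version < '3.4'",
]

install_requires, extras_require = [], {}
for r in requires:
    if ';' in r:
        req, cond = r.split(';', 1)
        # wheel-compatible form: {':condition': ['requirement', ...]}
        extras_require.setdefault(':' + cond.strip(), []).append(req.strip())
    else:
        install_requires.append(r)

print(install_requires)  # ['tornado >= 4.2']
print(extras_require)    # {":python_version < '3.0'": ['futures'], ...}
```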
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -6,6 +6,18 @@\n import versioneer\n \n requires = open('requirements.txt').read().strip().split('\\n')\n+install_requires = []\n+extras_require = {}\n+for r in requires:\n+ if ';' in r:\n+ # requirements.txt conditional dependencies need to be reformatted for wheels\n+ # to the form: `'[extra_name]:condition' : ['requirements']`\n+ req, cond = r.split(';', 1)\n+ cond = ':' + cond\n+ cond_reqs = extras_require.setdefault(cond, [])\n+ cond_reqs.append(req)\n+ else:\n+ install_requires.append(r)\n \n setup(name='distributed',\n version=versioneer.get_version(),\n@@ -17,7 +29,8 @@\n license='BSD',\n package_data={ '': ['templates/index.html'], },\n include_package_data=True,\n- install_requires=requires,\n+ install_requires=install_requires,\n+ extras_require=extras_require,\n packages=['distributed',\n 'distributed.bokeh',\n 'distributed.bokeh.background',\n", "issue": "distributed-1.15.0rc1 seems wrongly requiring \"futures\" from a Python-3.6 installation\nCollecting futures (from distributed>=1.14; extra == \"complete\"->dask[complete]->-r C:\\Winpython\\basedir36\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport os\nfrom setuptools import setup\nimport sys\nimport versioneer\n\nrequires = open('requirements.txt').read().strip().split('\\n')\n\nsetup(name='distributed',\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n description='Distributed computing',\n url='https://distributed.readthedocs.io/en/latest/',\n maintainer='Matthew Rocklin',\n maintainer_email='[email protected]',\n license='BSD',\n package_data={ '': ['templates/index.html'], },\n include_package_data=True,\n install_requires=requires,\n packages=['distributed',\n 'distributed.bokeh',\n 'distributed.bokeh.background',\n 'distributed.bokeh.status',\n 'distributed.bokeh.tasks',\n 'distributed.bokeh.workers',\n 'distributed.cli',\n 'distributed.deploy',\n 'distributed.diagnostics',\n 'distributed.protocol',\n 'distributed.http'],\n long_description=(open('README.md').read() if os.path.exists('README.md')\n else ''),\n entry_points='''\n [console_scripts]\n dask-ssh=distributed.cli.dask_ssh:go\n dask-submit=distributed.cli.dask_submit:go\n dask-remote=distributed.cli.dask_remote:go\n dask-scheduler=distributed.cli.dask_scheduler:go\n dask-worker=distributed.cli.dask_worker:go\n ''',\n zip_safe=False)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\nimport os\nfrom setuptools import setup\nimport sys\nimport versioneer\n\nrequires = open('requirements.txt').read().strip().split('\\n')\ninstall_requires = []\nextras_require = {}\nfor r in requires:\n if ';' in r:\n # requirements.txt conditional dependencies need to be reformatted for wheels\n # to the form: `'[extra_name]:condition' : ['requirements']`\n req, cond = r.split(';', 1)\n cond = ':' + cond\n cond_reqs = extras_require.setdefault(cond, [])\n cond_reqs.append(req)\n else:\n install_requires.append(r)\n\nsetup(name='distributed',\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n description='Distributed computing',\n url='https://distributed.readthedocs.io/en/latest/',\n maintainer='Matthew Rocklin',\n maintainer_email='[email protected]',\n license='BSD',\n package_data={ '': ['templates/index.html'], },\n include_package_data=True,\n install_requires=install_requires,\n extras_require=extras_require,\n packages=['distributed',\n 'distributed.bokeh',\n 
'distributed.bokeh.background',\n 'distributed.bokeh.status',\n 'distributed.bokeh.tasks',\n 'distributed.bokeh.workers',\n 'distributed.cli',\n 'distributed.deploy',\n 'distributed.diagnostics',\n 'distributed.protocol',\n 'distributed.http'],\n long_description=(open('README.md').read() if os.path.exists('README.md')\n else ''),\n entry_points='''\n [console_scripts]\n dask-ssh=distributed.cli.dask_ssh:go\n dask-submit=distributed.cli.dask_submit:go\n dask-remote=distributed.cli.dask_remote:go\n dask-scheduler=distributed.cli.dask_scheduler:go\n dask-worker=distributed.cli.dask_worker:go\n ''',\n zip_safe=False)\n", "path": "setup.py"}]} | 699 | 249 |
gh_patches_debug_13258 | rasdani/github-patches | git_diff | lutris__lutris-2955 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
PCSX2 Runner: Add config path/file options (Feature request)
It would be nice to have a way to specify a config file, since, for example, some games run better with a multi-threaded microVU than others. It would also make it possible to set different window sizes for those seeking square pixels, as some NTSC games run at 640x448 while others use 512x448; the same goes for PAL-region games. :slightly_smiling_face:
The command line is: `PCSX2 --cfg=<str>`
I'm absolutely fine if you put this at low priority, as probably only a few people use PCSX2 anyway. :wink:
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lutris/runners/pcsx2.py`
Content:
```
1 # Standard Library
2 from gettext import gettext as _
3
4 # Lutris Modules
5 from lutris.runners.runner import Runner
6 from lutris.util import system
7
8
9 class pcsx2(Runner):
10 human_name = _("PCSX2")
11 description = _("PlayStation 2 emulator")
12 platforms = [_("Sony PlayStation 2")]
13 runnable_alone = True
14 runner_executable = "pcsx2/PCSX2"
15 game_options = [{
16 "option": "main_file",
17 "type": "file",
18 "label": _("ISO file"),
19 "default_path": "game_path",
20 }]
21
22 runner_options = [
23 {
24 "option": "fullscreen",
25 "type": "bool",
26 "label": _("Fullscreen"),
27 "default": False,
28 },
29 {
30 "option": "full_boot",
31 "type": "bool",
32 "label": _("Fullboot"),
33 "default": False
34 },
35 {
36 "option": "nogui",
37 "type": "bool",
38 "label": _("No GUI"),
39 "default": False
40 },
41 {
42 "option": "config_file",
43 "type": "file",
44 "label": _("Custom config file"),
45 "advanced": True,
46 },
47 {
48 "option": "config_path",
49 "type": "directory_chooser",
50 "label": _("Custom config path"),
51 "advanced": True,
52 },
53 ]
54
55 def play(self):
56 arguments = [self.get_executable()]
57
58 if self.runner_config.get("fullscreen"):
59 arguments.append("--fullscreen")
60 if self.runner_config.get("full_boot"):
61 arguments.append("--fullboot")
62 if self.runner_config.get("nogui"):
63 arguments.append("--nogui")
64 if self.runner_config.get("config_file"):
65 arguments.append("--cfg=%s", self.runner_config["config_file"])
66 if self.runner_config.get("config_path"):
67 arguments.append("--cfgpath=%s", self.runner_config["config_path"])
68
69 iso = self.game_config.get("main_file") or ""
70 if not system.path_exists(iso):
71 return {"error": "FILE_NOT_FOUND", "file": iso}
72 arguments.append(iso)
73 return {"command": arguments}
74
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lutris/runners/pcsx2.py b/lutris/runners/pcsx2.py
--- a/lutris/runners/pcsx2.py
+++ b/lutris/runners/pcsx2.py
@@ -62,9 +62,9 @@
if self.runner_config.get("nogui"):
arguments.append("--nogui")
if self.runner_config.get("config_file"):
- arguments.append("--cfg=%s", self.runner_config["config_file"])
+ arguments.append("--cfg={}".format(self.runner_config["config_file"]))
if self.runner_config.get("config_path"):
- arguments.append("--cfgpath=%s", self.runner_config["config_path"])
+ arguments.append("--cfgpath={}".format(self.runner_config["config_path"]))
iso = self.game_config.get("main_file") or ""
if not system.path_exists(iso):
| {"golden_diff": "diff --git a/lutris/runners/pcsx2.py b/lutris/runners/pcsx2.py\n--- a/lutris/runners/pcsx2.py\n+++ b/lutris/runners/pcsx2.py\n@@ -62,9 +62,9 @@\n if self.runner_config.get(\"nogui\"):\n arguments.append(\"--nogui\")\n if self.runner_config.get(\"config_file\"):\n- arguments.append(\"--cfg=%s\", self.runner_config[\"config_file\"])\n+ arguments.append(\"--cfg={}\".format(self.runner_config[\"config_file\"]))\n if self.runner_config.get(\"config_path\"):\n- arguments.append(\"--cfgpath=%s\", self.runner_config[\"config_path\"])\n+ arguments.append(\"--cfgpath={}\".format(self.runner_config[\"config_path\"]))\n \n iso = self.game_config.get(\"main_file\") or \"\"\n if not system.path_exists(iso):\n", "issue": "PCSX2 Runner: Add config path/file options (Feature request)\nIt would be nice to have a way to specify a config file as for example some games run better with a multi-threaded microVU than others. It would also enable to have different window sizes set for those seeking square pixels as some NTSC games run at 640x448 and others use 512x448. Same goes for PAL region games. :slightly_smiling_face: \r\n\r\nThe command line is: `PCSX2 --cfg=<str>`\r\n\r\nI'm absolutely fine if you put it on low priority as probably only few people use PCSX2 anyways. :wink: \n", "before_files": [{"content": "# Standard Library\nfrom gettext import gettext as _\n\n# Lutris Modules\nfrom lutris.runners.runner import Runner\nfrom lutris.util import system\n\n\nclass pcsx2(Runner):\n human_name = _(\"PCSX2\")\n description = _(\"PlayStation 2 emulator\")\n platforms = [_(\"Sony PlayStation 2\")]\n runnable_alone = True\n runner_executable = \"pcsx2/PCSX2\"\n game_options = [{\n \"option\": \"main_file\",\n \"type\": \"file\",\n \"label\": _(\"ISO file\"),\n \"default_path\": \"game_path\",\n }]\n\n runner_options = [\n {\n \"option\": \"fullscreen\",\n \"type\": \"bool\",\n \"label\": _(\"Fullscreen\"),\n \"default\": False,\n },\n {\n \"option\": \"full_boot\",\n \"type\": \"bool\",\n \"label\": _(\"Fullboot\"),\n \"default\": False\n },\n {\n \"option\": \"nogui\",\n \"type\": \"bool\",\n \"label\": _(\"No GUI\"),\n \"default\": False\n },\n {\n \"option\": \"config_file\",\n \"type\": \"file\",\n \"label\": _(\"Custom config file\"),\n \"advanced\": True,\n },\n {\n \"option\": \"config_path\",\n \"type\": \"directory_chooser\",\n \"label\": _(\"Custom config path\"),\n \"advanced\": True,\n },\n ]\n\n def play(self):\n arguments = [self.get_executable()]\n\n if self.runner_config.get(\"fullscreen\"):\n arguments.append(\"--fullscreen\")\n if self.runner_config.get(\"full_boot\"):\n arguments.append(\"--fullboot\")\n if self.runner_config.get(\"nogui\"):\n arguments.append(\"--nogui\")\n if self.runner_config.get(\"config_file\"):\n arguments.append(\"--cfg=%s\", self.runner_config[\"config_file\"])\n if self.runner_config.get(\"config_path\"):\n arguments.append(\"--cfgpath=%s\", self.runner_config[\"config_path\"])\n\n iso = self.game_config.get(\"main_file\") or \"\"\n if not system.path_exists(iso):\n return {\"error\": \"FILE_NOT_FOUND\", \"file\": iso}\n arguments.append(iso)\n return {\"command\": arguments}\n", "path": "lutris/runners/pcsx2.py"}], "after_files": [{"content": "# Standard Library\nfrom gettext import gettext as _\n\n# Lutris Modules\nfrom lutris.runners.runner import Runner\nfrom lutris.util import system\n\n\nclass pcsx2(Runner):\n human_name = _(\"PCSX2\")\n description = _(\"PlayStation 2 emulator\")\n platforms = [_(\"Sony PlayStation 2\")]\n runnable_alone = 
True\n runner_executable = \"pcsx2/PCSX2\"\n game_options = [{\n \"option\": \"main_file\",\n \"type\": \"file\",\n \"label\": _(\"ISO file\"),\n \"default_path\": \"game_path\",\n }]\n\n runner_options = [\n {\n \"option\": \"fullscreen\",\n \"type\": \"bool\",\n \"label\": _(\"Fullscreen\"),\n \"default\": False,\n },\n {\n \"option\": \"full_boot\",\n \"type\": \"bool\",\n \"label\": _(\"Fullboot\"),\n \"default\": False\n },\n {\n \"option\": \"nogui\",\n \"type\": \"bool\",\n \"label\": _(\"No GUI\"),\n \"default\": False\n },\n {\n \"option\": \"config_file\",\n \"type\": \"file\",\n \"label\": _(\"Custom config file\"),\n \"advanced\": True,\n },\n {\n \"option\": \"config_path\",\n \"type\": \"directory_chooser\",\n \"label\": _(\"Custom config path\"),\n \"advanced\": True,\n },\n ]\n\n def play(self):\n arguments = [self.get_executable()]\n\n if self.runner_config.get(\"fullscreen\"):\n arguments.append(\"--fullscreen\")\n if self.runner_config.get(\"full_boot\"):\n arguments.append(\"--fullboot\")\n if self.runner_config.get(\"nogui\"):\n arguments.append(\"--nogui\")\n if self.runner_config.get(\"config_file\"):\n arguments.append(\"--cfg={}\".format(self.runner_config[\"config_file\"]))\n if self.runner_config.get(\"config_path\"):\n arguments.append(\"--cfgpath={}\".format(self.runner_config[\"config_path\"]))\n\n iso = self.game_config.get(\"main_file\") or \"\"\n if not system.path_exists(iso):\n return {\"error\": \"FILE_NOT_FOUND\", \"file\": iso}\n arguments.append(iso)\n return {\"command\": arguments}\n", "path": "lutris/runners/pcsx2.py"}]} | 1,018 | 190 |
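Worth spelling out for the lutris record above: `list.append` accepts exactly one argument, so the printf-style calls in the original runner raise `TypeError` at runtime instead of ever formatting the flag. A standalone sketch of the failure and the patched form; the config path is a made-up example.

```python
# list.append() takes exactly one argument, so the printf-style call in the
# original runner raises TypeError before any flag reaches PCSX2.
arguments = ["pcsx2"]
config_file = "/home/user/.config/pcsx2-alt"  # hypothetical path

try:
    arguments.append("--cfg=%s", config_file)  # buggy form from the issue
except TypeError as exc:
    print(exc)  # append() takes exactly one argument (2 given)

arguments.append("--cfg={}".format(config_file))  # patched form
print(arguments)  # ['pcsx2', '--cfg=/home/user/.config/pcsx2-alt']
```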
gh_patches_debug_2941 | rasdani/github-patches | git_diff | learningequality__kolibri-10078 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Kolibri 0.16 - Resources of type HTML5 and exercises are not displayed
## Observed behavior
This is a follow-up to https://github.com/learningequality/kolibri/pull/9724#issuecomment-1408889097
In the latest develop build, both exercises and HTML resources are not displayed when a user navigates through the Library.
## Expected behavior
It should be possible to preview the resource.
## Steps to reproduce the issue
1. Install the following [0.16 build](https://buildkite.com/learningequality/kolibri-debian/builds/5813#018603a8-a7d9-4c79-98d0-e2a0db6a7c69) and import the QA channel.
2. Go to Library > QA Channel
3. Click on any resource within the HTML5 folder or the Exercises folder
## Videos
HTML5:
https://user-images.githubusercontent.com/79847249/215529161-a0e88738-b221-416a-beea-cf0c6192450f.mp4
EXERCISES:
https://user-images.githubusercontent.com/79847249/215529190-28ecdf59-db72-4b3a-a6df-2c72ab2f395c.mp4
## Console error
```
pluginMediator.js:122 Kolibri Modules: kolibri.plugins.learn.app registered
pluginMediator.js:122 Kolibri Modules: kolibri.plugins.media_player.main registered
pluginMediator.js:122 Kolibri Modules: kolibri.plugins.pdf_viewer.main registered
pluginMediator.js:122 Kolibri Modules: kolibri.plugins.epub_viewer.main registered
pluginMediator.js:122 Kolibri Modules: kolibri.plugins.html5_viewer.main registered
vue.runtime.esm.js:5753 GET http://127.0.0.1:51957/content/static/hashi/hashi-0efeb19f7e4ded20c73f.html 404 (Not Found)
insertBefore @ vue.runtime.esm.js:5753
insert @ vue.runtime.esm.js:6083
(anonymous) @ vue.runtime.esm.js:6030
createElm @ vue.runtime.esm.js:5969
(anonymous) @ vue.runtime.esm.js:6560
Vue._update @ vue.runtime.esm.js:3963
updateComponent @ vue.runtime.esm.js:4081
Watcher.get @ vue.runtime.esm.js:4495
Watcher.run @ vue.runtime.esm.js:4570
flushSchedulerQueue @ vue.runtime.esm.js:4326
(anonymous) @ vue.runtime.esm.js:1989
flushCallbacks @ vue.runtime.esm.js:1915
Promise.then (async)
timerFunc @ vue.runtime.esm.js:1942
nextTick @ vue.runtime.esm.js:1999
(anonymous) @ vue.runtime.esm.js:4418
Watcher.update @ vue.runtime.esm.js:4560
Vue.$forceUpdate @ vue.runtime.esm.js:3984
forceRender @ vue.runtime.esm.js:3668
(anonymous) @ vue.runtime.esm.js:3690
(anonymous) @ vue.runtime.esm.js:336
vue.runtime.esm.js:5753 GET http://127.0.0.1:51957/content/static/hashi/hashi-0efeb19f7e4ded20c73f.html 404 (Not Found)
insertBefore @ vue.runtime.esm.js:5753
insert @ vue.runtime.esm.js:6083
(anonymous) @ vue.runtime.esm.js:6030
createElm @ vue.runtime.esm.js:5969
(anonymous) @ vue.runtime.esm.js:6260
patchVnode @ vue.runtime.esm.js:6363
(anonymous) @ vue.runtime.esm.js:6526
Vue._update @ vue.runtime.esm.js:3963
updateComponent @ vue.runtime.esm.js:4081
Watcher.get @ vue.runtime.esm.js:4495
Watcher.run @ vue.runtime.esm.js:4570
flushSchedulerQueue @ vue.runtime.esm.js:4326
(anonymous) @ vue.runtime.esm.js:1989
flushCallbacks @ vue.runtime.esm.js:1915
Promise.then (async)
timerFunc @ vue.runtime.esm.js:1942
nextTick @ vue.runtime.esm.js:1999
(anonymous) @ vue.runtime.esm.js:4418
Watcher.update @ vue.runtime.esm.js:4560
Dep.notify @ vue.runtime.esm.js:730
set @ vue.runtime.esm.js:1055
sharedPropertyDefinition.set @ vue.runtime.esm.js:4644
(anonymous) @ ContentPage.vue:312
pluginMediator.js:122 Kolibri Modules: kolibri.plugins.perseus_viewer.main registered
```
## Usage Details
Windows 10, Ubuntu - Chrome, Firefox
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kolibri/deployment/default/alt_wsgi.py`
Content:
```
1 """
2 WSGI config for the alternate origin server used for serving
3 sandboxed content
4 """
5 import os
6
7 import kolibri.core.content
8 from kolibri.core.content.utils import paths
9 from kolibri.core.content.zip_wsgi import get_application
10 from kolibri.utils.kolibri_whitenoise import DynamicWhiteNoise
11
12 os.environ.setdefault(
13 "DJANGO_SETTINGS_MODULE", "kolibri.deployment.default.settings.base"
14 )
15
16
17 def generate_alt_wsgi_application():
18 alt_content_path = "/" + paths.get_content_url(
19 paths.zip_content_path_prefix()
20 ).lstrip("/")
21
22 content_dirs = [paths.get_content_dir_path()] + paths.get_content_fallback_paths()
23
24 content_static_path = os.path.join(
25 os.path.dirname(kolibri.core.content.__file__), "static"
26 )
27
28 # Mount static files
29 return DynamicWhiteNoise(
30 get_application(),
31 dynamic_locations=[
32 (alt_content_path, content_dir) for content_dir in content_dirs
33 ]
34 + [(paths.zip_content_static_root(), content_static_path)],
35 app_paths=paths.get_zip_content_base_path(),
36 )
37
38
39 alt_application = generate_alt_wsgi_application()
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kolibri/deployment/default/alt_wsgi.py b/kolibri/deployment/default/alt_wsgi.py
--- a/kolibri/deployment/default/alt_wsgi.py
+++ b/kolibri/deployment/default/alt_wsgi.py
@@ -32,7 +32,7 @@
(alt_content_path, content_dir) for content_dir in content_dirs
]
+ [(paths.zip_content_static_root(), content_static_path)],
- app_paths=paths.get_zip_content_base_path(),
+ app_paths=[paths.get_zip_content_base_path()],
)
| {"golden_diff": "diff --git a/kolibri/deployment/default/alt_wsgi.py b/kolibri/deployment/default/alt_wsgi.py\n--- a/kolibri/deployment/default/alt_wsgi.py\n+++ b/kolibri/deployment/default/alt_wsgi.py\n@@ -32,7 +32,7 @@\n (alt_content_path, content_dir) for content_dir in content_dirs\n ]\n + [(paths.zip_content_static_root(), content_static_path)],\n- app_paths=paths.get_zip_content_base_path(),\n+ app_paths=[paths.get_zip_content_base_path()],\n )\n", "issue": "Kolibri 0.16 - Resources of type HTML5 and exercises are not displayed\n## Observed behavior\r\nThis is a follow up to https://github.com/learningequality/kolibri/pull/9724#issuecomment-1408889097\r\n\r\nIn the latest develop build both exercises and html resources are not being displayed when a user is navigating through the Library.\r\n\r\n## Expected behavior\r\nIt should be possible to preview the resource.\r\n\r\n## Steps to reproduce the issue\r\n1. Install the the following [0. 16 build ](https://buildkite.com/learningequality/kolibri-debian/builds/5813#018603a8-a7d9-4c79-98d0-e2a0db6a7c69) and import the QA channel.\r\n2. Go to Library > QA Channel\r\n3. Click on any resource withing the HTML5 folder or the Exercises folder\r\n\r\n## Videos\r\n\r\nHTML5:\r\n\r\nhttps://user-images.githubusercontent.com/79847249/215529161-a0e88738-b221-416a-beea-cf0c6192450f.mp4\r\n\r\nEXERCISES:\r\n\r\nhttps://user-images.githubusercontent.com/79847249/215529190-28ecdf59-db72-4b3a-a6df-2c72ab2f395c.mp4\r\n\r\n## Console error\r\n\r\n```\r\npluginMediator.js:122 Kolibri Modules: kolibri.plugins.learn.app registered\r\npluginMediator.js:122 Kolibri Modules: kolibri.plugins.media_player.main registered\r\npluginMediator.js:122 Kolibri Modules: kolibri.plugins.pdf_viewer.main registered\r\npluginMediator.js:122 Kolibri Modules: kolibri.plugins.epub_viewer.main registered\r\npluginMediator.js:122 Kolibri Modules: kolibri.plugins.html5_viewer.main registered\r\nvue.runtime.esm.js:5753 GET http://127.0.0.1:51957/content/static/hashi/hashi-0efeb19f7e4ded20c73f.html 404 (Not Found)\r\ninsertBefore @ vue.runtime.esm.js:5753\r\ninsert @ vue.runtime.esm.js:6083\r\n(anonymous) @ vue.runtime.esm.js:6030\r\ncreateElm @ vue.runtime.esm.js:5969\r\n(anonymous) @ vue.runtime.esm.js:6560\r\nVue._update @ vue.runtime.esm.js:3963\r\nupdateComponent @ vue.runtime.esm.js:4081\r\nWatcher.get @ vue.runtime.esm.js:4495\r\nWatcher.run @ vue.runtime.esm.js:4570\r\nflushSchedulerQueue @ vue.runtime.esm.js:4326\r\n(anonymous) @ vue.runtime.esm.js:1989\r\nflushCallbacks @ vue.runtime.esm.js:1915\r\nPromise.then (async)\r\ntimerFunc @ vue.runtime.esm.js:1942\r\nnextTick @ vue.runtime.esm.js:1999\r\n(anonymous) @ vue.runtime.esm.js:4418\r\nWatcher.update @ vue.runtime.esm.js:4560\r\nVue.$forceUpdate @ vue.runtime.esm.js:3984\r\nforceRender @ vue.runtime.esm.js:3668\r\n(anonymous) @ vue.runtime.esm.js:3690\r\n(anonymous) @ vue.runtime.esm.js:336\r\nvue.runtime.esm.js:5753 GET http://127.0.0.1:51957/content/static/hashi/hashi-0efeb19f7e4ded20c73f.html 404 (Not Found)\r\ninsertBefore @ vue.runtime.esm.js:5753\r\ninsert @ vue.runtime.esm.js:6083\r\n(anonymous) @ vue.runtime.esm.js:6030\r\ncreateElm @ vue.runtime.esm.js:5969\r\n(anonymous) @ vue.runtime.esm.js:6260\r\npatchVnode @ vue.runtime.esm.js:6363\r\n(anonymous) @ vue.runtime.esm.js:6526\r\nVue._update @ vue.runtime.esm.js:3963\r\nupdateComponent @ vue.runtime.esm.js:4081\r\nWatcher.get @ vue.runtime.esm.js:4495\r\nWatcher.run @ vue.runtime.esm.js:4570\r\nflushSchedulerQueue @ vue.runtime.esm.js:4326\r\n(anonymous) @ 
vue.runtime.esm.js:1989\r\nflushCallbacks @ vue.runtime.esm.js:1915\r\nPromise.then (async)\r\ntimerFunc @ vue.runtime.esm.js:1942\r\nnextTick @ vue.runtime.esm.js:1999\r\n(anonymous) @ vue.runtime.esm.js:4418\r\nWatcher.update @ vue.runtime.esm.js:4560\r\nDep.notify @ vue.runtime.esm.js:730\r\nset @ vue.runtime.esm.js:1055\r\nsharedPropertyDefinition.set @ vue.runtime.esm.js:4644\r\n(anonymous) @ ContentPage.vue:312\r\npluginMediator.js:122 Kolibri Modules: kolibri.plugins.perseus_viewer.main registered\r\n```\r\n\r\n## Usage Details\r\nWindows 10, Ubuntu - Chrome, Firefox\n", "before_files": [{"content": "\"\"\"\nWSGI config for the alternate origin server used for serving\nsandboxed content\n\"\"\"\nimport os\n\nimport kolibri.core.content\nfrom kolibri.core.content.utils import paths\nfrom kolibri.core.content.zip_wsgi import get_application\nfrom kolibri.utils.kolibri_whitenoise import DynamicWhiteNoise\n\nos.environ.setdefault(\n \"DJANGO_SETTINGS_MODULE\", \"kolibri.deployment.default.settings.base\"\n)\n\n\ndef generate_alt_wsgi_application():\n alt_content_path = \"/\" + paths.get_content_url(\n paths.zip_content_path_prefix()\n ).lstrip(\"/\")\n\n content_dirs = [paths.get_content_dir_path()] + paths.get_content_fallback_paths()\n\n content_static_path = os.path.join(\n os.path.dirname(kolibri.core.content.__file__), \"static\"\n )\n\n # Mount static files\n return DynamicWhiteNoise(\n get_application(),\n dynamic_locations=[\n (alt_content_path, content_dir) for content_dir in content_dirs\n ]\n + [(paths.zip_content_static_root(), content_static_path)],\n app_paths=paths.get_zip_content_base_path(),\n )\n\n\nalt_application = generate_alt_wsgi_application()\n", "path": "kolibri/deployment/default/alt_wsgi.py"}], "after_files": [{"content": "\"\"\"\nWSGI config for the alternate origin server used for serving\nsandboxed content\n\"\"\"\nimport os\n\nimport kolibri.core.content\nfrom kolibri.core.content.utils import paths\nfrom kolibri.core.content.zip_wsgi import get_application\nfrom kolibri.utils.kolibri_whitenoise import DynamicWhiteNoise\n\nos.environ.setdefault(\n \"DJANGO_SETTINGS_MODULE\", \"kolibri.deployment.default.settings.base\"\n)\n\n\ndef generate_alt_wsgi_application():\n alt_content_path = \"/\" + paths.get_content_url(\n paths.zip_content_path_prefix()\n ).lstrip(\"/\")\n\n content_dirs = [paths.get_content_dir_path()] + paths.get_content_fallback_paths()\n\n content_static_path = os.path.join(\n os.path.dirname(kolibri.core.content.__file__), \"static\"\n )\n\n # Mount static files\n return DynamicWhiteNoise(\n get_application(),\n dynamic_locations=[\n (alt_content_path, content_dir) for content_dir in content_dirs\n ]\n + [(paths.zip_content_static_root(), content_static_path)],\n app_paths=[paths.get_zip_content_base_path()],\n )\n\n\nalt_application = generate_alt_wsgi_application()\n", "path": "kolibri/deployment/default/alt_wsgi.py"}]} | 1,797 | 125 |
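For the Kolibri record above, the one-character fix hinges on `app_paths` being a collection of path prefixes; a bare string is iterated character by character instead. The sketch below illustrates that generic pitfall with stand-in matching logic, not DynamicWhiteNoise's actual implementation.

```python
# Stand-in prefix matching to show why a bare string breaks app_paths.
# This is NOT DynamicWhiteNoise's real code, just the generic pitfall.
def is_app_path(request_path, app_paths):
    # Iterating a string yields single characters, so "/" alone ends up
    # matching nearly every request when a bare path is passed in.
    return any(request_path.startswith(prefix) for prefix in app_paths)

base = "/zipcontent/"

print(is_app_path("/content/static/hashi/hashi.html", base))    # True (wrong)
print(is_app_path("/content/static/hashi/hashi.html", [base]))  # False (right)
print(is_app_path("/zipcontent/index.html", [base]))            # True
```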
gh_patches_debug_2612 | rasdani/github-patches | git_diff | scikit-hep__pyhf-307 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add --version flag to pyhf CLI
# Description
As [suggested by Lukas](https://github.com/diana-hep/pyhf/pull/304#issuecomment-428856809), adding a `--version` flag to the pyhf CLI could be useful.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyhf/commandline.py`
Content:
```
1 import logging
2 logging.basicConfig()
3 log = logging.getLogger(__name__)
4
5 import click
6 import json
7 import os
8 import jsonpatch
9 import sys
10
11 from . import readxml
12 from . import writexml
13 from .utils import runOnePoint
14 from .pdf import Model
15
16
17 @click.group(context_settings=dict(help_option_names=['-h', '--help']))
18 def pyhf():
19 pass
20
21 @pyhf.command()
22 @click.argument('entrypoint-xml', type=click.Path(exists=True))
23 @click.option('--basedir', help='The base directory for the XML files to point relative to.', type=click.Path(exists=True), default=os.getcwd())
24 @click.option('--output-file', help='The location of the output json file. If not specified, prints to screen.', default=None)
25 @click.option('--track-progress/--hide-progress', default=True)
26 def xml2json(entrypoint_xml, basedir, output_file, track_progress):
27 """ Entrypoint XML: The top-level XML file for the PDF definition. """
28 spec = readxml.parse(entrypoint_xml, basedir, track_progress=track_progress)
29 if output_file is None:
30 print(json.dumps(spec, indent=4, sort_keys=True))
31 else:
32 with open(output_file, 'w+') as out_file:
33 json.dump(spec, out_file, indent=4, sort_keys=True)
34 log.debug("Written to {0:s}".format(output_file))
35 sys.exit(0)
36
37 @pyhf.command()
38 @click.argument('workspace', default='-')
39 @click.argument('xmlfile', default='-')
40 @click.option('--specroot', default=click.Path(exists=True))
41 @click.option('--dataroot', default=click.Path(exists=True))
42 def json2xml(workspace, xmlfile, specroot, dataroot):
43 with click.open_file(workspace, 'r') as specstream:
44 d = json.load(specstream)
45 with click.open_file(xmlfile, 'w') as outstream:
46 outstream.write(writexml.writexml(d, specroot, dataroot,'').decode('utf-8'))
47 sys.exit(0)
48
49 @pyhf.command()
50 @click.argument('workspace', default='-')
51 @click.option('--output-file', help='The location of the output json file. If not specified, prints to screen.', default=None)
52 @click.option('--measurement', default=None)
53 @click.option('-p', '--patch', multiple=True)
54 @click.option('--qualify-names/--no-qualify-names', default=False)
55 def cls(workspace, output_file, measurement, qualify_names, patch):
56 with click.open_file(workspace, 'r') as specstream:
57 d = json.load(specstream)
58 measurements = d['toplvl']['measurements']
59 measurement_names = [m['name'] for m in measurements]
60 measurement_index = 0
61 log.debug('measurements defined:\n\t{0:s}'.format('\n\t'.join(measurement_names)))
62 if measurement and measurement not in measurement_names:
63 log.error('no measurement by name \'{0:s}\' exists, pick from one of the valid ones above'.format(measurement))
64 sys.exit(1)
65 else:
66 if not measurement and len(measurements) > 1:
67 log.warning('multiple measurements defined. Taking the first measurement.')
68 measurement_index = 0
69 elif measurement:
70 measurement_index = measurement_names.index(measurement)
71
72 log.debug('calculating CLs for measurement {0:s}'.format(measurements[measurement_index]['name']))
73 spec = {'channels':d['channels']}
74 for p in patch:
75 with click.open_file(p, 'r') as read_file:
76 p = jsonpatch.JsonPatch(json.loads(read_file.read()))
77 spec = p.apply(spec)
78 p = Model(spec, poiname=measurements[measurement_index]['config']['poi'], qualify_names=qualify_names)
79 result = runOnePoint(1.0, sum((d['data'][c['name']] for c in d['channels']),[]) + p.config.auxdata, p)
80 result = {'CLs_obs': result[-2].tolist()[0], 'CLs_exp': result[-1].ravel().tolist()}
81 if output_file is None:
82 print(json.dumps(result, indent=4, sort_keys=True))
83 else:
84 with open(output_file, 'w+') as out_file:
85 json.dump(result, out_file, indent=4, sort_keys=True)
86 log.debug("Written to {0:s}".format(output_file))
87 sys.exit(0)
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pyhf/commandline.py b/pyhf/commandline.py
--- a/pyhf/commandline.py
+++ b/pyhf/commandline.py
@@ -12,9 +12,11 @@
from . import writexml
from .utils import runOnePoint
from .pdf import Model
+from .version import __version__
@click.group(context_settings=dict(help_option_names=['-h', '--help']))
[email protected]_option(version=__version__)
def pyhf():
pass
| {"golden_diff": "diff --git a/pyhf/commandline.py b/pyhf/commandline.py\n--- a/pyhf/commandline.py\n+++ b/pyhf/commandline.py\n@@ -12,9 +12,11 @@\n from . import writexml\n from .utils import runOnePoint\n from .pdf import Model\n+from .version import __version__\n \n \n @click.group(context_settings=dict(help_option_names=['-h', '--help']))\[email protected]_option(version=__version__)\n def pyhf():\n pass\n", "issue": "Add --version flag to pyhf CLI\n# Description\r\n\r\nAs [suggested by Lukas](https://github.com/diana-hep/pyhf/pull/304#issuecomment-428856809), adding a `--version` flag to the pyhf CLI could be useful.\n", "before_files": [{"content": "import logging\nlogging.basicConfig()\nlog = logging.getLogger(__name__)\n\nimport click\nimport json\nimport os\nimport jsonpatch\nimport sys\n\nfrom . import readxml\nfrom . import writexml\nfrom .utils import runOnePoint\nfrom .pdf import Model\n\n\[email protected](context_settings=dict(help_option_names=['-h', '--help']))\ndef pyhf():\n pass\n\[email protected]()\[email protected]('entrypoint-xml', type=click.Path(exists=True))\[email protected]('--basedir', help='The base directory for the XML files to point relative to.', type=click.Path(exists=True), default=os.getcwd())\[email protected]('--output-file', help='The location of the output json file. If not specified, prints to screen.', default=None)\[email protected]('--track-progress/--hide-progress', default=True)\ndef xml2json(entrypoint_xml, basedir, output_file, track_progress):\n \"\"\" Entrypoint XML: The top-level XML file for the PDF definition. \"\"\"\n spec = readxml.parse(entrypoint_xml, basedir, track_progress=track_progress)\n if output_file is None:\n print(json.dumps(spec, indent=4, sort_keys=True))\n else:\n with open(output_file, 'w+') as out_file:\n json.dump(spec, out_file, indent=4, sort_keys=True)\n log.debug(\"Written to {0:s}\".format(output_file))\n sys.exit(0)\n\[email protected]()\[email protected]('workspace', default='-')\[email protected]('xmlfile', default='-')\[email protected]('--specroot', default=click.Path(exists=True))\[email protected]('--dataroot', default=click.Path(exists=True))\ndef json2xml(workspace, xmlfile, specroot, dataroot):\n with click.open_file(workspace, 'r') as specstream:\n d = json.load(specstream)\n with click.open_file(xmlfile, 'w') as outstream:\n outstream.write(writexml.writexml(d, specroot, dataroot,'').decode('utf-8'))\n sys.exit(0)\n\[email protected]()\[email protected]('workspace', default='-')\[email protected]('--output-file', help='The location of the output json file. If not specified, prints to screen.', default=None)\[email protected]('--measurement', default=None)\[email protected]('-p', '--patch', multiple=True)\[email protected]('--qualify-names/--no-qualify-names', default=False)\ndef cls(workspace, output_file, measurement, qualify_names, patch):\n with click.open_file(workspace, 'r') as specstream:\n d = json.load(specstream)\n measurements = d['toplvl']['measurements']\n measurement_names = [m['name'] for m in measurements]\n measurement_index = 0\n log.debug('measurements defined:\\n\\t{0:s}'.format('\\n\\t'.join(measurement_names)))\n if measurement and measurement not in measurement_names:\n log.error('no measurement by name \\'{0:s}\\' exists, pick from one of the valid ones above'.format(measurement))\n sys.exit(1)\n else:\n if not measurement and len(measurements) > 1:\n log.warning('multiple measurements defined. 
Taking the first measurement.')\n measurement_index = 0\n elif measurement:\n measurement_index = measurement_names.index(measurement)\n\n log.debug('calculating CLs for measurement {0:s}'.format(measurements[measurement_index]['name']))\n spec = {'channels':d['channels']}\n for p in patch:\n with click.open_file(p, 'r') as read_file:\n p = jsonpatch.JsonPatch(json.loads(read_file.read()))\n spec = p.apply(spec)\n p = Model(spec, poiname=measurements[measurement_index]['config']['poi'], qualify_names=qualify_names)\n result = runOnePoint(1.0, sum((d['data'][c['name']] for c in d['channels']),[]) + p.config.auxdata, p)\n result = {'CLs_obs': result[-2].tolist()[0], 'CLs_exp': result[-1].ravel().tolist()}\n if output_file is None:\n print(json.dumps(result, indent=4, sort_keys=True))\n else:\n with open(output_file, 'w+') as out_file:\n json.dump(result, out_file, indent=4, sort_keys=True)\n log.debug(\"Written to {0:s}\".format(output_file))\n sys.exit(0)\n", "path": "pyhf/commandline.py"}], "after_files": [{"content": "import logging\nlogging.basicConfig()\nlog = logging.getLogger(__name__)\n\nimport click\nimport json\nimport os\nimport jsonpatch\nimport sys\n\nfrom . import readxml\nfrom . import writexml\nfrom .utils import runOnePoint\nfrom .pdf import Model\nfrom .version import __version__\n\n\[email protected](context_settings=dict(help_option_names=['-h', '--help']))\[email protected]_option(version=__version__)\ndef pyhf():\n pass\n\[email protected]()\[email protected]('entrypoint-xml', type=click.Path(exists=True))\[email protected]('--basedir', help='The base directory for the XML files to point relative to.', type=click.Path(exists=True), default=os.getcwd())\[email protected]('--output-file', help='The location of the output json file. If not specified, prints to screen.', default=None)\[email protected]('--track-progress/--hide-progress', default=True)\ndef xml2json(entrypoint_xml, basedir, output_file, track_progress):\n \"\"\" Entrypoint XML: The top-level XML file for the PDF definition. \"\"\"\n spec = readxml.parse(entrypoint_xml, basedir, track_progress=track_progress)\n if output_file is None:\n print(json.dumps(spec, indent=4, sort_keys=True))\n else:\n with open(output_file, 'w+') as out_file:\n json.dump(spec, out_file, indent=4, sort_keys=True)\n log.debug(\"Written to {0:s}\".format(output_file))\n sys.exit(0)\n\[email protected]()\[email protected]('workspace', default='-')\[email protected]('xmlfile', default='-')\[email protected]('--specroot', default=click.Path(exists=True))\[email protected]('--dataroot', default=click.Path(exists=True))\ndef json2xml(workspace, xmlfile, specroot, dataroot):\n with click.open_file(workspace, 'r') as specstream:\n d = json.load(specstream)\n with click.open_file(xmlfile, 'w') as outstream:\n outstream.write(writexml.writexml(d, specroot, dataroot,'').decode('utf-8'))\n sys.exit(0)\n\[email protected]()\[email protected]('workspace', default='-')\[email protected]('--output-file', help='The location of the output json file. 
If not specified, prints to screen.', default=None)\[email protected]('--measurement', default=None)\[email protected]('-p', '--patch', multiple=True)\[email protected]('--qualify-names/--no-qualify-names', default=False)\ndef cls(workspace, output_file, measurement, qualify_names, patch):\n with click.open_file(workspace, 'r') as specstream:\n d = json.load(specstream)\n measurements = d['toplvl']['measurements']\n measurement_names = [m['name'] for m in measurements]\n measurement_index = 0\n log.debug('measurements defined:\\n\\t{0:s}'.format('\\n\\t'.join(measurement_names)))\n if measurement and measurement not in measurement_names:\n log.error('no measurement by name \\'{0:s}\\' exists, pick from one of the valid ones above'.format(measurement))\n sys.exit(1)\n else:\n if not measurement and len(measurements) > 1:\n log.warning('multiple measurements defined. Taking the first measurement.')\n measurement_index = 0\n elif measurement:\n measurement_index = measurement_names.index(measurement)\n\n log.debug('calculating CLs for measurement {0:s}'.format(measurements[measurement_index]['name']))\n spec = {'channels':d['channels']}\n for p in patch:\n with click.open_file(p, 'r') as read_file:\n p = jsonpatch.JsonPatch(json.loads(read_file.read()))\n spec = p.apply(spec)\n p = Model(spec, poiname=measurements[measurement_index]['config']['poi'], qualify_names=qualify_names)\n result = runOnePoint(1.0, sum((d['data'][c['name']] for c in d['channels']),[]) + p.config.auxdata, p)\n result = {'CLs_obs': result[-2].tolist()[0], 'CLs_exp': result[-1].ravel().tolist()}\n if output_file is None:\n print(json.dumps(result, indent=4, sort_keys=True))\n else:\n with open(output_file, 'w+') as out_file:\n json.dump(result, out_file, indent=4, sort_keys=True)\n log.debug(\"Written to {0:s}\".format(output_file))\n sys.exit(0)\n", "path": "pyhf/commandline.py"}]} | 1,453 | 107 |
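The pyhf patch above is a direct use of click's built-in `version_option` decorator. A minimal, runnable sketch of the same wiring, with a placeholder version string standing in for `pyhf.version.__version__`:

```python
# Minimal click group with a --version flag, mirroring the patched wiring.
import click

__version__ = "0.0.0"  # placeholder for the package's real version string

@click.group(context_settings=dict(help_option_names=["-h", "--help"]))
@click.version_option(version=__version__)
def cli():
    pass

if __name__ == "__main__":
    cli()  # `python thisfile.py --version` prints "<prog>, version 0.0.0"
```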
gh_patches_debug_33199 | rasdani/github-patches | git_diff | python-poetry__poetry-1395 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
poetry shell does not activate virtualenv
<!-- Checked checkbox should look like this: [x] -->
- [x] I am on the [latest](https://github.com/sdispater/poetry/releases/latest) Poetry version.
- [x] I have searched the [issues](https://github.com/sdispater/poetry/issues) of this repo and believe that this is not a duplicate.
<!--
Once those are done, if you're able to fill in the following list with your information,
it'd be very helpful to whoever handles the issue.
-->
- **OS version and name**: Mac OS X, High Sierra
- **Poetry version**: 0.12.5
## Issue
Similar to ```pipenv shell```, I would have expected that when running ```poetry shell``` the virtualenv gets activated, but apparently this is not the case...
```console
➜ which python
/Users/timon/.pyenv/shims/python
➜ poetry shell
Spawning shell within /Users/timon/Library/Caches/pypoetry/virtualenvs/YOLO-SAR-py3.7
➜ which python
/Users/timon/.pyenv/shims/python
➜ source /Users/timon/Library/Caches/pypoetry/virtualenvs/yolo-sar-py3.7/bin/activate
➜ which python
/Users/timon/Library/Caches/pypoetry/virtualenvs/yolo-sar-py3.7/bin/python
```
for comparison
```console
➜ poetry run which python
/Users/timon/Library/Caches/pypoetry/virtualenvs/yolo-sar-py3.7/bin/python
```
Am I misunderstanding something and this is expected behaviour, or is it a bug?
Thanks a lot already for your time :)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `poetry/utils/shell.py`
Content:
```
1 import os
2
3 from shellingham import detect_shell
4 from shellingham import ShellDetectionFailure
5
6
7 class Shell:
8 """
9 Represents the current shell.
10 """
11
12 _shell = None
13
14 def __init__(self, name, path): # type: (str, str) -> None
15 self._name = name
16 self._path = path
17
18 @property
19 def name(self): # type: () -> str
20 return self._name
21
22 @property
23 def path(self): # type: () -> str
24 return self._path
25
26 @classmethod
27 def get(cls): # type: () -> Shell
28 """
29 Retrieve the current shell.
30 """
31 if cls._shell is not None:
32 return cls._shell
33
34 try:
35 name, path = detect_shell(os.getpid())
36 except (RuntimeError, ShellDetectionFailure):
37 raise RuntimeError("Unable to detect the current shell.")
38
39 cls._shell = cls(name, path)
40
41 return cls._shell
42
43 def __repr__(self): # type: () -> str
44 return '{}("{}", "{}")'.format(self.__class__.__name__, self._name, self._path)
45
```
Path: `poetry/console/commands/shell.py`
Content:
```
1 import sys
2
3 from os import environ
4 from distutils.util import strtobool
5
6 from .env_command import EnvCommand
7
8
9 class ShellCommand(EnvCommand):
10
11 name = "shell"
12 description = "Spawns a shell within the virtual environment."
13
14 help = """The <info>shell</> command spawns a shell, according to the
15 <comment>$SHELL</> environment variable, within the virtual environment.
16 If one doesn't exist yet, it will be created.
17 """
18
19 def handle(self):
20 from poetry.utils.shell import Shell
21
22 # Check if it's already activated or doesn't exist and won't be created
23 venv_activated = strtobool(environ.get("POETRY_ACTIVE", "0")) or getattr(
24 sys, "real_prefix", sys.prefix
25 ) == str(self.env.path)
26 if venv_activated:
27 self.line(
28 "Virtual environment already activated: "
29 "<info>{}</>".format(self.env.path)
30 )
31
32 return
33
34 self.line("Spawning shell within <info>{}</>".format(self.env.path))
35
36 # Setting this to avoid spawning unnecessary nested shells
37 environ["POETRY_ACTIVE"] = "1"
38 shell = Shell.get()
39 self.env.execute(shell.path)
40 environ.pop("POETRY_ACTIVE")
41
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/poetry/console/commands/shell.py b/poetry/console/commands/shell.py
--- a/poetry/console/commands/shell.py
+++ b/poetry/console/commands/shell.py
@@ -36,5 +36,5 @@
# Setting this to avoid spawning unnecessary nested shells
environ["POETRY_ACTIVE"] = "1"
shell = Shell.get()
- self.env.execute(shell.path)
+ shell.activate(self.env)
environ.pop("POETRY_ACTIVE")
diff --git a/poetry/utils/shell.py b/poetry/utils/shell.py
--- a/poetry/utils/shell.py
+++ b/poetry/utils/shell.py
@@ -1,8 +1,16 @@
import os
+import signal
+import sys
+import pexpect
+
+from clikit.utils.terminal import Terminal
from shellingham import detect_shell
from shellingham import ShellDetectionFailure
+from ._compat import WINDOWS
+from .env import VirtualEnv
+
class Shell:
"""
@@ -40,5 +48,51 @@
return cls._shell
+ def activate(self, env): # type: (VirtualEnv) -> None
+ if WINDOWS:
+ return env.execute(self.path)
+
+ terminal = Terminal()
+ with env.temp_environ():
+ c = pexpect.spawn(
+ self._path, ["-i"], dimensions=(terminal.height, terminal.width)
+ )
+
+ c.setecho(False)
+ activate_script = self._get_activate_script()
+ bin_dir = "Scripts" if WINDOWS else "bin"
+ activate_path = env.path / bin_dir / activate_script
+ c.sendline("{} {}".format(self._get_source_command(), activate_path))
+
+ def resize(sig, data):
+ terminal = Terminal()
+ c.setwinsize(terminal.height, terminal.width)
+
+ signal.signal(signal.SIGWINCH, resize)
+
+ # Interact with the new shell.
+ c.interact(escape_character=None)
+ c.close()
+
+ sys.exit(c.exitstatus)
+
+ def _get_activate_script(self):
+ if "fish" == self._name:
+ suffix = ".fish"
+ elif "csh" == self._name:
+ suffix = ".csh"
+ else:
+ suffix = ""
+
+ return "activate" + suffix
+
+ def _get_source_command(self):
+ if "fish" == self._name:
+ return "source"
+ elif "csh" == self._name:
+ return "source"
+
+ return "."
+
def __repr__(self): # type: () -> str
return '{}("{}", "{}")'.format(self.__class__.__name__, self._name, self._path)
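The new `Shell.activate` above replaces a plain exec of the shell with a pexpect-driven interactive session that sources the venv's activate script. A stripped-down, POSIX-only sketch of that approach; the venv path and the bash binary are assumptions, not values from the repository.

```python
# POSIX-only sketch: spawn an interactive shell, source the venv's activate
# script, then hand the terminal over to the user. Paths are placeholders.
import sys

import pexpect

activate = "/path/to/venv/bin/activate"  # hypothetical venv location

child = pexpect.spawn("/bin/bash", ["-i"])
child.setecho(False)
child.sendline(". {}".format(activate))  # "." is the portable "source"
child.interact(escape_character=None)    # user works in the activated shell
child.close()
sys.exit(child.exitstatus)
```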
| {"golden_diff": "diff --git a/poetry/console/commands/shell.py b/poetry/console/commands/shell.py\n--- a/poetry/console/commands/shell.py\n+++ b/poetry/console/commands/shell.py\n@@ -36,5 +36,5 @@\n # Setting this to avoid spawning unnecessary nested shells\n environ[\"POETRY_ACTIVE\"] = \"1\"\n shell = Shell.get()\n- self.env.execute(shell.path)\n+ shell.activate(self.env)\n environ.pop(\"POETRY_ACTIVE\")\ndiff --git a/poetry/utils/shell.py b/poetry/utils/shell.py\n--- a/poetry/utils/shell.py\n+++ b/poetry/utils/shell.py\n@@ -1,8 +1,16 @@\n import os\n+import signal\n+import sys\n \n+import pexpect\n+\n+from clikit.utils.terminal import Terminal\n from shellingham import detect_shell\n from shellingham import ShellDetectionFailure\n \n+from ._compat import WINDOWS\n+from .env import VirtualEnv\n+\n \n class Shell:\n \"\"\"\n@@ -40,5 +48,51 @@\n \n return cls._shell\n \n+ def activate(self, env): # type: (VirtualEnv) -> None\n+ if WINDOWS:\n+ return env.execute(self.path)\n+\n+ terminal = Terminal()\n+ with env.temp_environ():\n+ c = pexpect.spawn(\n+ self._path, [\"-i\"], dimensions=(terminal.height, terminal.width)\n+ )\n+\n+ c.setecho(False)\n+ activate_script = self._get_activate_script()\n+ bin_dir = \"Scripts\" if WINDOWS else \"bin\"\n+ activate_path = env.path / bin_dir / activate_script\n+ c.sendline(\"{} {}\".format(self._get_source_command(), activate_path))\n+\n+ def resize(sig, data):\n+ terminal = Terminal()\n+ c.setwinsize(terminal.height, terminal.width)\n+\n+ signal.signal(signal.SIGWINCH, resize)\n+\n+ # Interact with the new shell.\n+ c.interact(escape_character=None)\n+ c.close()\n+\n+ sys.exit(c.exitstatus)\n+\n+ def _get_activate_script(self):\n+ if \"fish\" == self._name:\n+ suffix = \".fish\"\n+ elif \"csh\" == self._name:\n+ suffix = \".csh\"\n+ else:\n+ suffix = \"\"\n+\n+ return \"activate\" + suffix\n+\n+ def _get_source_command(self):\n+ if \"fish\" == self._name:\n+ return \"source\"\n+ elif \"csh\" == self._name:\n+ return \"source\"\n+\n+ return \".\"\n+\n def __repr__(self): # type: () -> str\n return '{}(\"{}\", \"{}\")'.format(self.__class__.__name__, self._name, self._path)\n", "issue": "poetry shell does not activate virtualenv \n<!-- Checked checkbox should look like this: [x] -->\r\n- [x] I am on the [latest](https://github.com/sdispater/poetry/releases/latest) Poetry version.\r\n- [x] I have searched the [issues](https://github.com/sdispater/poetry/issues) of this repo and believe that this is not a duplicate.\r\n\r\n<!--\r\n Once those are done, if you're able to fill in the following list with your information,\r\n it'd be very helpful to whoever handles the issue.\r\n-->\r\n\r\n- **OS version and name**: Mac OS X, High Sierra\r\n- **Poetry version**: 0.12.5\r\n\r\n## Issue\r\nSimilar to ```pipenv shell```, I would have expected that when running ```poetry shell``` the virtualenv gets activated, but apparently this is not the case...\r\n\r\n\r\n```console\r\n\u279c which python\r\n/Users/timon/.pyenv/shims/python\r\n\u279c poetry shell\r\nSpawning shell within /Users/timon/Library/Caches/pypoetry/virtualenvs/YOLO-SAR-py3.7\r\n\u279c which python\r\n/Users/timon/.pyenv/shims/python\r\n\u279c source /Users/timon/Library/Caches/pypoetry/virtualenvs/yolo-sar-py3.7/bin/activate\r\n\u279c which python\r\n/Users/timon/Library/Caches/pypoetry/virtualenvs/yolo-sar-py3.7/bin/python\r\n```\r\n\r\nfor comparison\r\n```console\r\n\u279c poetry run which python\r\n/Users/timon/Library/Caches/pypoetry/virtualenvs/yolo-sar-py3.7/bin/python\r\n```\r\n\r\n\r\nAm I 
misunderstanding something and this is expected behaviour or is it a bug? \r\n\r\nThanks a lot already for your time :)\n", "before_files": [{"content": "import os\n\nfrom shellingham import detect_shell\nfrom shellingham import ShellDetectionFailure\n\n\nclass Shell:\n \"\"\"\n Represents the current shell.\n \"\"\"\n\n _shell = None\n\n def __init__(self, name, path): # type: (str, str) -> None\n self._name = name\n self._path = path\n\n @property\n def name(self): # type: () -> str\n return self._name\n\n @property\n def path(self): # type: () -> str\n return self._path\n\n @classmethod\n def get(cls): # type: () -> Shell\n \"\"\"\n Retrieve the current shell.\n \"\"\"\n if cls._shell is not None:\n return cls._shell\n\n try:\n name, path = detect_shell(os.getpid())\n except (RuntimeError, ShellDetectionFailure):\n raise RuntimeError(\"Unable to detect the current shell.\")\n\n cls._shell = cls(name, path)\n\n return cls._shell\n\n def __repr__(self): # type: () -> str\n return '{}(\"{}\", \"{}\")'.format(self.__class__.__name__, self._name, self._path)\n", "path": "poetry/utils/shell.py"}, {"content": "import sys\n\nfrom os import environ\nfrom distutils.util import strtobool\n\nfrom .env_command import EnvCommand\n\n\nclass ShellCommand(EnvCommand):\n\n name = \"shell\"\n description = \"Spawns a shell within the virtual environment.\"\n\n help = \"\"\"The <info>shell</> command spawns a shell, according to the\n<comment>$SHELL</> environment variable, within the virtual environment.\nIf one doesn't exist yet, it will be created.\n\"\"\"\n\n def handle(self):\n from poetry.utils.shell import Shell\n\n # Check if it's already activated or doesn't exist and won't be created\n venv_activated = strtobool(environ.get(\"POETRY_ACTIVE\", \"0\")) or getattr(\n sys, \"real_prefix\", sys.prefix\n ) == str(self.env.path)\n if venv_activated:\n self.line(\n \"Virtual environment already activated: \"\n \"<info>{}</>\".format(self.env.path)\n )\n\n return\n\n self.line(\"Spawning shell within <info>{}</>\".format(self.env.path))\n\n # Setting this to avoid spawning unnecessary nested shells\n environ[\"POETRY_ACTIVE\"] = \"1\"\n shell = Shell.get()\n self.env.execute(shell.path)\n environ.pop(\"POETRY_ACTIVE\")\n", "path": "poetry/console/commands/shell.py"}], "after_files": [{"content": "import os\nimport signal\nimport sys\n\nimport pexpect\n\nfrom clikit.utils.terminal import Terminal\nfrom shellingham import detect_shell\nfrom shellingham import ShellDetectionFailure\n\nfrom ._compat import WINDOWS\nfrom .env import VirtualEnv\n\n\nclass Shell:\n \"\"\"\n Represents the current shell.\n \"\"\"\n\n _shell = None\n\n def __init__(self, name, path): # type: (str, str) -> None\n self._name = name\n self._path = path\n\n @property\n def name(self): # type: () -> str\n return self._name\n\n @property\n def path(self): # type: () -> str\n return self._path\n\n @classmethod\n def get(cls): # type: () -> Shell\n \"\"\"\n Retrieve the current shell.\n \"\"\"\n if cls._shell is not None:\n return cls._shell\n\n try:\n name, path = detect_shell(os.getpid())\n except (RuntimeError, ShellDetectionFailure):\n raise RuntimeError(\"Unable to detect the current shell.\")\n\n cls._shell = cls(name, path)\n\n return cls._shell\n\n def activate(self, env): # type: (VirtualEnv) -> None\n if WINDOWS:\n return env.execute(self.path)\n\n terminal = Terminal()\n with env.temp_environ():\n c = pexpect.spawn(\n self._path, [\"-i\"], dimensions=(terminal.height, terminal.width)\n )\n\n c.setecho(False)\n activate_script = 
self._get_activate_script()\n bin_dir = \"Scripts\" if WINDOWS else \"bin\"\n activate_path = env.path / bin_dir / activate_script\n c.sendline(\"{} {}\".format(self._get_source_command(), activate_path))\n\n def resize(sig, data):\n terminal = Terminal()\n c.setwinsize(terminal.height, terminal.width)\n\n signal.signal(signal.SIGWINCH, resize)\n\n # Interact with the new shell.\n c.interact(escape_character=None)\n c.close()\n\n sys.exit(c.exitstatus)\n\n def _get_activate_script(self):\n if \"fish\" == self._name:\n suffix = \".fish\"\n elif \"csh\" == self._name:\n suffix = \".csh\"\n else:\n suffix = \"\"\n\n return \"activate\" + suffix\n\n def _get_source_command(self):\n if \"fish\" == self._name:\n return \"source\"\n elif \"csh\" == self._name:\n return \"source\"\n\n return \".\"\n\n def __repr__(self): # type: () -> str\n return '{}(\"{}\", \"{}\")'.format(self.__class__.__name__, self._name, self._path)\n", "path": "poetry/utils/shell.py"}, {"content": "import sys\n\nfrom os import environ\nfrom distutils.util import strtobool\n\nfrom .env_command import EnvCommand\n\n\nclass ShellCommand(EnvCommand):\n\n name = \"shell\"\n description = \"Spawns a shell within the virtual environment.\"\n\n help = \"\"\"The <info>shell</> command spawns a shell, according to the\n<comment>$SHELL</> environment variable, within the virtual environment.\nIf one doesn't exist yet, it will be created.\n\"\"\"\n\n def handle(self):\n from poetry.utils.shell import Shell\n\n # Check if it's already activated or doesn't exist and won't be created\n venv_activated = strtobool(environ.get(\"POETRY_ACTIVE\", \"0\")) or getattr(\n sys, \"real_prefix\", sys.prefix\n ) == str(self.env.path)\n if venv_activated:\n self.line(\n \"Virtual environment already activated: \"\n \"<info>{}</>\".format(self.env.path)\n )\n\n return\n\n self.line(\"Spawning shell within <info>{}</>\".format(self.env.path))\n\n # Setting this to avoid spawning unnecessary nested shells\n environ[\"POETRY_ACTIVE\"] = \"1\"\n shell = Shell.get()\n shell.activate(self.env)\n environ.pop(\"POETRY_ACTIVE\")\n", "path": "poetry/console/commands/shell.py"}]} | 1,351 | 623 |
gh_patches_debug_10279 | rasdani/github-patches | git_diff | streamlit__streamlit-8497 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Altair support - Layer charts, `.resolve_scale` dont appear to work
# Summary
Altair appears to work just fine inside streamlit, but I have problems getting layer charts to work. Note: concatenation (vertical/horizontal) of altair charts works fine, but doing something like
```
(chart1 + chart2).resolve_scale(y='independent')
```
results in everything going blank and no chart canvas displayed
# Steps to reproduce
What are the steps we should take to reproduce the bug:
1. Build 2 altair charts inside the same streamlit script, ideally with a shared x-axis like time/date (so it makes sense to crate a compound chart)
2. try to layer them using `(chart1 + chart2).resolve_scale(y='independent')`
## Expected behavior:
The layer chart should display with different y-axes
## Actual behavior:
Nothing displays - the chart fades as if to reload but everything goes blank
## Is this a regression?
This works elsewhere, e.g. in jupyterlab
# Debug info
- Streamlit version: 0.60.0
- Python version: Python 3.7.4
- Using Conda
- OS version: Mac OS X Catalina
- Browser version: Brave, Version 1.10.97 Chromium: 83.0.4103.116
# Additional information
If needed, add any other context about the problem here. For example, did this bug come from https://discuss.streamlit.io or another site? Link the original source here!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `e2e_playwright/st_altair_chart.py`
Content:
```
1 # Copyright (c) Streamlit Inc. (2018-2022) Snowflake Inc. (2022-2024)
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import altair as alt
16 import numpy as np
17 import pandas as pd
18
19 import streamlit as st
20
21 np.random.seed(0)
22
23 data = np.random.randn(200, 3)
24 df = pd.DataFrame(data, columns=["a", "b", "c"])
25 chart = alt.Chart(df).mark_circle().encode(x="a", y="b", size="c", color="c")
26
27 st.write("Show default vega lite theme:")
28 st.altair_chart(chart, theme=None)
29
30 st.write("Show streamlit theme:")
31 st.altair_chart(chart, theme="streamlit")
32
33 st.write("Overwrite theme config:")
34 chart = (
35 alt.Chart(df, usermeta={"embedOptions": {"theme": None}})
36 .mark_circle()
37 .encode(x="a", y="b", size="c", color="c")
38 )
39 st.altair_chart(chart, theme="streamlit")
40
41 data = pd.DataFrame(
42 {
43 "a": ["A", "B", "C", "D", "E", "F", "G", "H", "I"],
44 "b": [28, 55, 43, 91, 81, 53, 19, 87, 52],
45 }
46 )
47
48 chart = alt.Chart(data).mark_bar().encode(x="a", y="b")
49
50 st.write("Bar chart with overwritten theme props:")
51 st.altair_chart(chart.configure_mark(color="black"), theme="streamlit")
52
53 # mark_arc was added in 4.2, but we have to support altair 4.0-4.1, so we
54 # have to skip this part of the test when testing min versions.
55 major, minor, patch = alt.__version__.split(".")
56 if not (major == "4" and minor < "2"):
57 source = pd.DataFrame(
58 {"category": [1, 2, 3, 4, 5, 6], "value": [4, 6, 10, 3, 7, 8]}
59 )
60
61 chart = (
62 alt.Chart(source)
63 .mark_arc(innerRadius=50)
64 .encode(
65 theta=alt.Theta(field="value", type="quantitative"),
66 color=alt.Color(field="category", type="nominal"),
67 )
68 )
69
70 st.write("Pie Chart with more than 4 Legend items")
71 st.altair_chart(chart, theme="streamlit")
72
73 # taken from vega_datasets barley example
74 barley = alt.UrlData(
75 "https://cdn.jsdelivr.net/npm/[email protected]/data/barley.json"
76 )
77
78 barley_chart = (
79 alt.Chart(barley)
80 .mark_bar()
81 .encode(x="year:O", y="sum(yield):Q", color="year:N", column="site:N")
82 )
83
84 st.write("Grouped Bar Chart with default theme:")
85 st.altair_chart(barley_chart, theme=None)
86
87 st.write("Grouped Bar Chart with streamlit theme:")
88 st.altair_chart(barley_chart, theme="streamlit")
89
90 st.write("Chart with use_container_width used")
91 st.altair_chart(barley_chart, theme=None, use_container_width=True)
92
93 st.write("Layered chart")
94 # Taken from vega_datasets
95 stocks = alt.UrlData(
96 "https://cdn.jsdelivr.net/npm/[email protected]/data/stocks.csv"
97 )
98
99 base = (
100 alt.Chart(stocks)
101 .encode(x="date:T", y="price:Q", color="symbol:N")
102 .transform_filter(alt.datum.symbol == "GOOG")
103 )
104
105 new_base_chart = base.mark_line() + base.mark_point()
106 st.altair_chart(new_base_chart)
107
108 x = np.linspace(10, 100, 10)
109 y1 = 5 * x
110 y2 = 1 / x
111
112 df1 = pd.DataFrame.from_dict({"x": x, "y1": y1, "y2": y2})
113
114 c1 = alt.Chart(df1).mark_line().encode(alt.X("x"), alt.Y("y1"))
115
116 c2 = alt.Chart(df1).mark_line().encode(alt.X("x"), alt.Y("y2"))
117
118 st.altair_chart(c1 & c2, use_container_width=True)
119
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/e2e_playwright/st_altair_chart.py b/e2e_playwright/st_altair_chart.py
--- a/e2e_playwright/st_altair_chart.py
+++ b/e2e_playwright/st_altair_chart.py
@@ -116,3 +116,20 @@
c2 = alt.Chart(df1).mark_line().encode(alt.X("x"), alt.Y("y2"))
st.altair_chart(c1 & c2, use_container_width=True)
+
+from altair.expr import datum
+
+results = [
+ [2016, 11525, 3],
+ [2017, 11517, 2],
+ [2018, 11521, 2],
+ [2019, 11519, 4],
+]
+
+dataframe = pd.DataFrame(results, columns=["Job Number", "Test Count", "Test Failures"])
+
+base = alt.Chart(dataframe).encode(alt.X("Job Number:O"))
+chart_test_count = base.mark_line().encode(alt.Y("Test Count:N"))
+chart_test_failures = base.mark_line().encode(alt.Y("Test Failures:N"))
+
+st.altair_chart((chart_test_count + chart_test_failures).resolve_scale(y="independent"))
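The streamlit golden diff above adds a regression script rather than a library fix: a layered chart with independently resolved y scales, the exact construction the issue reports as rendering blank. A self-contained sketch of that chart shape, with arbitrary data values:

```python
# Layered Altair chart with independent y scales, the shape the issue
# reports as going blank inside Streamlit. Data values are arbitrary.
import altair as alt
import pandas as pd

df = pd.DataFrame(
    {"x": [1, 2, 3, 4], "big": [10, 40, 90, 160], "small": [0.9, 0.5, 0.3, 0.2]}
)

base = alt.Chart(df).encode(alt.X("x:Q"))
layered = (
    base.mark_line().encode(alt.Y("big:Q"))
    + base.mark_line().encode(alt.Y("small:Q"))
).resolve_scale(y="independent")

# In a Streamlit script this would be rendered with: st.altair_chart(layered)
```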
| {"golden_diff": "diff --git a/e2e_playwright/st_altair_chart.py b/e2e_playwright/st_altair_chart.py\n--- a/e2e_playwright/st_altair_chart.py\n+++ b/e2e_playwright/st_altair_chart.py\n@@ -116,3 +116,20 @@\n c2 = alt.Chart(df1).mark_line().encode(alt.X(\"x\"), alt.Y(\"y2\"))\n \n st.altair_chart(c1 & c2, use_container_width=True)\n+\n+from altair.expr import datum\n+\n+results = [\n+ [2016, 11525, 3],\n+ [2017, 11517, 2],\n+ [2018, 11521, 2],\n+ [2019, 11519, 4],\n+]\n+\n+dataframe = pd.DataFrame(results, columns=[\"Job Number\", \"Test Count\", \"Test Failures\"])\n+\n+base = alt.Chart(dataframe).encode(alt.X(\"Job Number:O\"))\n+chart_test_count = base.mark_line().encode(alt.Y(\"Test Count:N\"))\n+chart_test_failures = base.mark_line().encode(alt.Y(\"Test Failures:N\"))\n+\n+st.altair_chart((chart_test_count + chart_test_failures).resolve_scale(y=\"independent\"))\n", "issue": "Altair support - Layer charts, `.resolve_scale` dont appear to work\n# Summary\r\n\r\nAltair appears to work just fine inside streamlit, but I have problems getting layer charts to work. Note - Concat (vert/horizontal) of altair charts works fine, but doing something like\r\n```\r\n(chart1 + chart2).resolve_scale(y='independent') \r\n```\r\nresults in everything going blank and no chart canvas displayed\r\n\r\n# Steps to reproduce\r\n\r\nWhat are the steps we should take to reproduce the bug:\r\n\r\n1. Build 2 altair charts inside the same streamlit script, ideally with a shared x-axis like time/date (so it makes sense to crate a compound chart)\r\n2. try to layer them using `(chart1 + chart2).resolve_scale(y='independent')`\r\n\r\n## Expected behavior:\r\n\r\nThe layer chart should display with different y-axes\r\n## Actual behavior:\r\n\r\nNothing displays - the chart fades as if to reload but everything goes blank\r\n\r\n## Is this a regression?\r\n\r\nthis works elsewhere, e.g. in jupyterlab\r\n\r\n# Debug info\r\n\r\n- Streamlit version: 0.60.0\r\n- Python version: Python 3.7.4\r\n- Using Conda\r\n- OS version: Mac OS X Catalina\r\n- Browser version: Brave, Version 1.10.97 Chromium: 83.0.4103.116\r\n\r\n# Additional information\r\n\r\nIf needed, add any other context about the problem here. For example, did this bug come from https://discuss.streamlit.io or another site? Link the original source here!\r\n\n", "before_files": [{"content": "# Copyright (c) Streamlit Inc. (2018-2022) Snowflake Inc. 
(2022-2024)\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport altair as alt\nimport numpy as np\nimport pandas as pd\n\nimport streamlit as st\n\nnp.random.seed(0)\n\ndata = np.random.randn(200, 3)\ndf = pd.DataFrame(data, columns=[\"a\", \"b\", \"c\"])\nchart = alt.Chart(df).mark_circle().encode(x=\"a\", y=\"b\", size=\"c\", color=\"c\")\n\nst.write(\"Show default vega lite theme:\")\nst.altair_chart(chart, theme=None)\n\nst.write(\"Show streamlit theme:\")\nst.altair_chart(chart, theme=\"streamlit\")\n\nst.write(\"Overwrite theme config:\")\nchart = (\n alt.Chart(df, usermeta={\"embedOptions\": {\"theme\": None}})\n .mark_circle()\n .encode(x=\"a\", y=\"b\", size=\"c\", color=\"c\")\n)\nst.altair_chart(chart, theme=\"streamlit\")\n\ndata = pd.DataFrame(\n {\n \"a\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\", \"I\"],\n \"b\": [28, 55, 43, 91, 81, 53, 19, 87, 52],\n }\n)\n\nchart = alt.Chart(data).mark_bar().encode(x=\"a\", y=\"b\")\n\nst.write(\"Bar chart with overwritten theme props:\")\nst.altair_chart(chart.configure_mark(color=\"black\"), theme=\"streamlit\")\n\n# mark_arc was added in 4.2, but we have to support altair 4.0-4.1, so we\n# have to skip this part of the test when testing min versions.\nmajor, minor, patch = alt.__version__.split(\".\")\nif not (major == \"4\" and minor < \"2\"):\n source = pd.DataFrame(\n {\"category\": [1, 2, 3, 4, 5, 6], \"value\": [4, 6, 10, 3, 7, 8]}\n )\n\n chart = (\n alt.Chart(source)\n .mark_arc(innerRadius=50)\n .encode(\n theta=alt.Theta(field=\"value\", type=\"quantitative\"),\n color=alt.Color(field=\"category\", type=\"nominal\"),\n )\n )\n\n st.write(\"Pie Chart with more than 4 Legend items\")\n st.altair_chart(chart, theme=\"streamlit\")\n\n# taken from vega_datasets barley example\nbarley = alt.UrlData(\n \"https://cdn.jsdelivr.net/npm/[email protected]/data/barley.json\"\n)\n\nbarley_chart = (\n alt.Chart(barley)\n .mark_bar()\n .encode(x=\"year:O\", y=\"sum(yield):Q\", color=\"year:N\", column=\"site:N\")\n)\n\nst.write(\"Grouped Bar Chart with default theme:\")\nst.altair_chart(barley_chart, theme=None)\n\nst.write(\"Grouped Bar Chart with streamlit theme:\")\nst.altair_chart(barley_chart, theme=\"streamlit\")\n\nst.write(\"Chart with use_container_width used\")\nst.altair_chart(barley_chart, theme=None, use_container_width=True)\n\nst.write(\"Layered chart\")\n# Taken from vega_datasets\nstocks = alt.UrlData(\n \"https://cdn.jsdelivr.net/npm/[email protected]/data/stocks.csv\"\n)\n\nbase = (\n alt.Chart(stocks)\n .encode(x=\"date:T\", y=\"price:Q\", color=\"symbol:N\")\n .transform_filter(alt.datum.symbol == \"GOOG\")\n)\n\nnew_base_chart = base.mark_line() + base.mark_point()\nst.altair_chart(new_base_chart)\n\nx = np.linspace(10, 100, 10)\ny1 = 5 * x\ny2 = 1 / x\n\ndf1 = pd.DataFrame.from_dict({\"x\": x, \"y1\": y1, \"y2\": y2})\n\nc1 = alt.Chart(df1).mark_line().encode(alt.X(\"x\"), alt.Y(\"y1\"))\n\nc2 = alt.Chart(df1).mark_line().encode(alt.X(\"x\"), alt.Y(\"y2\"))\n\nst.altair_chart(c1 & c2, 
use_container_width=True)\n", "path": "e2e_playwright/st_altair_chart.py"}], "after_files": [{"content": "# Copyright (c) Streamlit Inc. (2018-2022) Snowflake Inc. (2022-2024)\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport altair as alt\nimport numpy as np\nimport pandas as pd\n\nimport streamlit as st\n\nnp.random.seed(0)\n\ndata = np.random.randn(200, 3)\ndf = pd.DataFrame(data, columns=[\"a\", \"b\", \"c\"])\nchart = alt.Chart(df).mark_circle().encode(x=\"a\", y=\"b\", size=\"c\", color=\"c\")\n\nst.write(\"Show default vega lite theme:\")\nst.altair_chart(chart, theme=None)\n\nst.write(\"Show streamlit theme:\")\nst.altair_chart(chart, theme=\"streamlit\")\n\nst.write(\"Overwrite theme config:\")\nchart = (\n alt.Chart(df, usermeta={\"embedOptions\": {\"theme\": None}})\n .mark_circle()\n .encode(x=\"a\", y=\"b\", size=\"c\", color=\"c\")\n)\nst.altair_chart(chart, theme=\"streamlit\")\n\ndata = pd.DataFrame(\n {\n \"a\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\", \"I\"],\n \"b\": [28, 55, 43, 91, 81, 53, 19, 87, 52],\n }\n)\n\nchart = alt.Chart(data).mark_bar().encode(x=\"a\", y=\"b\")\n\nst.write(\"Bar chart with overwritten theme props:\")\nst.altair_chart(chart.configure_mark(color=\"black\"), theme=\"streamlit\")\n\n# mark_arc was added in 4.2, but we have to support altair 4.0-4.1, so we\n# have to skip this part of the test when testing min versions.\nmajor, minor, patch = alt.__version__.split(\".\")\nif not (major == \"4\" and minor < \"2\"):\n source = pd.DataFrame(\n {\"category\": [1, 2, 3, 4, 5, 6], \"value\": [4, 6, 10, 3, 7, 8]}\n )\n\n chart = (\n alt.Chart(source)\n .mark_arc(innerRadius=50)\n .encode(\n theta=alt.Theta(field=\"value\", type=\"quantitative\"),\n color=alt.Color(field=\"category\", type=\"nominal\"),\n )\n )\n\n st.write(\"Pie Chart with more than 4 Legend items\")\n st.altair_chart(chart, theme=\"streamlit\")\n\n# taken from vega_datasets barley example\nbarley = alt.UrlData(\n \"https://cdn.jsdelivr.net/npm/[email protected]/data/barley.json\"\n)\n\nbarley_chart = (\n alt.Chart(barley)\n .mark_bar()\n .encode(x=\"year:O\", y=\"sum(yield):Q\", color=\"year:N\", column=\"site:N\")\n)\n\nst.write(\"Grouped Bar Chart with default theme:\")\nst.altair_chart(barley_chart, theme=None)\n\nst.write(\"Grouped Bar Chart with streamlit theme:\")\nst.altair_chart(barley_chart, theme=\"streamlit\")\n\nst.write(\"Chart with use_container_width used\")\nst.altair_chart(barley_chart, theme=None, use_container_width=True)\n\nst.write(\"Layered chart\")\n# Taken from vega_datasets\nstocks = alt.UrlData(\n \"https://cdn.jsdelivr.net/npm/[email protected]/data/stocks.csv\"\n)\n\nbase = (\n alt.Chart(stocks)\n .encode(x=\"date:T\", y=\"price:Q\", color=\"symbol:N\")\n .transform_filter(alt.datum.symbol == \"GOOG\")\n)\n\nnew_base_chart = base.mark_line() + base.mark_point()\nst.altair_chart(new_base_chart)\n\nx = np.linspace(10, 100, 10)\ny1 = 5 * x\ny2 = 1 / x\n\ndf1 = pd.DataFrame.from_dict({\"x\": x, \"y1\": y1, \"y2\": y2})\n\nc1 = 
alt.Chart(df1).mark_line().encode(alt.X(\"x\"), alt.Y(\"y1\"))\n\nc2 = alt.Chart(df1).mark_line().encode(alt.X(\"x\"), alt.Y(\"y2\"))\n\nst.altair_chart(c1 & c2, use_container_width=True)\n\nfrom altair.expr import datum\n\nresults = [\n [2016, 11525, 3],\n [2017, 11517, 2],\n [2018, 11521, 2],\n [2019, 11519, 4],\n]\n\ndataframe = pd.DataFrame(results, columns=[\"Job Number\", \"Test Count\", \"Test Failures\"])\n\nbase = alt.Chart(dataframe).encode(alt.X(\"Job Number:O\"))\nchart_test_count = base.mark_line().encode(alt.Y(\"Test Count:N\"))\nchart_test_failures = base.mark_line().encode(alt.Y(\"Test Failures:N\"))\n\nst.altair_chart((chart_test_count + chart_test_failures).resolve_scale(y=\"independent\"))\n", "path": "e2e_playwright/st_altair_chart.py"}]} | 1,934 | 298 |
gh_patches_debug_15549 | rasdani/github-patches | git_diff | freedomofpress__securedrop-5674 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Dual distro support broke "securedrop-admin verify"
## Description
When adding support for Focal to the configuration tests in #5529, a check of the `MOLECULE_SCENARIO_NAME` environment variable broke `securedrop-admin verify`, where it's not set.
## Steps to Reproduce
On an admin workstation:
- Run `securedrop-admin setup -t`
- Run `securedrop-admin verify`
## Expected Behavior
That the configuration tests would run.
## Actual Behavior
You get an error [here](https://github.com/freedomofpress/securedrop/blob/76d133a7e5962f8d904e507d93e6a61575358eeb/molecule/testinfra/conftest.py#L31) saying `'NoneType' object has no attribute 'endswith'`.
## Comments
Should probably check if it's `None` or just add `""` as the default in the `os.environ.get` call.
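For illustration, either option comes out to a small guard (a minimal sketch, not the actual patch):

```python
import os

# Option 1: check for None explicitly.
scenario = os.environ.get("MOLECULE_SCENARIO_NAME")
if scenario is not None and scenario.endswith("focal"):
    ...

# Option 2: supply "" as the default so .endswith() is always safe.
if os.environ.get("MOLECULE_SCENARIO_NAME", "").endswith("focal"):
    ...
```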
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `molecule/testinfra/conftest.py`
Content:
```
1 """
2 Configuration for TestInfra test suite for SecureDrop.
3 Handles importing host-specific test vars, so test functions
4 can be reused across multiple hosts, with varied targets.
5
6 Vars should be placed in `testinfra/vars/<hostname>.yml`.
7 """
8
9 import io
10 import os
11 import yaml
12 import testutils
13
14 # The config tests target staging by default. It's possible to override
15 # for e.g. prod, but the associated vars files are not yet ported.
16 target_host = os.environ.get('SECUREDROP_TESTINFRA_TARGET_HOST', 'staging')
17
18
19 def securedrop_import_testinfra_vars(hostname, with_header=False):
20 """
21 Import vars from a YAML file to populate tests with host-specific
22 values used in checks. For instance, the SecureDrop docroot will
23 be under /vagrant in development, but /var/www/securedrop in staging.
24
25 Vars must be stored in `testinfra/vars/<hostname>.yml`.
26 """
27 filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml")
28 with io.open(filepath, 'r') as f:
29 hostvars = yaml.safe_load(f)
30
31 if os.environ.get("MOLECULE_SCENARIO_NAME").endswith("focal"):
32 hostvars['securedrop_venv_site_packages'] = hostvars["securedrop_venv_site_packages"].format("3.8") # noqa: E501
33 hostvars['python_version'] = "3.8"
34 else:
35 hostvars['securedrop_venv_site_packages'] = hostvars["securedrop_venv_site_packages"].format("3.5") # noqa: E501
36 hostvars['python_version'] = "3.5"
37
38 if with_header:
39 hostvars = dict(securedrop_test_vars=hostvars)
40
41 return hostvars
42
43
44 def lookup_molecule_info():
45 """
46 Molecule automatically writes YAML files documenting dynamic host info
47 such as remote IPs. Read that file and pass back the config dict.
48 """
49 molecule_instance_config_path = os.path.abspath(
50 os.environ['MOLECULE_INSTANCE_CONFIG'])
51 with open(molecule_instance_config_path, 'r') as f:
52 molecule_instance_config = yaml.safe_load(f)
53 return molecule_instance_config
54
55
56 class Myvalues:
57 def __init__(self):
58 pass
59
60
61 value = securedrop_import_testinfra_vars(target_host)
62 res = Myvalues()
63 for key, value in value.items():
64 setattr(res, key, value)
65 testutils.securedrop_test_vars = res
66
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/molecule/testinfra/conftest.py b/molecule/testinfra/conftest.py
--- a/molecule/testinfra/conftest.py
+++ b/molecule/testinfra/conftest.py
@@ -28,7 +28,16 @@
with io.open(filepath, 'r') as f:
hostvars = yaml.safe_load(f)
- if os.environ.get("MOLECULE_SCENARIO_NAME").endswith("focal"):
+ # Testing against both Focal and Xenial must be supported for now in both
+ # staging scenarios, and in prod via `USE_FOCAL=1 ./securedrop-admin verify`
+ testing_focal = False
+ scenario_env = "MOLECULE_SCENARIO_NAME"
+ if scenario_env in os.environ and os.environ.get(scenario_env).endswith("focal"):
+ testing_focal = True
+ if "USE_FOCAL" in os.environ:
+ testing_focal = True
+
+ if testing_focal:
hostvars['securedrop_venv_site_packages'] = hostvars["securedrop_venv_site_packages"].format("3.8") # noqa: E501
hostvars['python_version'] = "3.8"
else:
| {"golden_diff": "diff --git a/molecule/testinfra/conftest.py b/molecule/testinfra/conftest.py\n--- a/molecule/testinfra/conftest.py\n+++ b/molecule/testinfra/conftest.py\n@@ -28,7 +28,16 @@\n with io.open(filepath, 'r') as f:\n hostvars = yaml.safe_load(f)\n \n- if os.environ.get(\"MOLECULE_SCENARIO_NAME\").endswith(\"focal\"):\n+ # Testing against both Focal and Xenial must be supported for now in both\n+ # staging scenarios, and in prod via `USE_FOCAL=1 ./securedrop-admin verify`\n+ testing_focal = False\n+ scenario_env = \"MOLECULE_SCENARIO_NAME\"\n+ if scenario_env in os.environ and os.environ.get(scenario_env).endswith(\"focal\"):\n+ testing_focal = True\n+ if \"USE_FOCAL\" in os.environ:\n+ testing_focal = True\n+\n+ if testing_focal:\n hostvars['securedrop_venv_site_packages'] = hostvars[\"securedrop_venv_site_packages\"].format(\"3.8\") # noqa: E501\n hostvars['python_version'] = \"3.8\"\n else:\n", "issue": "Dual distro support broke \"securedrop-admin verify\"\n## Description\r\n\r\nWhen adding support for Focal to the configuration tests in #5529, a check of the `MOLECULE_SCENARIO_NAME` environment variable broke `securedrop-admin verify`, where it's not set.\r\n\r\n## Steps to Reproduce\r\n\r\nOn an admin workstation:\r\n- Run `securedrop-admin setup -t`\r\n- Run `securedrop-admin verify`\r\n\r\n## Expected Behavior\r\n\r\nThat the configuration tests would run.\r\n\r\n## Actual Behavior\r\n\r\nYou get an error [here](https://github.com/freedomofpress/securedrop/blob/76d133a7e5962f8d904e507d93e6a61575358eeb/molecule/testinfra/conftest.py#L31) saying `'NoneType' object has no attribute 'endswith'`. \r\n\r\n## Comments\r\n\r\nShould probably check if it's `None` or just add `\"\"` as the default in the `os.environ.get` call.\n", "before_files": [{"content": "\"\"\"\nConfiguration for TestInfra test suite for SecureDrop.\nHandles importing host-specific test vars, so test functions\ncan be reused across multiple hosts, with varied targets.\n\nVars should be placed in `testinfra/vars/<hostname>.yml`.\n\"\"\"\n\nimport io\nimport os\nimport yaml\nimport testutils\n\n# The config tests target staging by default. It's possible to override\n# for e.g. prod, but the associated vars files are not yet ported.\ntarget_host = os.environ.get('SECUREDROP_TESTINFRA_TARGET_HOST', 'staging')\n\n\ndef securedrop_import_testinfra_vars(hostname, with_header=False):\n \"\"\"\n Import vars from a YAML file to populate tests with host-specific\n values used in checks. For instance, the SecureDrop docroot will\n be under /vagrant in development, but /var/www/securedrop in staging.\n\n Vars must be stored in `testinfra/vars/<hostname>.yml`.\n \"\"\"\n filepath = os.path.join(os.path.dirname(__file__), \"vars\", hostname+\".yml\")\n with io.open(filepath, 'r') as f:\n hostvars = yaml.safe_load(f)\n\n if os.environ.get(\"MOLECULE_SCENARIO_NAME\").endswith(\"focal\"):\n hostvars['securedrop_venv_site_packages'] = hostvars[\"securedrop_venv_site_packages\"].format(\"3.8\") # noqa: E501\n hostvars['python_version'] = \"3.8\"\n else:\n hostvars['securedrop_venv_site_packages'] = hostvars[\"securedrop_venv_site_packages\"].format(\"3.5\") # noqa: E501\n hostvars['python_version'] = \"3.5\"\n\n if with_header:\n hostvars = dict(securedrop_test_vars=hostvars)\n\n return hostvars\n\n\ndef lookup_molecule_info():\n \"\"\"\n Molecule automatically writes YAML files documenting dynamic host info\n such as remote IPs. 
Read that file and pass back the config dict.\n \"\"\"\n molecule_instance_config_path = os.path.abspath(\n os.environ['MOLECULE_INSTANCE_CONFIG'])\n with open(molecule_instance_config_path, 'r') as f:\n molecule_instance_config = yaml.safe_load(f)\n return molecule_instance_config\n\n\nclass Myvalues:\n def __init__(self):\n pass\n\n\nvalue = securedrop_import_testinfra_vars(target_host)\nres = Myvalues()\nfor key, value in value.items():\n setattr(res, key, value)\ntestutils.securedrop_test_vars = res\n", "path": "molecule/testinfra/conftest.py"}], "after_files": [{"content": "\"\"\"\nConfiguration for TestInfra test suite for SecureDrop.\nHandles importing host-specific test vars, so test functions\ncan be reused across multiple hosts, with varied targets.\n\nVars should be placed in `testinfra/vars/<hostname>.yml`.\n\"\"\"\n\nimport io\nimport os\nimport yaml\nimport testutils\n\n# The config tests target staging by default. It's possible to override\n# for e.g. prod, but the associated vars files are not yet ported.\ntarget_host = os.environ.get('SECUREDROP_TESTINFRA_TARGET_HOST', 'staging')\n\n\ndef securedrop_import_testinfra_vars(hostname, with_header=False):\n \"\"\"\n Import vars from a YAML file to populate tests with host-specific\n values used in checks. For instance, the SecureDrop docroot will\n be under /vagrant in development, but /var/www/securedrop in staging.\n\n Vars must be stored in `testinfra/vars/<hostname>.yml`.\n \"\"\"\n filepath = os.path.join(os.path.dirname(__file__), \"vars\", hostname+\".yml\")\n with io.open(filepath, 'r') as f:\n hostvars = yaml.safe_load(f)\n\n # Testing against both Focal and Xenial must be supported for now in both\n # staging scenarios, and in prod via `USE_FOCAL=1 ./securedrop-admin verify`\n testing_focal = False\n scenario_env = \"MOLECULE_SCENARIO_NAME\"\n if scenario_env in os.environ and os.environ.get(scenario_env).endswith(\"focal\"):\n testing_focal = True\n if \"USE_FOCAL\" in os.environ:\n testing_focal = True\n\n if testing_focal:\n hostvars['securedrop_venv_site_packages'] = hostvars[\"securedrop_venv_site_packages\"].format(\"3.8\") # noqa: E501\n hostvars['python_version'] = \"3.8\"\n else:\n hostvars['securedrop_venv_site_packages'] = hostvars[\"securedrop_venv_site_packages\"].format(\"3.5\") # noqa: E501\n hostvars['python_version'] = \"3.5\"\n\n if with_header:\n hostvars = dict(securedrop_test_vars=hostvars)\n\n return hostvars\n\n\ndef lookup_molecule_info():\n \"\"\"\n Molecule automatically writes YAML files documenting dynamic host info\n such as remote IPs. Read that file and pass back the config dict.\n \"\"\"\n molecule_instance_config_path = os.path.abspath(\n os.environ['MOLECULE_INSTANCE_CONFIG'])\n with open(molecule_instance_config_path, 'r') as f:\n molecule_instance_config = yaml.safe_load(f)\n return molecule_instance_config\n\n\nclass Myvalues:\n def __init__(self):\n pass\n\n\nvalue = securedrop_import_testinfra_vars(target_host)\nres = Myvalues()\nfor key, value in value.items():\n setattr(res, key, value)\ntestutils.securedrop_test_vars = res\n", "path": "molecule/testinfra/conftest.py"}]} | 1,159 | 276 |
gh_patches_debug_1314 | rasdani/github-patches | git_diff | apache__airflow-9699 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
TimeSensor triggers immediately when used over midnight (UTC)
**Apache Airflow version**: 1.10.10 (issue exists in current master as well)
**Environment**: does not seem relevant
**What happened**:
The TimeSensor does trigger if the current time is later than the defined trigger time. Looking at the [source code](https://github.com/apache/airflow/blob/master/airflow/sensors/time_sensor.py), the trigger rule is defined as
```
return timezone.utcnow().time() > self.target_time
```
This leads to problems when the DAG runs over midnight UTC. For example, suppose the following DAG:
```
with DAG('foo',
default_args={'start_date': datetime(2020, 7, 1, tzinfo=pendulum.timezone("Europe/Berlin"))},
schedule_interval="0 0 * * *") as dag:
# in summer, Europe/Berlin is two hours after UTC, hence:
    time_04h00_local = TimeSensor(task_id="time_04h00", target_time=time(hour=2, minute=0))
```
This DAG will be triggered at 22:00 UTC. Then, according to the trigger rule:
```
22:00 UTC > 2:00 UTC
```
Hence, the TimeSensor will be triggered immediately.
**What you expected to happen**:
The TimeSensor should trigger on the following day if `target_time < next_execution_date.time()`.
**Possible workarounds**:
One can always use the TimeDeltaSensor to achieve similar effects. This does result in code that is not as readable, though.
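For the 04:00-local example above, the workaround would look roughly like this (a sketch against the Airflow 1.10 sensor API; the delta is taken from the scheduled midnight run, so it is timezone-safe by construction):

```python
from datetime import timedelta

from airflow.sensors.time_delta_sensor import TimeDeltaSensor

# Wait until 4 hours after the scheduled run of the midnight DAG,
# i.e. 04:00 Europe/Berlin, no matter where midnight falls in UTC.
time_04h00_local = TimeDeltaSensor(task_id="time_04h00", delta=timedelta(hours=4))
```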
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `airflow/sensors/time_sensor.py`
Content:
```
1 #
2 # Licensed to the Apache Software Foundation (ASF) under one
3 # or more contributor license agreements. See the NOTICE file
4 # distributed with this work for additional information
5 # regarding copyright ownership. The ASF licenses this file
6 # to you under the Apache License, Version 2.0 (the
7 # "License"); you may not use this file except in compliance
8 # with the License. You may obtain a copy of the License at
9 #
10 # http://www.apache.org/licenses/LICENSE-2.0
11 #
12 # Unless required by applicable law or agreed to in writing,
13 # software distributed under the License is distributed on an
14 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 # KIND, either express or implied. See the License for the
16 # specific language governing permissions and limitations
17 # under the License.
18
19 from airflow.sensors.base_sensor_operator import BaseSensorOperator
20 from airflow.utils import timezone
21 from airflow.utils.decorators import apply_defaults
22
23
24 class TimeSensor(BaseSensorOperator):
25 """
26 Waits until the specified time of the day.
27
28 :param target_time: time after which the job succeeds
29 :type target_time: datetime.time
30 """
31
32 @apply_defaults
33 def __init__(self, target_time, *args, **kwargs):
34 super().__init__(*args, **kwargs)
35 self.target_time = target_time
36
37 def poke(self, context):
38 self.log.info('Checking if the time (%s) has come', self.target_time)
39 return timezone.utcnow().time() > self.target_time
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/airflow/sensors/time_sensor.py b/airflow/sensors/time_sensor.py
--- a/airflow/sensors/time_sensor.py
+++ b/airflow/sensors/time_sensor.py
@@ -36,4 +36,4 @@
def poke(self, context):
self.log.info('Checking if the time (%s) has come', self.target_time)
- return timezone.utcnow().time() > self.target_time
+ return timezone.make_naive(timezone.utcnow()).time() > self.target_time
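Reading the patched condition: `make_naive` converts the aware UTC timestamp into Airflow's configured default timezone before stripping tzinfo, so `target_time` is now compared against local wall-clock time instead of UTC. A sketch of the resulting check (assuming Airflow's `timezone` helpers; illustrative only):

```python
from airflow.utils import timezone

def poke_condition(target_time):
    # "now" converted to the configured default timezone, then made naive,
    # so target_time is interpreted as local wall-clock time.
    return timezone.make_naive(timezone.utcnow()).time() > target_time
```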
| {"golden_diff": "diff --git a/airflow/sensors/time_sensor.py b/airflow/sensors/time_sensor.py\n--- a/airflow/sensors/time_sensor.py\n+++ b/airflow/sensors/time_sensor.py\n@@ -36,4 +36,4 @@\n \n def poke(self, context):\n self.log.info('Checking if the time (%s) has come', self.target_time)\n- return timezone.utcnow().time() > self.target_time\n+ return timezone.make_naive(timezone.utcnow()).time() > self.target_time\n", "issue": "TimeSensor triggers immediately when used over midnight (UTC)\n<!--\r\n\r\nWelcome to Apache Airflow! For a smooth issue process, try to answer the following questions.\r\nDon't worry if they're not all applicable; just try to include what you can :-)\r\n\r\nIf you need to include code snippets or logs, please put them in fenced code\r\nblocks. If they're super-long, please use the details tag like\r\n<details><summary>super-long log</summary> lots of stuff </details>\r\n\r\nPlease delete these comment blocks before submitting the issue.\r\n\r\n-->\r\n\r\n<!--\r\n\r\nIMPORTANT!!!\r\n\r\nPLEASE CHECK \"SIMILAR TO X EXISTING ISSUES\" OPTION IF VISIBLE\r\nNEXT TO \"SUBMIT NEW ISSUE\" BUTTON!!!\r\n\r\nPLEASE CHECK IF THIS ISSUE HAS BEEN REPORTED PREVIOUSLY USING SEARCH!!!\r\n\r\nPlease complete the next sections or the issue will be closed.\r\nThis questions are the first thing we need to know to understand the context.\r\n\r\n-->\r\n\r\n**Apache Airflow version**: 1.10.10 (issue exists in current master as well)\r\n\r\n**Environment**: does not seem relevant\r\n\r\n**What happened**:\r\n\r\nThe TimeSensor does trigger if the current time is later than the defined trigger time. Looking at the [source code](https://github.com/apache/airflow/blob/master/airflow/sensors/time_sensor.py), the trigger rule is defined as\r\n```\r\nreturn timezone.utcnow().time() > self.target_time\r\n```\r\nThis leads to problems when the DAG runs over midnight UTC. For example, suppose the following DAG:\r\n\r\n```\r\nwith DAG('foo', \r\n default_args={'start_date': datetime(2020, 7, 1, tzinfo=pendulum.timezone(\"Europe/Berlin\"))}, \r\n schedule_interval=\"0 0 * * *\") as dag:\r\n\r\n # in summer, Europe/Berlin is two hours after UTC, hence: \r\n time_04h00_local = TimeSensor(task_id=\"time_01h30\", target_time=time(hour=2, minute=00))\r\n```\r\n\r\nThis DAG will be triggered at 22:00 UTC. Then, according to the trigger rule:\r\n```\r\n22:00 UTC > 2:00 UTC\r\n```\r\nHence, the TimeSensor will be triggered immediately. \r\n\r\n**What you expected to happen**:\r\n\r\nThe TimeSensor should trigger at the following day if `target_time < next_execution_date.time()`\r\n\r\n**Possible workarounds**:\r\n\r\nOne can always use the TimeDeltaSensor to archive similar effects. This does result in code that is not as readable, though. \n", "before_files": [{"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. 
See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\nfrom airflow.sensors.base_sensor_operator import BaseSensorOperator\nfrom airflow.utils import timezone\nfrom airflow.utils.decorators import apply_defaults\n\n\nclass TimeSensor(BaseSensorOperator):\n \"\"\"\n Waits until the specified time of the day.\n\n :param target_time: time after which the job succeeds\n :type target_time: datetime.time\n \"\"\"\n\n @apply_defaults\n def __init__(self, target_time, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.target_time = target_time\n\n def poke(self, context):\n self.log.info('Checking if the time (%s) has come', self.target_time)\n return timezone.utcnow().time() > self.target_time\n", "path": "airflow/sensors/time_sensor.py"}], "after_files": [{"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\nfrom airflow.sensors.base_sensor_operator import BaseSensorOperator\nfrom airflow.utils import timezone\nfrom airflow.utils.decorators import apply_defaults\n\n\nclass TimeSensor(BaseSensorOperator):\n \"\"\"\n Waits until the specified time of the day.\n\n :param target_time: time after which the job succeeds\n :type target_time: datetime.time\n \"\"\"\n\n @apply_defaults\n def __init__(self, target_time, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.target_time = target_time\n\n def poke(self, context):\n self.log.info('Checking if the time (%s) has come', self.target_time)\n return timezone.make_naive(timezone.utcnow()).time() > self.target_time\n", "path": "airflow/sensors/time_sensor.py"}]} | 1,200 | 114 |
gh_patches_debug_6124 | rasdani/github-patches | git_diff | conan-io__conan-3087 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
package_id() regression bug in conan 1.4.x?
Consider the following `conanfile.py`:
```python
from conans import ConanFile
class TestConan(ConanFile):
name = "Test"
version = "0.0.1"
settings = "os", "arch"
def package_id(self):
self.info.include_build_settings()
self.info.settings.os_build = self.info.settings.os
self.info.settings.arch_build = self.info.settings.arch
del self.info.settings.os
del self.info.settings.arch
print(">>>>>>>>>>>>>>>>>>>>>>>>>>>> os_build: ",str(self.info.settings.os_build))
print(">>>>>>>>>>>>>>>>>>>>>>>>>>>> arch_build: ",str(self.info.settings.arch_build))
```
Now test it with conan 1.3.3:
```
C:\Users\dbely\conan\conan-test>pip install conan==1.3.3
...
C:\Users\dbely\conan\conan-test>conan create . dbely/testing
...
Test/0.0.1@dbely/testing: The stored package has not changed
>>>>>>>>>>>>>>>>>>>>>>>>>>>> os_build: Windows
>>>>>>>>>>>>>>>>>>>>>>>>>>>> arch_build: x86_64
Test/0.0.1@dbely/testing: Installing package
...
C:\Users\dbely\conan\conan-test>conan search Test/0.0.1@dbely/testing
Existing packages for recipe Test/0.0.1@dbely/testing:
Package_ID: 456f15897172eef340fcbac8a70811f2beb26a93
[settings]
arch_build: x86_64
os_build: Windows
Outdated from recipe: False
```
Everything is good. Upgrade to conan 1.4.4 (all 1.4.x versions behave the same) and try again:
```
C:\Users\dbely\conan\conan-test>pip install conan==1.4.4
...
C:\Users\dbely\conan\conan-test>conan create . dbely/testing
...
Test/0.0.1@dbely/testing: A new conanfile.py version was exported
Test/0.0.1@dbely/testing: Folder: C:\Users\dbely\.conan\data\Test\0.0.1\dbely\testing\export
>>>>>>>>>>>>>>>>>>>>>>>>>>>> os_build: Windows
>>>>>>>>>>>>>>>>>>>>>>>>>>>> arch_build: x86_64
Test/0.0.1@dbely/testing: Installing package
>>>>>>>>>>>>>>>>>>>>>>>>>>>> os_build: None
>>>>>>>>>>>>>>>>>>>>>>>>>>>> arch_build: None
...
C:\Users\dbely\conan\conan-test>conan search Test/0.0.1@dbely/testing
Existing packages for recipe Test/0.0.1@dbely/testing:
Package_ID: 456f15897172eef340fcbac8a70811f2beb26a93
[settings]
arch_build: None
os_build: None
Outdated from recipe: False
```
Oops! `package_id()` is now called twice, and after the second call `os_build` and `arch_build` are set to `None`. Looks like a bug to me.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conans/client/graph/printer.py`
Content:
```
1 from conans.client.output import Color
2 from conans.model.ref import PackageReference
3 from conans.model.workspace import WORKSPACE_FILE
4
5
6 def print_graph(deps_graph, out):
7 all_nodes = []
8 ids = set()
9 for node in sorted(n for n in deps_graph.nodes if n.conan_ref):
10 package_id = PackageReference(node.conan_ref, node.conanfile.package_id())
11 if package_id not in ids:
12 all_nodes.append(node)
13 ids.add(package_id)
14 requires = [n for n in all_nodes]
15 out.writeln("Requirements", Color.BRIGHT_YELLOW)
16
17 def _recipes(nodes):
18 for node in nodes:
19 if node.remote == WORKSPACE_FILE:
20 from_text = "from '%s'" % WORKSPACE_FILE
21 else:
22 from_text = "from local cache" if not node.remote else "from '%s'" % node.remote.name
23 out.writeln(" %s %s" % (repr(node.conan_ref), from_text), Color.BRIGHT_CYAN)
24 _recipes(requires)
25 out.writeln("Packages", Color.BRIGHT_YELLOW)
26
27 def _packages(nodes):
28 for node in nodes:
29 ref, conanfile = node.conan_ref, node.conanfile
30 ref = PackageReference(ref, conanfile.info.package_id())
31 out.writeln(" %s" % (repr(ref)), Color.BRIGHT_CYAN)
32 _packages(requires)
33
34 out.writeln("")
35
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conans/client/graph/printer.py b/conans/client/graph/printer.py
--- a/conans/client/graph/printer.py
+++ b/conans/client/graph/printer.py
@@ -7,7 +7,7 @@
all_nodes = []
ids = set()
for node in sorted(n for n in deps_graph.nodes if n.conan_ref):
- package_id = PackageReference(node.conan_ref, node.conanfile.package_id())
+ package_id = PackageReference(node.conan_ref, node.conanfile.info.package_id())
if package_id not in ids:
all_nodes.append(node)
ids.add(package_id)
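The fix works because `node.conanfile.package_id()` invokes the recipe's user-defined hook again (which is why the `>>>>` lines print twice above), while `node.conanfile.info.package_id()` only reads the `ConanInfo` computed during the first run. A toy model of why the second invocation nulls the build settings (plain Python, not Conan's actual classes):

```python
class Info:
    def __init__(self):
        self.settings = {"os": "Windows", "arch": "x86_64"}

def package_id_hook(info):
    # Mirrors the recipe above: copy the values, then delete the originals.
    info.settings["os_build"] = info.settings.pop("os", None)
    info.settings["arch_build"] = info.settings.pop("arch", None)

info = Info()
package_id_hook(info)   # first run: os_build=Windows, arch_build=x86_64
package_id_hook(info)   # second run: pop() returns None -> both become None
print(info.settings)
```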
| {"golden_diff": "diff --git a/conans/client/graph/printer.py b/conans/client/graph/printer.py\n--- a/conans/client/graph/printer.py\n+++ b/conans/client/graph/printer.py\n@@ -7,7 +7,7 @@\n all_nodes = []\n ids = set()\n for node in sorted(n for n in deps_graph.nodes if n.conan_ref):\n- package_id = PackageReference(node.conan_ref, node.conanfile.package_id())\n+ package_id = PackageReference(node.conan_ref, node.conanfile.info.package_id())\n if package_id not in ids:\n all_nodes.append(node)\n ids.add(package_id)\n", "issue": "package_id() regression bug in conan 1.4.x?\nConsider the following `conanfile.py`:\r\n```python\r\nfrom conans import ConanFile\r\n\r\nclass TestConan(ConanFile):\r\n name = \"Test\"\r\n version = \"0.0.1\"\r\n settings = \"os\", \"arch\"\r\n\r\n def package_id(self):\r\n self.info.include_build_settings()\r\n self.info.settings.os_build = self.info.settings.os\r\n self.info.settings.arch_build = self.info.settings.arch\r\n del self.info.settings.os\r\n del self.info.settings.arch\r\n print(\">>>>>>>>>>>>>>>>>>>>>>>>>>>> os_build: \",str(self.info.settings.os_build))\r\n print(\">>>>>>>>>>>>>>>>>>>>>>>>>>>> arch_build: \",str(self.info.settings.arch_build))\r\n```\r\n\r\nNow test it with conan 1.3.3:\r\n```\r\nC:\\Users\\dbely\\conan\\conan-test>pip install conan==1.3.3\r\n...\r\nC:\\Users\\dbely\\conan\\conan-test>conan create . dbely/testing\r\n...\r\nTest/0.0.1@dbely/testing: The stored package has not changed\r\n>>>>>>>>>>>>>>>>>>>>>>>>>>>> os_build: Windows\r\n>>>>>>>>>>>>>>>>>>>>>>>>>>>> arch_build: x86_64\r\nTest/0.0.1@dbely/testing: Installing package\r\n...\r\nC:\\Users\\dbely\\conan\\conan-test>conan search Test/0.0.1@dbely/testing\r\nExisting packages for recipe Test/0.0.1@dbely/testing:\r\n\r\n Package_ID: 456f15897172eef340fcbac8a70811f2beb26a93\r\n [settings]\r\n arch_build: x86_64\r\n os_build: Windows\r\n Outdated from recipe: False\r\n```\r\nEverything is good. Upgrade to conan 1.4.4 (all 1.4.x versions behave the same) and try again:\r\n```\r\nC:\\Users\\dbely\\conan\\conan-test>pip install conan==1.4.4\r\n...\r\nC:\\Users\\dbely\\conan\\conan-test>conan create . dbely/testing\r\n...\r\nTest/0.0.1@dbely/testing: A new conanfile.py version was exported\r\nTest/0.0.1@dbely/testing: Folder: C:\\Users\\dbely\\.conan\\data\\Test\\0.0.1\\dbely\\testing\\export\r\n>>>>>>>>>>>>>>>>>>>>>>>>>>>> os_build: Windows\r\n>>>>>>>>>>>>>>>>>>>>>>>>>>>> arch_build: x86_64\r\nTest/0.0.1@dbely/testing: Installing package\r\n>>>>>>>>>>>>>>>>>>>>>>>>>>>> os_build: None\r\n>>>>>>>>>>>>>>>>>>>>>>>>>>>> arch_build: None\r\n...\r\nC:\\Users\\dbely\\conan\\conan-test>conan search Test/0.0.1@dbely/testing\r\nExisting packages for recipe Test/0.0.1@dbely/testing:\r\n\r\n Package_ID: 456f15897172eef340fcbac8a70811f2beb26a93\r\n [settings]\r\n arch_build: None\r\n os_build: None\r\n Outdated from recipe: False\r\n```\r\nOops! `package_id()` is now called twice and after the second call `os_build` and `arch_build` are set to `None`. Looks like a bug to me. 
\n", "before_files": [{"content": "from conans.client.output import Color\nfrom conans.model.ref import PackageReference\nfrom conans.model.workspace import WORKSPACE_FILE\n\n\ndef print_graph(deps_graph, out):\n all_nodes = []\n ids = set()\n for node in sorted(n for n in deps_graph.nodes if n.conan_ref):\n package_id = PackageReference(node.conan_ref, node.conanfile.package_id())\n if package_id not in ids:\n all_nodes.append(node)\n ids.add(package_id)\n requires = [n for n in all_nodes]\n out.writeln(\"Requirements\", Color.BRIGHT_YELLOW)\n\n def _recipes(nodes):\n for node in nodes:\n if node.remote == WORKSPACE_FILE:\n from_text = \"from '%s'\" % WORKSPACE_FILE\n else:\n from_text = \"from local cache\" if not node.remote else \"from '%s'\" % node.remote.name\n out.writeln(\" %s %s\" % (repr(node.conan_ref), from_text), Color.BRIGHT_CYAN)\n _recipes(requires)\n out.writeln(\"Packages\", Color.BRIGHT_YELLOW)\n\n def _packages(nodes):\n for node in nodes:\n ref, conanfile = node.conan_ref, node.conanfile\n ref = PackageReference(ref, conanfile.info.package_id())\n out.writeln(\" %s\" % (repr(ref)), Color.BRIGHT_CYAN)\n _packages(requires)\n\n out.writeln(\"\")\n", "path": "conans/client/graph/printer.py"}], "after_files": [{"content": "from conans.client.output import Color\nfrom conans.model.ref import PackageReference\nfrom conans.model.workspace import WORKSPACE_FILE\n\n\ndef print_graph(deps_graph, out):\n all_nodes = []\n ids = set()\n for node in sorted(n for n in deps_graph.nodes if n.conan_ref):\n package_id = PackageReference(node.conan_ref, node.conanfile.info.package_id())\n if package_id not in ids:\n all_nodes.append(node)\n ids.add(package_id)\n requires = [n for n in all_nodes]\n out.writeln(\"Requirements\", Color.BRIGHT_YELLOW)\n\n def _recipes(nodes):\n for node in nodes:\n if node.remote == WORKSPACE_FILE:\n from_text = \"from '%s'\" % WORKSPACE_FILE\n else:\n from_text = \"from local cache\" if not node.remote else \"from '%s'\" % node.remote.name\n out.writeln(\" %s %s\" % (repr(node.conan_ref), from_text), Color.BRIGHT_CYAN)\n _recipes(requires)\n out.writeln(\"Packages\", Color.BRIGHT_YELLOW)\n\n def _packages(nodes):\n for node in nodes:\n ref, conanfile = node.conan_ref, node.conanfile\n ref = PackageReference(ref, conanfile.info.package_id())\n out.writeln(\" %s\" % (repr(ref)), Color.BRIGHT_CYAN)\n _packages(requires)\n\n out.writeln(\"\")\n", "path": "conans/client/graph/printer.py"}]} | 1,372 | 137 |
gh_patches_debug_25780 | rasdani/github-patches | git_diff | pre-commit__pre-commit-1382 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
No colors when hooks are run by a git commit
Hi,
I use pre-commit at home on Linux, where it works perfectly. But at work I have a Windows PC, and there I have problems with the colorful output.
When the hooks are run by `tox` calling `pre-commit run` there are colors as usual. When the hooks are run by a `git commit` the colors are missing.
Concretely, I mean the green for 'Passed', red for 'Failed' and yellow/brown for 'Skipped' in the overview.
There is no difference if I run it via git-bash, cmd or PowerShell. There is also no difference whether I use the PyCharm built-in terminal or others.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/color.py`
Content:
```
1 import os
2 import sys
3
4 if sys.platform == 'win32': # pragma: no cover (windows)
5 def _enable() -> None:
6 from ctypes import POINTER
7 from ctypes import windll
8 from ctypes import WinError
9 from ctypes import WINFUNCTYPE
10 from ctypes.wintypes import BOOL
11 from ctypes.wintypes import DWORD
12 from ctypes.wintypes import HANDLE
13
14 STD_OUTPUT_HANDLE = -11
15 ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
16
17 def bool_errcheck(result, func, args):
18 if not result:
19 raise WinError()
20 return args
21
22 GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(
23 ('GetStdHandle', windll.kernel32), ((1, 'nStdHandle'),),
24 )
25
26 GetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))(
27 ('GetConsoleMode', windll.kernel32),
28 ((1, 'hConsoleHandle'), (2, 'lpMode')),
29 )
30 GetConsoleMode.errcheck = bool_errcheck
31
32 SetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, DWORD)(
33 ('SetConsoleMode', windll.kernel32),
34 ((1, 'hConsoleHandle'), (1, 'dwMode')),
35 )
36 SetConsoleMode.errcheck = bool_errcheck
37
38 # As of Windows 10, the Windows console supports (some) ANSI escape
39 # sequences, but it needs to be enabled using `SetConsoleMode` first.
40 #
41 # More info on the escape sequences supported:
42 # https://msdn.microsoft.com/en-us/library/windows/desktop/mt638032(v=vs.85).aspx
43 stdout = GetStdHandle(STD_OUTPUT_HANDLE)
44 flags = GetConsoleMode(stdout)
45 SetConsoleMode(stdout, flags | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
46
47 try:
48 _enable()
49 except OSError:
50 terminal_supports_color = False
51 else:
52 terminal_supports_color = True
53 else: # pragma: win32 no cover
54 terminal_supports_color = True
55
56 RED = '\033[41m'
57 GREEN = '\033[42m'
58 YELLOW = '\033[43;30m'
59 TURQUOISE = '\033[46;30m'
60 SUBTLE = '\033[2m'
61 NORMAL = '\033[m'
62
63
64 def format_color(text: str, color: str, use_color_setting: bool) -> str:
65 """Format text with color.
66
67 Args:
68 text - Text to be formatted with color if `use_color`
69 color - The color start string
70 use_color_setting - Whether or not to color
71 """
72 if use_color_setting:
73 return f'{color}{text}{NORMAL}'
74 else:
75 return text
76
77
78 COLOR_CHOICES = ('auto', 'always', 'never')
79
80
81 def use_color(setting: str) -> bool:
82 """Choose whether to use color based on the command argument.
83
84 Args:
85 setting - Either `auto`, `always`, or `never`
86 """
87 if setting not in COLOR_CHOICES:
88 raise ValueError(setting)
89
90 return (
91 setting == 'always' or (
92 setting == 'auto' and
93 sys.stdout.isatty() and
94 terminal_supports_color and
95 os.getenv('TERM') != 'dumb'
96 )
97 )
98
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pre_commit/color.py b/pre_commit/color.py
--- a/pre_commit/color.py
+++ b/pre_commit/color.py
@@ -11,7 +11,7 @@
from ctypes.wintypes import DWORD
from ctypes.wintypes import HANDLE
- STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
def bool_errcheck(result, func, args):
@@ -40,9 +40,9 @@
#
# More info on the escape sequences supported:
# https://msdn.microsoft.com/en-us/library/windows/desktop/mt638032(v=vs.85).aspx
- stdout = GetStdHandle(STD_OUTPUT_HANDLE)
- flags = GetConsoleMode(stdout)
- SetConsoleMode(stdout, flags | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
+ stderr = GetStdHandle(STD_ERROR_HANDLE)
+ flags = GetConsoleMode(stderr)
+ SetConsoleMode(stderr, flags | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
try:
_enable()
@@ -90,7 +90,7 @@
return (
setting == 'always' or (
setting == 'auto' and
- sys.stdout.isatty() and
+ sys.stderr.isatty() and
terminal_supports_color and
os.getenv('TERM') != 'dumb'
)
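The patch moves both the TTY check and the Windows virtual-terminal call from stdout to stderr, which is consistent with the report: when git invokes the hook, stdout is typically no longer attached to the console while stderr still is. A quick probe to confirm on any setup (illustrative; run it once via `pre-commit run` and once via `git commit`):

```python
import sys

# Report which streams are still real terminals in the current invocation.
print(f"stdout tty: {sys.stdout.isatty()}", file=sys.stderr)
print(f"stderr tty: {sys.stderr.isatty()}", file=sys.stderr)
```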
| {"golden_diff": "diff --git a/pre_commit/color.py b/pre_commit/color.py\n--- a/pre_commit/color.py\n+++ b/pre_commit/color.py\n@@ -11,7 +11,7 @@\n from ctypes.wintypes import DWORD\n from ctypes.wintypes import HANDLE\n \n- STD_OUTPUT_HANDLE = -11\n+ STD_ERROR_HANDLE = -12\n ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4\n \n def bool_errcheck(result, func, args):\n@@ -40,9 +40,9 @@\n #\n # More info on the escape sequences supported:\n # https://msdn.microsoft.com/en-us/library/windows/desktop/mt638032(v=vs.85).aspx\n- stdout = GetStdHandle(STD_OUTPUT_HANDLE)\n- flags = GetConsoleMode(stdout)\n- SetConsoleMode(stdout, flags | ENABLE_VIRTUAL_TERMINAL_PROCESSING)\n+ stderr = GetStdHandle(STD_ERROR_HANDLE)\n+ flags = GetConsoleMode(stderr)\n+ SetConsoleMode(stderr, flags | ENABLE_VIRTUAL_TERMINAL_PROCESSING)\n \n try:\n _enable()\n@@ -90,7 +90,7 @@\n return (\n setting == 'always' or (\n setting == 'auto' and\n- sys.stdout.isatty() and\n+ sys.stderr.isatty() and\n terminal_supports_color and\n os.getenv('TERM') != 'dumb'\n )\n", "issue": "No colors when hooks are run by a git commit\nHi,\r\nI use pre-commit at home on linux which works perfect. But at work I have a windows pc. Here I have problems with the colorfull output. \r\n\r\nWhen the hooks are run by `tox` calling `pre-commit run` there are colors as usual. When the hooks are run by a `git commit` the colors are missing.\r\n\r\nConcrete I mean the green for 'Passed', red for 'Failed ' and yellow/brown for 'Skipped' in the overview.\r\n\r\nThere is no difference if I run it via git-bash, cmd or powershell. Also there is no difference if I use the pycharm buildin terminal or others.\n", "before_files": [{"content": "import os\nimport sys\n\nif sys.platform == 'win32': # pragma: no cover (windows)\n def _enable() -> None:\n from ctypes import POINTER\n from ctypes import windll\n from ctypes import WinError\n from ctypes import WINFUNCTYPE\n from ctypes.wintypes import BOOL\n from ctypes.wintypes import DWORD\n from ctypes.wintypes import HANDLE\n\n STD_OUTPUT_HANDLE = -11\n ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4\n\n def bool_errcheck(result, func, args):\n if not result:\n raise WinError()\n return args\n\n GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(\n ('GetStdHandle', windll.kernel32), ((1, 'nStdHandle'),),\n )\n\n GetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))(\n ('GetConsoleMode', windll.kernel32),\n ((1, 'hConsoleHandle'), (2, 'lpMode')),\n )\n GetConsoleMode.errcheck = bool_errcheck\n\n SetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, DWORD)(\n ('SetConsoleMode', windll.kernel32),\n ((1, 'hConsoleHandle'), (1, 'dwMode')),\n )\n SetConsoleMode.errcheck = bool_errcheck\n\n # As of Windows 10, the Windows console supports (some) ANSI escape\n # sequences, but it needs to be enabled using `SetConsoleMode` first.\n #\n # More info on the escape sequences supported:\n # https://msdn.microsoft.com/en-us/library/windows/desktop/mt638032(v=vs.85).aspx\n stdout = GetStdHandle(STD_OUTPUT_HANDLE)\n flags = GetConsoleMode(stdout)\n SetConsoleMode(stdout, flags | ENABLE_VIRTUAL_TERMINAL_PROCESSING)\n\n try:\n _enable()\n except OSError:\n terminal_supports_color = False\n else:\n terminal_supports_color = True\nelse: # pragma: win32 no cover\n terminal_supports_color = True\n\nRED = '\\033[41m'\nGREEN = '\\033[42m'\nYELLOW = '\\033[43;30m'\nTURQUOISE = '\\033[46;30m'\nSUBTLE = '\\033[2m'\nNORMAL = '\\033[m'\n\n\ndef format_color(text: str, color: str, use_color_setting: bool) -> str:\n \"\"\"Format text with color.\n\n Args:\n text - Text to be 
formatted with color if `use_color`\n color - The color start string\n use_color_setting - Whether or not to color\n \"\"\"\n if use_color_setting:\n return f'{color}{text}{NORMAL}'\n else:\n return text\n\n\nCOLOR_CHOICES = ('auto', 'always', 'never')\n\n\ndef use_color(setting: str) -> bool:\n \"\"\"Choose whether to use color based on the command argument.\n\n Args:\n setting - Either `auto`, `always`, or `never`\n \"\"\"\n if setting not in COLOR_CHOICES:\n raise ValueError(setting)\n\n return (\n setting == 'always' or (\n setting == 'auto' and\n sys.stdout.isatty() and\n terminal_supports_color and\n os.getenv('TERM') != 'dumb'\n )\n )\n", "path": "pre_commit/color.py"}], "after_files": [{"content": "import os\nimport sys\n\nif sys.platform == 'win32': # pragma: no cover (windows)\n def _enable() -> None:\n from ctypes import POINTER\n from ctypes import windll\n from ctypes import WinError\n from ctypes import WINFUNCTYPE\n from ctypes.wintypes import BOOL\n from ctypes.wintypes import DWORD\n from ctypes.wintypes import HANDLE\n\n STD_ERROR_HANDLE = -12\n ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4\n\n def bool_errcheck(result, func, args):\n if not result:\n raise WinError()\n return args\n\n GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(\n ('GetStdHandle', windll.kernel32), ((1, 'nStdHandle'),),\n )\n\n GetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))(\n ('GetConsoleMode', windll.kernel32),\n ((1, 'hConsoleHandle'), (2, 'lpMode')),\n )\n GetConsoleMode.errcheck = bool_errcheck\n\n SetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, DWORD)(\n ('SetConsoleMode', windll.kernel32),\n ((1, 'hConsoleHandle'), (1, 'dwMode')),\n )\n SetConsoleMode.errcheck = bool_errcheck\n\n # As of Windows 10, the Windows console supports (some) ANSI escape\n # sequences, but it needs to be enabled using `SetConsoleMode` first.\n #\n # More info on the escape sequences supported:\n # https://msdn.microsoft.com/en-us/library/windows/desktop/mt638032(v=vs.85).aspx\n stderr = GetStdHandle(STD_ERROR_HANDLE)\n flags = GetConsoleMode(stderr)\n SetConsoleMode(stderr, flags | ENABLE_VIRTUAL_TERMINAL_PROCESSING)\n\n try:\n _enable()\n except OSError:\n terminal_supports_color = False\n else:\n terminal_supports_color = True\nelse: # pragma: win32 no cover\n terminal_supports_color = True\n\nRED = '\\033[41m'\nGREEN = '\\033[42m'\nYELLOW = '\\033[43;30m'\nTURQUOISE = '\\033[46;30m'\nSUBTLE = '\\033[2m'\nNORMAL = '\\033[m'\n\n\ndef format_color(text: str, color: str, use_color_setting: bool) -> str:\n \"\"\"Format text with color.\n\n Args:\n text - Text to be formatted with color if `use_color`\n color - The color start string\n use_color_setting - Whether or not to color\n \"\"\"\n if use_color_setting:\n return f'{color}{text}{NORMAL}'\n else:\n return text\n\n\nCOLOR_CHOICES = ('auto', 'always', 'never')\n\n\ndef use_color(setting: str) -> bool:\n \"\"\"Choose whether to use color based on the command argument.\n\n Args:\n setting - Either `auto`, `always`, or `never`\n \"\"\"\n if setting not in COLOR_CHOICES:\n raise ValueError(setting)\n\n return (\n setting == 'always' or (\n setting == 'auto' and\n sys.stderr.isatty() and\n terminal_supports_color and\n os.getenv('TERM') != 'dumb'\n )\n )\n", "path": "pre_commit/color.py"}]} | 1,331 | 304 |
gh_patches_debug_27472 | rasdani/github-patches | git_diff | liqd__a4-meinberlin-2899 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
voting in brainstorming
The votes are shown in a pop-up for ideas within brainstorming, although there is no voting.
<img width="332" alt="bildschirmfoto 2019-02-05 um 15 01 57" src="https://user-images.githubusercontent.com/35491681/52278354-20299380-2957-11e9-8368-dfb42c142a3a.png">
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `meinberlin/apps/newsletters/emails.py`
Content:
```
1 from email.mime.image import MIMEImage
2
3 from django.apps import apps
4 from django.conf import settings
5 from django.contrib import auth
6
7 from adhocracy4.emails.mixins import ReportToAdminEmailMixin
8 from meinberlin.apps.contrib.emails import Email
9
10 Organisation = apps.get_model(settings.A4_ORGANISATIONS_MODEL)
11 User = auth.get_user_model()
12
13
14 class NewsletterEmail(ReportToAdminEmailMixin, Email):
15 template_name = 'meinberlin_newsletters/emails/newsletter_email'
16
17 def dispatch(self, object, *args, **kwargs):
18 organisation_pk = kwargs.pop('organisation_pk', None)
19 organisation = None
20 if organisation_pk:
21 organisation = Organisation.objects.get(pk=organisation_pk)
22 kwargs['organisation'] = organisation
23
24 return super().dispatch(object, *args, **kwargs)
25
26 def get_reply_to(self):
27 return ['{} <{}>'.format(self.object.sender_name, self.object.sender)]
28
29 def get_receivers(self):
30 return User.objects\
31 .filter(id__in=self.kwargs['participant_ids'])\
32 .filter(get_newsletters=True)\
33 .filter(is_active=True)\
34 .distinct()
35
36 def get_attachments(self):
37 attachments = super().get_attachments()
38
39 organisation = self.kwargs['organisation']
40 if organisation and organisation.logo:
41 f = open(organisation.logo.path, 'rb')
42 logo = MIMEImage(f.read())
43 logo.add_header('Content-ID', '<{}>'.format('organisation_logo'))
44 attachments += [logo]
45
46 return attachments
47
48
49 class NewsletterEmailAll(NewsletterEmail):
50
51 def get_receivers(self):
52 return User.objects\
53 .filter(is_active=True)\
54 .distinct()
55
```
Path: `meinberlin/apps/users/admin.py`
Content:
```
1 from django.contrib import admin
2 from django.contrib import auth
3 from django.contrib.auth.models import Group
4 from django.utils.translation import ugettext_lazy as _
5
6 from . import models
7 from .forms import UserAdminForm
8
9
10 class UserAdmin(auth.admin.UserAdmin):
11 form = UserAdminForm
12 fieldsets = (
13 (None, {'fields': ('username', 'email', 'password', 'groups')}),
14 (_('Permissions'), {'fields': ('is_staff', 'is_superuser')}),
15 (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
16 )
17 add_fieldsets = (
18 (None, {
19 'classes': ('wide',),
20 'fields': ('username', 'email', 'password1', 'password2'),
21 }),
22 )
23 readonly_fields = ('date_joined', 'last_login')
24 list_display = (
25 'id', 'username', 'email', 'date_joined', 'last_login', 'is_staff',
26 'is_superuser'
27 )
28 list_filter = ('is_staff', 'is_superuser', 'last_login')
29 search_fields = ('username', 'email', 'id')
30
31
32 class GroupAdmin(admin.ModelAdmin):
33 fieldsets = (
34 (None, {'fields': ('name', )}),
35 )
36
37
38 admin.site.register(models.User, UserAdmin)
39 admin.site.unregister(Group)
40 admin.site.register(Group, GroupAdmin)
41
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/meinberlin/apps/newsletters/emails.py b/meinberlin/apps/newsletters/emails.py
--- a/meinberlin/apps/newsletters/emails.py
+++ b/meinberlin/apps/newsletters/emails.py
@@ -50,5 +50,6 @@
def get_receivers(self):
return User.objects\
+ .filter(get_newsletters=True)\
.filter(is_active=True)\
.distinct()
diff --git a/meinberlin/apps/users/admin.py b/meinberlin/apps/users/admin.py
--- a/meinberlin/apps/users/admin.py
+++ b/meinberlin/apps/users/admin.py
@@ -12,7 +12,8 @@
fieldsets = (
(None, {'fields': ('username', 'email', 'password', 'groups')}),
(_('Permissions'), {'fields': ('is_staff', 'is_superuser')}),
- (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
+ (_('Important dates'),
+ {'fields': ('last_login', 'date_joined', 'get_newsletters')}),
)
add_fieldsets = (
(None, {
@@ -20,10 +21,10 @@
'fields': ('username', 'email', 'password1', 'password2'),
}),
)
- readonly_fields = ('date_joined', 'last_login')
+ readonly_fields = ('date_joined', 'last_login', 'get_newsletters')
list_display = (
'id', 'username', 'email', 'date_joined', 'last_login', 'is_staff',
- 'is_superuser'
+ 'is_superuser', 'get_newsletters'
)
list_filter = ('is_staff', 'is_superuser', 'last_login')
search_fields = ('username', 'email', 'id')
| {"golden_diff": "diff --git a/meinberlin/apps/newsletters/emails.py b/meinberlin/apps/newsletters/emails.py\n--- a/meinberlin/apps/newsletters/emails.py\n+++ b/meinberlin/apps/newsletters/emails.py\n@@ -50,5 +50,6 @@\n \n def get_receivers(self):\n return User.objects\\\n+ .filter(get_newsletters=True)\\\n .filter(is_active=True)\\\n .distinct()\ndiff --git a/meinberlin/apps/users/admin.py b/meinberlin/apps/users/admin.py\n--- a/meinberlin/apps/users/admin.py\n+++ b/meinberlin/apps/users/admin.py\n@@ -12,7 +12,8 @@\n fieldsets = (\n (None, {'fields': ('username', 'email', 'password', 'groups')}),\n (_('Permissions'), {'fields': ('is_staff', 'is_superuser')}),\n- (_('Important dates'), {'fields': ('last_login', 'date_joined')}),\n+ (_('Important dates'),\n+ {'fields': ('last_login', 'date_joined', 'get_newsletters')}),\n )\n add_fieldsets = (\n (None, {\n@@ -20,10 +21,10 @@\n 'fields': ('username', 'email', 'password1', 'password2'),\n }),\n )\n- readonly_fields = ('date_joined', 'last_login')\n+ readonly_fields = ('date_joined', 'last_login', 'get_newsletters')\n list_display = (\n 'id', 'username', 'email', 'date_joined', 'last_login', 'is_staff',\n- 'is_superuser'\n+ 'is_superuser', 'get_newsletters'\n )\n list_filter = ('is_staff', 'is_superuser', 'last_login')\n search_fields = ('username', 'email', 'id')\n", "issue": "voting in brainstorming\nthe votings are shown on pop up for ideas within brainstorming although there is no voting.\r\n\r\n<img width=\"332\" alt=\"bildschirmfoto 2019-02-05 um 15 01 57\" src=\"https://user-images.githubusercontent.com/35491681/52278354-20299380-2957-11e9-8368-dfb42c142a3a.png\">\r\n\n", "before_files": [{"content": "from email.mime.image import MIMEImage\n\nfrom django.apps import apps\nfrom django.conf import settings\nfrom django.contrib import auth\n\nfrom adhocracy4.emails.mixins import ReportToAdminEmailMixin\nfrom meinberlin.apps.contrib.emails import Email\n\nOrganisation = apps.get_model(settings.A4_ORGANISATIONS_MODEL)\nUser = auth.get_user_model()\n\n\nclass NewsletterEmail(ReportToAdminEmailMixin, Email):\n template_name = 'meinberlin_newsletters/emails/newsletter_email'\n\n def dispatch(self, object, *args, **kwargs):\n organisation_pk = kwargs.pop('organisation_pk', None)\n organisation = None\n if organisation_pk:\n organisation = Organisation.objects.get(pk=organisation_pk)\n kwargs['organisation'] = organisation\n\n return super().dispatch(object, *args, **kwargs)\n\n def get_reply_to(self):\n return ['{} <{}>'.format(self.object.sender_name, self.object.sender)]\n\n def get_receivers(self):\n return User.objects\\\n .filter(id__in=self.kwargs['participant_ids'])\\\n .filter(get_newsletters=True)\\\n .filter(is_active=True)\\\n .distinct()\n\n def get_attachments(self):\n attachments = super().get_attachments()\n\n organisation = self.kwargs['organisation']\n if organisation and organisation.logo:\n f = open(organisation.logo.path, 'rb')\n logo = MIMEImage(f.read())\n logo.add_header('Content-ID', '<{}>'.format('organisation_logo'))\n attachments += [logo]\n\n return attachments\n\n\nclass NewsletterEmailAll(NewsletterEmail):\n\n def get_receivers(self):\n return User.objects\\\n .filter(is_active=True)\\\n .distinct()\n", "path": "meinberlin/apps/newsletters/emails.py"}, {"content": "from django.contrib import admin\nfrom django.contrib import auth\nfrom django.contrib.auth.models import Group\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom . 
import models\nfrom .forms import UserAdminForm\n\n\nclass UserAdmin(auth.admin.UserAdmin):\n form = UserAdminForm\n fieldsets = (\n (None, {'fields': ('username', 'email', 'password', 'groups')}),\n (_('Permissions'), {'fields': ('is_staff', 'is_superuser')}),\n (_('Important dates'), {'fields': ('last_login', 'date_joined')}),\n )\n add_fieldsets = (\n (None, {\n 'classes': ('wide',),\n 'fields': ('username', 'email', 'password1', 'password2'),\n }),\n )\n readonly_fields = ('date_joined', 'last_login')\n list_display = (\n 'id', 'username', 'email', 'date_joined', 'last_login', 'is_staff',\n 'is_superuser'\n )\n list_filter = ('is_staff', 'is_superuser', 'last_login')\n search_fields = ('username', 'email', 'id')\n\n\nclass GroupAdmin(admin.ModelAdmin):\n fieldsets = (\n (None, {'fields': ('name', )}),\n )\n\n\nadmin.site.register(models.User, UserAdmin)\nadmin.site.unregister(Group)\nadmin.site.register(Group, GroupAdmin)\n", "path": "meinberlin/apps/users/admin.py"}], "after_files": [{"content": "from email.mime.image import MIMEImage\n\nfrom django.apps import apps\nfrom django.conf import settings\nfrom django.contrib import auth\n\nfrom adhocracy4.emails.mixins import ReportToAdminEmailMixin\nfrom meinberlin.apps.contrib.emails import Email\n\nOrganisation = apps.get_model(settings.A4_ORGANISATIONS_MODEL)\nUser = auth.get_user_model()\n\n\nclass NewsletterEmail(ReportToAdminEmailMixin, Email):\n template_name = 'meinberlin_newsletters/emails/newsletter_email'\n\n def dispatch(self, object, *args, **kwargs):\n organisation_pk = kwargs.pop('organisation_pk', None)\n organisation = None\n if organisation_pk:\n organisation = Organisation.objects.get(pk=organisation_pk)\n kwargs['organisation'] = organisation\n\n return super().dispatch(object, *args, **kwargs)\n\n def get_reply_to(self):\n return ['{} <{}>'.format(self.object.sender_name, self.object.sender)]\n\n def get_receivers(self):\n return User.objects\\\n .filter(id__in=self.kwargs['participant_ids'])\\\n .filter(get_newsletters=True)\\\n .filter(is_active=True)\\\n .distinct()\n\n def get_attachments(self):\n attachments = super().get_attachments()\n\n organisation = self.kwargs['organisation']\n if organisation and organisation.logo:\n f = open(organisation.logo.path, 'rb')\n logo = MIMEImage(f.read())\n logo.add_header('Content-ID', '<{}>'.format('organisation_logo'))\n attachments += [logo]\n\n return attachments\n\n\nclass NewsletterEmailAll(NewsletterEmail):\n\n def get_receivers(self):\n return User.objects\\\n .filter(get_newsletters=True)\\\n .filter(is_active=True)\\\n .distinct()\n", "path": "meinberlin/apps/newsletters/emails.py"}, {"content": "from django.contrib import admin\nfrom django.contrib import auth\nfrom django.contrib.auth.models import Group\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom . 
import models\nfrom .forms import UserAdminForm\n\n\nclass UserAdmin(auth.admin.UserAdmin):\n form = UserAdminForm\n fieldsets = (\n (None, {'fields': ('username', 'email', 'password', 'groups')}),\n (_('Permissions'), {'fields': ('is_staff', 'is_superuser')}),\n (_('Important dates'),\n {'fields': ('last_login', 'date_joined', 'get_newsletters')}),\n )\n add_fieldsets = (\n (None, {\n 'classes': ('wide',),\n 'fields': ('username', 'email', 'password1', 'password2'),\n }),\n )\n readonly_fields = ('date_joined', 'last_login', 'get_newsletters')\n list_display = (\n 'id', 'username', 'email', 'date_joined', 'last_login', 'is_staff',\n 'is_superuser', 'get_newsletters'\n )\n list_filter = ('is_staff', 'is_superuser', 'last_login')\n search_fields = ('username', 'email', 'id')\n\n\nclass GroupAdmin(admin.ModelAdmin):\n fieldsets = (\n (None, {'fields': ('name', )}),\n )\n\n\nadmin.site.register(models.User, UserAdmin)\nadmin.site.unregister(Group)\nadmin.site.register(Group, GroupAdmin)\n", "path": "meinberlin/apps/users/admin.py"}]} | 1,236 | 401 |
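The two hunks in the record above reduce to one idea: the `get_newsletters` opt-in flag must gate the receiver query and be surfaced in the admin. A minimal sketch of the corrected query, assuming a custom user model that defines that boolean field (as the golden diff implies):

```python
# Sketch only: `get_newsletters` is the opt-in flag from the patch, so the
# queryset keeps consenting, active users and nothing else.
from django.contrib import auth

User = auth.get_user_model()

def newsletter_receivers():
    return (
        User.objects
        .filter(get_newsletters=True)
        .filter(is_active=True)
        .distinct()
    )
```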
gh_patches_debug_20696 | rasdani/github-patches | git_diff | DataDog__dd-trace-py-887 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
tests.internal.runtime.test_runtime_metrics.TestRuntimeWorker.test_worker_metrics fails randomly
```
def test_worker_metrics(self):
self.tracer.configure(collect_metrics=True)
with self.override_global_tracer(self.tracer):
self.tracer._dogstatsd_client = DogStatsd()
self.tracer._dogstatsd_client.socket = FakeSocket()
root = self.start_span('parent', service='parent')
context = root.context
self.start_span('child', service='child', child_of=context)
self.worker = RuntimeWorker(self.tracer._dogstatsd_client)
self.worker.start()
self.worker.stop()
# get all received metrics
received = []
while True:
new = self.tracer._dogstatsd_client.socket.recv()
if not new:
break
received.append(new)
# DEV: sleep since metrics will still be getting collected and written
time.sleep(.5)
# expect received all default metrics
> self.assertEqual(len(received), len(DEFAULT_RUNTIME_METRICS))
E AssertionError: 0 != 10
tests/internal/runtime/test_runtime_metrics.py:75: AssertionError
```
https://circleci.com/gh/DataDog/dd-trace-py/114364
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ddtrace/internal/runtime/runtime_metrics.py`
Content:
```
1 import threading
2 import time
3 import itertools
4
5 from ..logger import get_logger
6 from .constants import (
7 DEFAULT_RUNTIME_METRICS,
8 DEFAULT_RUNTIME_TAGS,
9 )
10 from .metric_collectors import (
11 GCRuntimeMetricCollector,
12 PSUtilRuntimeMetricCollector,
13 )
14 from .tag_collectors import (
15 TracerTagCollector,
16 )
17
18 log = get_logger(__name__)
19
20
21 class RuntimeCollectorsIterable(object):
22 def __init__(self, enabled=None):
23 self._enabled = enabled or self.ENABLED
24 # Initialize the collectors.
25 self._collectors = [c() for c in self.COLLECTORS]
26
27 def __iter__(self):
28 collected = (
29 collector.collect(self._enabled)
30 for collector in self._collectors
31 )
32 return itertools.chain.from_iterable(collected)
33
34 def __repr__(self):
35 return '{}(enabled={})'.format(
36 self.__class__.__name__,
37 self._enabled,
38 )
39
40
41 class RuntimeTags(RuntimeCollectorsIterable):
42 ENABLED = DEFAULT_RUNTIME_TAGS
43 COLLECTORS = [
44 TracerTagCollector,
45 ]
46
47
48 class RuntimeMetrics(RuntimeCollectorsIterable):
49 ENABLED = DEFAULT_RUNTIME_METRICS
50 COLLECTORS = [
51 GCRuntimeMetricCollector,
52 PSUtilRuntimeMetricCollector,
53 ]
54
55
56 class RuntimeWorker(object):
57 """ Worker thread for collecting and writing runtime metrics to a DogStatsd
58 client.
59 """
60
61 FLUSH_INTERVAL = 10
62
63 def __init__(self, statsd_client, flush_interval=None):
64 self._stay_alive = None
65 self._thread = None
66 self._flush_interval = flush_interval or self.FLUSH_INTERVAL
67 self._statsd_client = statsd_client
68 self._runtime_metrics = RuntimeMetrics()
69
70 def _target(self):
71 while self._stay_alive:
72 self.flush()
73 time.sleep(self._flush_interval)
74
75 def start(self):
76 if not self._thread:
77 log.debug('Starting {}'.format(self))
78 self._stay_alive = True
79 self._thread = threading.Thread(target=self._target)
80 self._thread.setDaemon(True)
81 self._thread.start()
82
83 def stop(self):
84 if self._thread and self._stay_alive:
85 log.debug('Stopping {}'.format(self))
86 self._stay_alive = False
87
88 def _write_metric(self, key, value):
89 log.debug('Writing metric {}:{}'.format(key, value))
90 self._statsd_client.gauge(key, value)
91
92 def flush(self):
93 if not self._statsd_client:
94 log.warn('Attempted flush with uninitialized or failed statsd client')
95 return
96
97 for key, value in self._runtime_metrics:
98 self._write_metric(key, value)
99
100 def reset(self):
101 self._runtime_metrics = RuntimeMetrics()
102
103 def __repr__(self):
104 return '{}(runtime_metrics={})'.format(
105 self.__class__.__name__,
106 self._runtime_metrics,
107 )
108
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ddtrace/internal/runtime/runtime_metrics.py b/ddtrace/internal/runtime/runtime_metrics.py
--- a/ddtrace/internal/runtime/runtime_metrics.py
+++ b/ddtrace/internal/runtime/runtime_metrics.py
@@ -60,10 +60,10 @@
FLUSH_INTERVAL = 10
- def __init__(self, statsd_client, flush_interval=None):
+ def __init__(self, statsd_client, flush_interval=FLUSH_INTERVAL):
self._stay_alive = None
self._thread = None
- self._flush_interval = flush_interval or self.FLUSH_INTERVAL
+ self._flush_interval = flush_interval
self._statsd_client = statsd_client
self._runtime_metrics = RuntimeMetrics()
@@ -85,6 +85,10 @@
log.debug('Stopping {}'.format(self))
self._stay_alive = False
+ def join(self, timeout=None):
+ if self._thread:
+ return self._thread.join(timeout)
+
def _write_metric(self, key, value):
log.debug('Writing metric {}:{}'.format(key, value))
self._statsd_client.gauge(key, value)
| {"golden_diff": "diff --git a/ddtrace/internal/runtime/runtime_metrics.py b/ddtrace/internal/runtime/runtime_metrics.py\n--- a/ddtrace/internal/runtime/runtime_metrics.py\n+++ b/ddtrace/internal/runtime/runtime_metrics.py\n@@ -60,10 +60,10 @@\n \n FLUSH_INTERVAL = 10\n \n- def __init__(self, statsd_client, flush_interval=None):\n+ def __init__(self, statsd_client, flush_interval=FLUSH_INTERVAL):\n self._stay_alive = None\n self._thread = None\n- self._flush_interval = flush_interval or self.FLUSH_INTERVAL\n+ self._flush_interval = flush_interval\n self._statsd_client = statsd_client\n self._runtime_metrics = RuntimeMetrics()\n \n@@ -85,6 +85,10 @@\n log.debug('Stopping {}'.format(self))\n self._stay_alive = False\n \n+ def join(self, timeout=None):\n+ if self._thread:\n+ return self._thread.join(timeout)\n+\n def _write_metric(self, key, value):\n log.debug('Writing metric {}:{}'.format(key, value))\n self._statsd_client.gauge(key, value)\n", "issue": "tests.internal.runtime.test_runtime_metrics.TestRuntimeWorker.test_worker_metrics fails randomly\n```\r\n def test_worker_metrics(self):\r\n self.tracer.configure(collect_metrics=True)\r\n \r\n with self.override_global_tracer(self.tracer):\r\n self.tracer._dogstatsd_client = DogStatsd()\r\n self.tracer._dogstatsd_client.socket = FakeSocket()\r\n \r\n root = self.start_span('parent', service='parent')\r\n context = root.context\r\n self.start_span('child', service='child', child_of=context)\r\n \r\n self.worker = RuntimeWorker(self.tracer._dogstatsd_client)\r\n self.worker.start()\r\n self.worker.stop()\r\n \r\n # get all received metrics\r\n received = []\r\n while True:\r\n new = self.tracer._dogstatsd_client.socket.recv()\r\n if not new:\r\n break\r\n \r\n received.append(new)\r\n # DEV: sleep since metrics will still be getting collected and written\r\n time.sleep(.5)\r\n \r\n # expect received all default metrics\r\n> self.assertEqual(len(received), len(DEFAULT_RUNTIME_METRICS))\r\nE AssertionError: 0 != 10\r\n\r\ntests/internal/runtime/test_runtime_metrics.py:75: AssertionError\r\n```\r\n\r\nhttps://circleci.com/gh/DataDog/dd-trace-py/114364\n", "before_files": [{"content": "import threading\nimport time\nimport itertools\n\nfrom ..logger import get_logger\nfrom .constants import (\n DEFAULT_RUNTIME_METRICS,\n DEFAULT_RUNTIME_TAGS,\n)\nfrom .metric_collectors import (\n GCRuntimeMetricCollector,\n PSUtilRuntimeMetricCollector,\n)\nfrom .tag_collectors import (\n TracerTagCollector,\n)\n\nlog = get_logger(__name__)\n\n\nclass RuntimeCollectorsIterable(object):\n def __init__(self, enabled=None):\n self._enabled = enabled or self.ENABLED\n # Initialize the collectors.\n self._collectors = [c() for c in self.COLLECTORS]\n\n def __iter__(self):\n collected = (\n collector.collect(self._enabled)\n for collector in self._collectors\n )\n return itertools.chain.from_iterable(collected)\n\n def __repr__(self):\n return '{}(enabled={})'.format(\n self.__class__.__name__,\n self._enabled,\n )\n\n\nclass RuntimeTags(RuntimeCollectorsIterable):\n ENABLED = DEFAULT_RUNTIME_TAGS\n COLLECTORS = [\n TracerTagCollector,\n ]\n\n\nclass RuntimeMetrics(RuntimeCollectorsIterable):\n ENABLED = DEFAULT_RUNTIME_METRICS\n COLLECTORS = [\n GCRuntimeMetricCollector,\n PSUtilRuntimeMetricCollector,\n ]\n\n\nclass RuntimeWorker(object):\n \"\"\" Worker thread for collecting and writing runtime metrics to a DogStatsd\n client.\n \"\"\"\n\n FLUSH_INTERVAL = 10\n\n def __init__(self, statsd_client, flush_interval=None):\n self._stay_alive = None\n self._thread = 
None\n self._flush_interval = flush_interval or self.FLUSH_INTERVAL\n self._statsd_client = statsd_client\n self._runtime_metrics = RuntimeMetrics()\n\n def _target(self):\n while self._stay_alive:\n self.flush()\n time.sleep(self._flush_interval)\n\n def start(self):\n if not self._thread:\n log.debug('Starting {}'.format(self))\n self._stay_alive = True\n self._thread = threading.Thread(target=self._target)\n self._thread.setDaemon(True)\n self._thread.start()\n\n def stop(self):\n if self._thread and self._stay_alive:\n log.debug('Stopping {}'.format(self))\n self._stay_alive = False\n\n def _write_metric(self, key, value):\n log.debug('Writing metric {}:{}'.format(key, value))\n self._statsd_client.gauge(key, value)\n\n def flush(self):\n if not self._statsd_client:\n log.warn('Attempted flush with uninitialized or failed statsd client')\n return\n\n for key, value in self._runtime_metrics:\n self._write_metric(key, value)\n\n def reset(self):\n self._runtime_metrics = RuntimeMetrics()\n\n def __repr__(self):\n return '{}(runtime_metrics={})'.format(\n self.__class__.__name__,\n self._runtime_metrics,\n )\n", "path": "ddtrace/internal/runtime/runtime_metrics.py"}], "after_files": [{"content": "import threading\nimport time\nimport itertools\n\nfrom ..logger import get_logger\nfrom .constants import (\n DEFAULT_RUNTIME_METRICS,\n DEFAULT_RUNTIME_TAGS,\n)\nfrom .metric_collectors import (\n GCRuntimeMetricCollector,\n PSUtilRuntimeMetricCollector,\n)\nfrom .tag_collectors import (\n TracerTagCollector,\n)\n\nlog = get_logger(__name__)\n\n\nclass RuntimeCollectorsIterable(object):\n def __init__(self, enabled=None):\n self._enabled = enabled or self.ENABLED\n # Initialize the collectors.\n self._collectors = [c() for c in self.COLLECTORS]\n\n def __iter__(self):\n collected = (\n collector.collect(self._enabled)\n for collector in self._collectors\n )\n return itertools.chain.from_iterable(collected)\n\n def __repr__(self):\n return '{}(enabled={})'.format(\n self.__class__.__name__,\n self._enabled,\n )\n\n\nclass RuntimeTags(RuntimeCollectorsIterable):\n ENABLED = DEFAULT_RUNTIME_TAGS\n COLLECTORS = [\n TracerTagCollector,\n ]\n\n\nclass RuntimeMetrics(RuntimeCollectorsIterable):\n ENABLED = DEFAULT_RUNTIME_METRICS\n COLLECTORS = [\n GCRuntimeMetricCollector,\n PSUtilRuntimeMetricCollector,\n ]\n\n\nclass RuntimeWorker(object):\n \"\"\" Worker thread for collecting and writing runtime metrics to a DogStatsd\n client.\n \"\"\"\n\n FLUSH_INTERVAL = 10\n\n def __init__(self, statsd_client, flush_interval=FLUSH_INTERVAL):\n self._stay_alive = None\n self._thread = None\n self._flush_interval = flush_interval\n self._statsd_client = statsd_client\n self._runtime_metrics = RuntimeMetrics()\n\n def _target(self):\n while self._stay_alive:\n self.flush()\n time.sleep(self._flush_interval)\n\n def start(self):\n if not self._thread:\n log.debug('Starting {}'.format(self))\n self._stay_alive = True\n self._thread = threading.Thread(target=self._target)\n self._thread.setDaemon(True)\n self._thread.start()\n\n def stop(self):\n if self._thread and self._stay_alive:\n log.debug('Stopping {}'.format(self))\n self._stay_alive = False\n\n def join(self, timeout=None):\n if self._thread:\n return self._thread.join(timeout)\n\n def _write_metric(self, key, value):\n log.debug('Writing metric {}:{}'.format(key, value))\n self._statsd_client.gauge(key, value)\n\n def flush(self):\n if not self._statsd_client:\n log.warn('Attempted flush with uninitialized or failed statsd client')\n return\n\n for key, 
value in self._runtime_metrics:\n self._write_metric(key, value)\n\n def reset(self):\n self._runtime_metrics = RuntimeMetrics()\n\n def __repr__(self):\n return '{}(runtime_metrics={})'.format(\n self.__class__.__name__,\n self._runtime_metrics,\n )\n", "path": "ddtrace/internal/runtime/runtime_metrics.py"}]} | 1,384 | 253 |
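The flakiness in the record above stems from `stop()` returning before the worker thread has written anything; the patch therefore exposes `join()` so callers can wait for the flush loop to exit before asserting. A stripped-down sketch of that start/stop/join pattern, using a hypothetical `Worker` class rather than the real ddtrace API:

```python
import threading
import time

class Worker:
    """Background loop whose shutdown can be awaited deterministically."""

    def __init__(self, flush_interval=0.1):
        self._stay_alive = False
        self._flush_interval = flush_interval
        self._thread = None

    def _target(self):
        while self._stay_alive:
            # a real worker would flush metrics here
            time.sleep(self._flush_interval)

    def start(self):
        if not self._thread:
            self._stay_alive = True
            self._thread = threading.Thread(target=self._target, daemon=True)
            self._thread.start()

    def stop(self):
        self._stay_alive = False

    def join(self, timeout=None):
        # blocks until the loop has really finished, removing the test race
        if self._thread:
            return self._thread.join(timeout)
```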
gh_patches_debug_14602 | rasdani/github-patches | git_diff | akvo__akvo-rsr-3173 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error in disaggregation view
The PGView for disaggregation is incorrect. It includes data from all updates rather than just approved updates.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `akvo/rsr/models/result/indicator_period_aggregation.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # Akvo Reporting is covered by the GNU Affero General Public License.
4 # See more details in the license.txt file located at the root folder of the Akvo RSR module.
5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
6
7 from django.db import models
8
9 from django_pgviews import view as pg
10
11
12 ACTUAL_VALUE_SQL = """
13 SELECT
14 -- row_number() OVER... creates an artificial "pk" column, without which Django will protest
15 row_number() OVER (ORDER BY period.id) AS id,
16 period.id AS period_id,
17 indicator.measure as measure,
18 sum((update.value) :: DECIMAL(20,2)) AS value,
19 sum((update.numerator) :: DECIMAL(20,2)) AS numerator,
20 sum((update.denominator) :: DECIMAL(20,2)) AS denominator
21 FROM
22 rsr_indicatorperiod period,
23 rsr_indicator indicator,
24 rsr_indicatorperioddata update
25 WHERE
26 (
27 (((indicator.id = period.indicator_id) AND
28 (period.id = update.period_id)) AND
29 ((update.status) :: TEXT = 'A' :: TEXT)) AND
30 ((update.value) :: TEXT ~ '^\d+\.?\d{0,2}$' :: TEXT OR update.value IS NULL)
31 )
32 GROUP BY period.id, indicator.measure;
33 """
34
35
36 class PeriodActualValue(pg.View):
37 # on_delete=models.DO_NOTHING is needed to prevent problems with PG trying to delete views' data
38 period = models.ForeignKey('IndicatorPeriod', on_delete=models.DO_NOTHING)
39 measure = models.CharField(max_length=1)
40 value = models.IntegerField()
41 numerator = models.IntegerField()
42 denominator = models.IntegerField()
43
44 sql = ACTUAL_VALUE_SQL
45
46 class Meta:
47 app_label = 'rsr'
48 db_table = 'rsr_indicator_period_actual_value'
49 managed = False
50
51
52 DISAGG_SQL = """
53 WITH aggregated_disaggs AS (
54 SELECT
55 dimension_id,
56 sum(("value") :: DECIMAL(20,2)) AS value,
57 sum((numerator) :: DECIMAL(20,2)) AS numerator,
58 sum((denominator) :: DECIMAL(20,2)) AS denominator
59 FROM
60 rsr_disaggregation
61 GROUP BY
62 dimension_id
63 ),
64 period_disaggs AS (
65 SELECT DISTINCT
66 indicator.id AS indicator_id,
67 period.id AS period_id,
68 dimension.name AS dimension_name,
69 dimension.value AS dimension_value,
70 agg.value,
71 agg.numerator,
72 agg.denominator
73 FROM
74 rsr_indicator indicator,
75 rsr_indicatorperiod period,
76 rsr_indicatorperioddata update,
77 aggregated_disaggs agg,
78 rsr_indicatordimension dimension
79 WHERE
80 indicator.id = period.indicator_id AND
81 period.id = update.period_id AND
82 indicator.id = dimension.indicator_id AND
83 dimension.id = agg.dimension_id
84 )
85 SELECT
86 row_number() OVER (ORDER BY indicator_id) AS id,
87 *
88 FROM period_disaggs
89 """
90
91
92 class PeriodDisaggregation(pg.View):
93 indicator = models.ForeignKey('Indicator', on_delete=models.DO_NOTHING)
94 period = models.ForeignKey('IndicatorPeriod', on_delete=models.DO_NOTHING)
95 dimension_name = models.CharField(max_length=100)
96 dimension_value = models.CharField(max_length=100)
97 value = models.IntegerField()
98 numerator = models.IntegerField()
99 denominator = models.IntegerField()
100
101 sql = DISAGG_SQL
102
103 class Meta:
104 app_label = 'rsr'
105 db_table = 'rsr_indicator_period_disaggregation'
106 managed = False
107
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/akvo/rsr/models/result/indicator_period_aggregation.py b/akvo/rsr/models/result/indicator_period_aggregation.py
--- a/akvo/rsr/models/result/indicator_period_aggregation.py
+++ b/akvo/rsr/models/result/indicator_period_aggregation.py
@@ -52,12 +52,16 @@
DISAGG_SQL = """
WITH aggregated_disaggs AS (
SELECT
- dimension_id,
- sum(("value") :: DECIMAL(20,2)) AS value,
- sum((numerator) :: DECIMAL(20,2)) AS numerator,
- sum((denominator) :: DECIMAL(20,2)) AS denominator
+ disagg.dimension_id AS dimension_id,
+ sum((disagg.value) :: DECIMAL(20,2)) AS value,
+ sum((disagg.numerator) :: DECIMAL(20,2)) AS numerator,
+ sum((disagg.denominator) :: DECIMAL(20,2)) AS denominator
FROM
- rsr_disaggregation
+ rsr_disaggregation disagg,
+ rsr_indicatorperioddata "update"
+ WHERE
+ update.status = 'A' AND
+ disagg.update_id = update.id
GROUP BY
dimension_id
),
| {"golden_diff": "diff --git a/akvo/rsr/models/result/indicator_period_aggregation.py b/akvo/rsr/models/result/indicator_period_aggregation.py\n--- a/akvo/rsr/models/result/indicator_period_aggregation.py\n+++ b/akvo/rsr/models/result/indicator_period_aggregation.py\n@@ -52,12 +52,16 @@\n DISAGG_SQL = \"\"\"\n WITH aggregated_disaggs AS (\n SELECT\n- dimension_id,\n- sum((\"value\") :: DECIMAL(20,2)) AS value,\n- sum((numerator) :: DECIMAL(20,2)) AS numerator,\n- sum((denominator) :: DECIMAL(20,2)) AS denominator\n+ disagg.dimension_id AS dimension_id,\n+ sum((disagg.value) :: DECIMAL(20,2)) AS value,\n+ sum((disagg.numerator) :: DECIMAL(20,2)) AS numerator,\n+ sum((disagg.denominator) :: DECIMAL(20,2)) AS denominator\n FROM\n- rsr_disaggregation\n+ rsr_disaggregation disagg,\n+ rsr_indicatorperioddata \"update\"\n+ WHERE\n+ update.status = 'A' AND\n+ disagg.update_id = update.id\n GROUP BY\n dimension_id\n ),\n", "issue": "Error in disaggregation view\nThe PGView for disaggregation is incorrect. It includes data from all updates rather than just approved updates.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo Reporting is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom django.db import models\n\nfrom django_pgviews import view as pg\n\n\nACTUAL_VALUE_SQL = \"\"\"\n SELECT\n -- row_number() OVER... creates an artificial \"pk\" column, without which Django will protest\n row_number() OVER (ORDER BY period.id) AS id,\n period.id AS period_id,\n indicator.measure as measure,\n sum((update.value) :: DECIMAL(20,2)) AS value,\n sum((update.numerator) :: DECIMAL(20,2)) AS numerator,\n sum((update.denominator) :: DECIMAL(20,2)) AS denominator\n FROM\n rsr_indicatorperiod period,\n rsr_indicator indicator,\n rsr_indicatorperioddata update\n WHERE\n (\n (((indicator.id = period.indicator_id) AND\n (period.id = update.period_id)) AND\n ((update.status) :: TEXT = 'A' :: TEXT)) AND\n ((update.value) :: TEXT ~ '^\\d+\\.?\\d{0,2}$' :: TEXT OR update.value IS NULL)\n )\n GROUP BY period.id, indicator.measure;\n\"\"\"\n\n\nclass PeriodActualValue(pg.View):\n # on_delete=models.DO_NOTHING is needed to prevent problems with PG trying to delete views' data\n period = models.ForeignKey('IndicatorPeriod', on_delete=models.DO_NOTHING)\n measure = models.CharField(max_length=1)\n value = models.IntegerField()\n numerator = models.IntegerField()\n denominator = models.IntegerField()\n\n sql = ACTUAL_VALUE_SQL\n\n class Meta:\n app_label = 'rsr'\n db_table = 'rsr_indicator_period_actual_value'\n managed = False\n\n\nDISAGG_SQL = \"\"\"\n WITH aggregated_disaggs AS (\n SELECT\n dimension_id,\n sum((\"value\") :: DECIMAL(20,2)) AS value,\n sum((numerator) :: DECIMAL(20,2)) AS numerator,\n sum((denominator) :: DECIMAL(20,2)) AS denominator\n FROM\n rsr_disaggregation\n GROUP BY\n dimension_id\n ),\n period_disaggs AS (\n SELECT DISTINCT\n indicator.id AS indicator_id,\n period.id AS period_id,\n dimension.name AS dimension_name,\n dimension.value AS dimension_value,\n agg.value,\n agg.numerator,\n agg.denominator\n FROM\n rsr_indicator indicator,\n rsr_indicatorperiod period,\n rsr_indicatorperioddata update,\n aggregated_disaggs agg,\n rsr_indicatordimension dimension\n WHERE\n indicator.id = period.indicator_id AND\n period.id = update.period_id AND\n indicator.id = dimension.indicator_id AND\n 
dimension.id = agg.dimension_id\n )\n SELECT\n row_number() OVER (ORDER BY indicator_id) AS id,\n *\n FROM period_disaggs\n\"\"\"\n\n\nclass PeriodDisaggregation(pg.View):\n indicator = models.ForeignKey('Indicator', on_delete=models.DO_NOTHING)\n period = models.ForeignKey('IndicatorPeriod', on_delete=models.DO_NOTHING)\n dimension_name = models.CharField(max_length=100)\n dimension_value = models.CharField(max_length=100)\n value = models.IntegerField()\n numerator = models.IntegerField()\n denominator = models.IntegerField()\n\n sql = DISAGG_SQL\n\n class Meta:\n app_label = 'rsr'\n db_table = 'rsr_indicator_period_disaggregation'\n managed = False\n", "path": "akvo/rsr/models/result/indicator_period_aggregation.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo Reporting is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom django.db import models\n\nfrom django_pgviews import view as pg\n\n\nACTUAL_VALUE_SQL = \"\"\"\n SELECT\n -- row_number() OVER... creates an artificial \"pk\" column, without which Django will protest\n row_number() OVER (ORDER BY period.id) AS id,\n period.id AS period_id,\n indicator.measure as measure,\n sum((update.value) :: DECIMAL(20,2)) AS value,\n sum((update.numerator) :: DECIMAL(20,2)) AS numerator,\n sum((update.denominator) :: DECIMAL(20,2)) AS denominator\n FROM\n rsr_indicatorperiod period,\n rsr_indicator indicator,\n rsr_indicatorperioddata update\n WHERE\n (\n (((indicator.id = period.indicator_id) AND\n (period.id = update.period_id)) AND\n ((update.status) :: TEXT = 'A' :: TEXT)) AND\n ((update.value) :: TEXT ~ '^\\d+\\.?\\d{0,2}$' :: TEXT OR update.value IS NULL)\n )\n GROUP BY period.id, indicator.measure;\n\"\"\"\n\n\nclass PeriodActualValue(pg.View):\n # on_delete=models.DO_NOTHING is needed to prevent problems with PG trying to delete views' data\n period = models.ForeignKey('IndicatorPeriod', on_delete=models.DO_NOTHING)\n measure = models.CharField(max_length=1)\n value = models.IntegerField()\n numerator = models.IntegerField()\n denominator = models.IntegerField()\n\n sql = ACTUAL_VALUE_SQL\n\n class Meta:\n app_label = 'rsr'\n db_table = 'rsr_indicator_period_actual_value'\n managed = False\n\n\nDISAGG_SQL = \"\"\"\n WITH aggregated_disaggs AS (\n SELECT\n disagg.dimension_id AS dimension_id,\n sum((disagg.value) :: DECIMAL(20,2)) AS value,\n sum((disagg.numerator) :: DECIMAL(20,2)) AS numerator,\n sum((disagg.denominator) :: DECIMAL(20,2)) AS denominator\n FROM\n rsr_disaggregation disagg,\n rsr_indicatorperioddata \"update\"\n WHERE\n update.status = 'A' AND\n disagg.update_id = update.id\n GROUP BY\n dimension_id\n ),\n period_disaggs AS (\n SELECT DISTINCT\n indicator.id AS indicator_id,\n period.id AS period_id,\n dimension.name AS dimension_name,\n dimension.value AS dimension_value,\n agg.value,\n agg.numerator,\n agg.denominator\n FROM\n rsr_indicator indicator,\n rsr_indicatorperiod period,\n rsr_indicatorperioddata update,\n aggregated_disaggs agg,\n rsr_indicatordimension dimension\n WHERE\n indicator.id = period.indicator_id AND\n period.id = update.period_id AND\n indicator.id = dimension.indicator_id AND\n dimension.id = agg.dimension_id\n )\n SELECT\n row_number() OVER (ORDER BY indicator_id) AS id,\n *\n FROM period_disaggs\n\"\"\"\n\n\nclass PeriodDisaggregation(pg.View):\n indicator = 
models.ForeignKey('Indicator', on_delete=models.DO_NOTHING)\n period = models.ForeignKey('IndicatorPeriod', on_delete=models.DO_NOTHING)\n dimension_name = models.CharField(max_length=100)\n dimension_value = models.CharField(max_length=100)\n value = models.IntegerField()\n numerator = models.IntegerField()\n denominator = models.IntegerField()\n\n sql = DISAGG_SQL\n\n class Meta:\n app_label = 'rsr'\n db_table = 'rsr_indicator_period_disaggregation'\n managed = False\n", "path": "akvo/rsr/models/result/indicator_period_aggregation.py"}]} | 1,312 | 293 |
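The essential change in the record above is that the disaggregation CTE now joins each `rsr_disaggregation` row to its parent update and keeps only approved ones, the same `status = 'A'` filter that `ACTUAL_VALUE_SQL` already applies. Isolated as a sketch, the corrected aggregation reads:

```python
# Sketch of the corrected CTE body, mirroring the golden diff: disaggregations
# are joined to their update row and restricted to approved updates.
APPROVED_DISAGG_SQL = """
    SELECT
        disagg.dimension_id AS dimension_id,
        sum((disagg.value) :: DECIMAL(20,2)) AS value
    FROM
        rsr_disaggregation disagg,
        rsr_indicatorperioddata "update"
    WHERE
        update.status = 'A' AND
        disagg.update_id = update.id
    GROUP BY
        disagg.dimension_id;
"""
```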
gh_patches_debug_15246 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-1194 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Update baggage header name
As per the spec, baggage propagation must use the header as specified in the W3C baggage specification: https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/baggage/api.md#baggage-propagation
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 #
15 import typing
16 import urllib.parse
17
18 from opentelemetry import baggage
19 from opentelemetry.context import get_current
20 from opentelemetry.context.context import Context
21 from opentelemetry.trace.propagation import textmap
22
23
24 class BaggagePropagator(textmap.TextMapPropagator):
25 MAX_HEADER_LENGTH = 8192
26 MAX_PAIR_LENGTH = 4096
27 MAX_PAIRS = 180
28 _BAGGAGE_HEADER_NAME = "otcorrelations"
29
30 def extract(
31 self,
32 get_from_carrier: textmap.Getter[textmap.TextMapPropagatorT],
33 carrier: textmap.TextMapPropagatorT,
34 context: typing.Optional[Context] = None,
35 ) -> Context:
36 """Extract Baggage from the carrier.
37
38 See
39 `opentelemetry.trace.propagation.textmap.TextMapPropagator.extract`
40 """
41
42 if context is None:
43 context = get_current()
44
45 header = _extract_first_element(
46 get_from_carrier(carrier, self._BAGGAGE_HEADER_NAME)
47 )
48
49 if not header or len(header) > self.MAX_HEADER_LENGTH:
50 return context
51
52 baggage_entries = header.split(",")
53 total_baggage_entries = self.MAX_PAIRS
54 for entry in baggage_entries:
55 if total_baggage_entries <= 0:
56 return context
57 total_baggage_entries -= 1
58 if len(entry) > self.MAX_PAIR_LENGTH:
59 continue
60 try:
61 name, value = entry.split("=", 1)
62 except Exception: # pylint: disable=broad-except
63 continue
64 context = baggage.set_baggage(
65 urllib.parse.unquote(name).strip(),
66 urllib.parse.unquote(value).strip(),
67 context=context,
68 )
69
70 return context
71
72 def inject(
73 self,
74 set_in_carrier: textmap.Setter[textmap.TextMapPropagatorT],
75 carrier: textmap.TextMapPropagatorT,
76 context: typing.Optional[Context] = None,
77 ) -> None:
78 """Injects Baggage into the carrier.
79
80 See
81 `opentelemetry.trace.propagation.textmap.TextMapPropagator.inject`
82 """
83 baggage_entries = baggage.get_all(context=context)
84 if not baggage_entries:
85 return
86
87 baggage_string = _format_baggage(baggage_entries)
88 set_in_carrier(
89 carrier, self._BAGGAGE_HEADER_NAME, baggage_string,
90 )
91
92
93 def _format_baggage(baggage_entries: typing.Mapping[str, object]) -> str:
94 return ",".join(
95 key + "=" + urllib.parse.quote_plus(str(value))
96 for key, value in baggage_entries.items()
97 )
98
99
100 def _extract_first_element(
101 items: typing.Iterable[textmap.TextMapPropagatorT],
102 ) -> typing.Optional[textmap.TextMapPropagatorT]:
103 if items is None:
104 return None
105 return next(iter(items), None)
106
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py b/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py
--- a/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py
+++ b/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py
@@ -25,7 +25,7 @@
MAX_HEADER_LENGTH = 8192
MAX_PAIR_LENGTH = 4096
MAX_PAIRS = 180
- _BAGGAGE_HEADER_NAME = "otcorrelations"
+ _BAGGAGE_HEADER_NAME = "baggage"
def extract(
self,
@@ -85,9 +85,7 @@
return
baggage_string = _format_baggage(baggage_entries)
- set_in_carrier(
- carrier, self._BAGGAGE_HEADER_NAME, baggage_string,
- )
+ set_in_carrier(carrier, self._BAGGAGE_HEADER_NAME, baggage_string)
def _format_baggage(baggage_entries: typing.Mapping[str, object]) -> str:
| {"golden_diff": "diff --git a/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py b/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py\n--- a/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py\n+++ b/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py\n@@ -25,7 +25,7 @@\n MAX_HEADER_LENGTH = 8192\n MAX_PAIR_LENGTH = 4096\n MAX_PAIRS = 180\n- _BAGGAGE_HEADER_NAME = \"otcorrelations\"\n+ _BAGGAGE_HEADER_NAME = \"baggage\"\n \n def extract(\n self,\n@@ -85,9 +85,7 @@\n return\n \n baggage_string = _format_baggage(baggage_entries)\n- set_in_carrier(\n- carrier, self._BAGGAGE_HEADER_NAME, baggage_string,\n- )\n+ set_in_carrier(carrier, self._BAGGAGE_HEADER_NAME, baggage_string)\n \n \n def _format_baggage(baggage_entries: typing.Mapping[str, object]) -> str:\n", "issue": "Update baggage header name\nAs per the spec, baggage propagation must use the header as specified in the w3c baggage specification https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/baggage/api.md#baggage-propagation\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport typing\nimport urllib.parse\n\nfrom opentelemetry import baggage\nfrom opentelemetry.context import get_current\nfrom opentelemetry.context.context import Context\nfrom opentelemetry.trace.propagation import textmap\n\n\nclass BaggagePropagator(textmap.TextMapPropagator):\n MAX_HEADER_LENGTH = 8192\n MAX_PAIR_LENGTH = 4096\n MAX_PAIRS = 180\n _BAGGAGE_HEADER_NAME = \"otcorrelations\"\n\n def extract(\n self,\n get_from_carrier: textmap.Getter[textmap.TextMapPropagatorT],\n carrier: textmap.TextMapPropagatorT,\n context: typing.Optional[Context] = None,\n ) -> Context:\n \"\"\"Extract Baggage from the carrier.\n\n See\n `opentelemetry.trace.propagation.textmap.TextMapPropagator.extract`\n \"\"\"\n\n if context is None:\n context = get_current()\n\n header = _extract_first_element(\n get_from_carrier(carrier, self._BAGGAGE_HEADER_NAME)\n )\n\n if not header or len(header) > self.MAX_HEADER_LENGTH:\n return context\n\n baggage_entries = header.split(\",\")\n total_baggage_entries = self.MAX_PAIRS\n for entry in baggage_entries:\n if total_baggage_entries <= 0:\n return context\n total_baggage_entries -= 1\n if len(entry) > self.MAX_PAIR_LENGTH:\n continue\n try:\n name, value = entry.split(\"=\", 1)\n except Exception: # pylint: disable=broad-except\n continue\n context = baggage.set_baggage(\n urllib.parse.unquote(name).strip(),\n urllib.parse.unquote(value).strip(),\n context=context,\n )\n\n return context\n\n def inject(\n self,\n set_in_carrier: textmap.Setter[textmap.TextMapPropagatorT],\n carrier: textmap.TextMapPropagatorT,\n context: typing.Optional[Context] = None,\n ) -> None:\n \"\"\"Injects Baggage into the carrier.\n\n See\n `opentelemetry.trace.propagation.textmap.TextMapPropagator.inject`\n \"\"\"\n baggage_entries = baggage.get_all(context=context)\n if not 
baggage_entries:\n return\n\n baggage_string = _format_baggage(baggage_entries)\n set_in_carrier(\n carrier, self._BAGGAGE_HEADER_NAME, baggage_string,\n )\n\n\ndef _format_baggage(baggage_entries: typing.Mapping[str, object]) -> str:\n return \",\".join(\n key + \"=\" + urllib.parse.quote_plus(str(value))\n for key, value in baggage_entries.items()\n )\n\n\ndef _extract_first_element(\n items: typing.Iterable[textmap.TextMapPropagatorT],\n) -> typing.Optional[textmap.TextMapPropagatorT]:\n if items is None:\n return None\n return next(iter(items), None)\n", "path": "opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport typing\nimport urllib.parse\n\nfrom opentelemetry import baggage\nfrom opentelemetry.context import get_current\nfrom opentelemetry.context.context import Context\nfrom opentelemetry.trace.propagation import textmap\n\n\nclass BaggagePropagator(textmap.TextMapPropagator):\n MAX_HEADER_LENGTH = 8192\n MAX_PAIR_LENGTH = 4096\n MAX_PAIRS = 180\n _BAGGAGE_HEADER_NAME = \"baggage\"\n\n def extract(\n self,\n get_from_carrier: textmap.Getter[textmap.TextMapPropagatorT],\n carrier: textmap.TextMapPropagatorT,\n context: typing.Optional[Context] = None,\n ) -> Context:\n \"\"\"Extract Baggage from the carrier.\n\n See\n `opentelemetry.trace.propagation.textmap.TextMapPropagator.extract`\n \"\"\"\n\n if context is None:\n context = get_current()\n\n header = _extract_first_element(\n get_from_carrier(carrier, self._BAGGAGE_HEADER_NAME)\n )\n\n if not header or len(header) > self.MAX_HEADER_LENGTH:\n return context\n\n baggage_entries = header.split(\",\")\n total_baggage_entries = self.MAX_PAIRS\n for entry in baggage_entries:\n if total_baggage_entries <= 0:\n return context\n total_baggage_entries -= 1\n if len(entry) > self.MAX_PAIR_LENGTH:\n continue\n try:\n name, value = entry.split(\"=\", 1)\n except Exception: # pylint: disable=broad-except\n continue\n context = baggage.set_baggage(\n urllib.parse.unquote(name).strip(),\n urllib.parse.unquote(value).strip(),\n context=context,\n )\n\n return context\n\n def inject(\n self,\n set_in_carrier: textmap.Setter[textmap.TextMapPropagatorT],\n carrier: textmap.TextMapPropagatorT,\n context: typing.Optional[Context] = None,\n ) -> None:\n \"\"\"Injects Baggage into the carrier.\n\n See\n `opentelemetry.trace.propagation.textmap.TextMapPropagator.inject`\n \"\"\"\n baggage_entries = baggage.get_all(context=context)\n if not baggage_entries:\n return\n\n baggage_string = _format_baggage(baggage_entries)\n set_in_carrier(carrier, self._BAGGAGE_HEADER_NAME, baggage_string)\n\n\ndef _format_baggage(baggage_entries: typing.Mapping[str, object]) -> str:\n return \",\".join(\n key + \"=\" + urllib.parse.quote_plus(str(value))\n for key, value in baggage_entries.items()\n )\n\n\ndef _extract_first_element(\n items: typing.Iterable[textmap.TextMapPropagatorT],\n) -> 
typing.Optional[textmap.TextMapPropagatorT]:\n if items is None:\n return None\n return next(iter(items), None)\n", "path": "opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py"}]} | 1,295 | 260 |
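Per the W3C specification cited in the issue, the propagation header is named literally `baggage`, carrying comma-separated, percent-encoded `key=value` pairs. A self-contained sketch of injection under that format, independent of the OpenTelemetry classes above:

```python
import urllib.parse

_BAGGAGE_HEADER_NAME = "baggage"  # header key fixed by the W3C baggage spec

def format_baggage(entries):
    return ",".join(
        key + "=" + urllib.parse.quote_plus(str(value))
        for key, value in entries.items()
    )

carrier = {}
carrier[_BAGGAGE_HEADER_NAME] = format_baggage({"userId": "alice"})
assert carrier == {"baggage": "userId=alice"}
```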
gh_patches_debug_456 | rasdani/github-patches | git_diff | dbt-labs__dbt-core-2537 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Python 3.6.2 doesn't work with dbt 0.17.0
### Describe the bug
Running dbt on Python <= 3.6.2 results in the error `name 'TimestampSnapshotConfig' is not defined`; 3.6.3 is unaffected.
### Steps To Reproduce
Install python 3.6.2
Install dbt
Try to use dbt
### Expected behavior
dbt should run, not crash, etc
### System information
**Which database are you using dbt with?**
Any
**The output of `dbt --version`:**
```
0.17.0
```
**The operating system you're using:**
macos, linux
**The output of `python --version`:**
`Python 3.6.2`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/setup.py`
Content:
```
1 #!/usr/bin/env python
2 import os
3 import sys
4
5 from setuptools import setup
6 try:
7 from setuptools import find_namespace_packages
8 except ImportError:
9 # the user has a downlevel version of setuptools.
10 print('Error: dbt requires setuptools v40.1.0 or higher.')
11 print('Please upgrade setuptools with "pip install --upgrade setuptools" '
12 'and try again')
13 sys.exit(1)
14
15
16 def read(fname):
17 return open(os.path.join(os.path.dirname(__file__), fname)).read()
18
19
20 package_name = "dbt-core"
21 package_version = "0.17.1a1"
22 description = """dbt (data build tool) is a command line tool that helps \
23 analysts and engineers transform data in their warehouse more effectively"""
24
25
26 setup(
27 name=package_name,
28 version=package_version,
29 description=description,
30 long_description=description,
31 author="Fishtown Analytics",
32 author_email="[email protected]",
33 url="https://github.com/fishtown-analytics/dbt",
34 packages=find_namespace_packages(include=['dbt', 'dbt.*']),
35 package_data={
36 'dbt': [
37 'include/index.html',
38 'include/global_project/dbt_project.yml',
39 'include/global_project/docs/*.md',
40 'include/global_project/macros/*.sql',
41 'include/global_project/macros/**/*.sql',
42 'include/global_project/macros/**/**/*.sql',
43 'py.typed',
44 ]
45 },
46 test_suite='test',
47 entry_points={
48 'console_scripts': [
49 'dbt = dbt.main:main',
50 ],
51 },
52 scripts=[
53 'scripts/dbt',
54 ],
55 install_requires=[
56 'Jinja2==2.11.2',
57 'PyYAML>=3.11',
58 'sqlparse>=0.2.3,<0.4',
59 'networkx>=2.3,<3',
60 'minimal-snowplow-tracker==0.0.2',
61 'colorama>=0.3.9,<0.5',
62 'agate>=1.6,<2',
63 'isodate>=0.6,<0.7',
64 'json-rpc>=1.12,<2',
65 'werkzeug>=0.15,<0.17',
66 'dataclasses==0.6;python_version<"3.7"',
67 'hologram==0.0.7',
68 'logbook>=1.5,<1.6',
69 'typing-extensions>=3.7.4,<3.8',
70 # the following are all to match snowflake-connector-python
71 'requests>=2.18.0,<2.23.0',
72 'idna<2.9',
73 'cffi>=1.9,<1.14',
74 ],
75 zip_safe=False,
76 classifiers=[
77 'Development Status :: 5 - Production/Stable',
78
79 'License :: OSI Approved :: Apache Software License',
80
81 'Operating System :: Microsoft :: Windows',
82 'Operating System :: MacOS :: MacOS X',
83 'Operating System :: POSIX :: Linux',
84
85 'Programming Language :: Python :: 3.6',
86 'Programming Language :: Python :: 3.7',
87 'Programming Language :: Python :: 3.8',
88 ],
89 python_requires=">=3.6.2",
90 )
91
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/core/setup.py b/core/setup.py
--- a/core/setup.py
+++ b/core/setup.py
@@ -86,5 +86,5 @@
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
- python_requires=">=3.6.2",
+ python_requires=">=3.6.3",
)
| {"golden_diff": "diff --git a/core/setup.py b/core/setup.py\n--- a/core/setup.py\n+++ b/core/setup.py\n@@ -86,5 +86,5 @@\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n ],\n- python_requires=\">=3.6.2\",\n+ python_requires=\">=3.6.3\",\n )\n", "issue": "Python 3.6.2 doesn't work with dbt 0.17.0\n### Describe the bug\r\nRunning dbt on python <= 3.6.2 results in an error that `name 'TimestampSnapshotConfig' is not defined`. 3.6.3 is unaffected.\r\n\r\n### Steps To Reproduce\r\nInstall python 3.6.2\r\nInstall dbt\r\nTry to use dbt\r\n\r\n### Expected behavior\r\ndbt should run, not crash, etc\r\n\r\n\r\n### System information\r\n**Which database are you using dbt with?**\r\nAny\r\n\r\n**The output of `dbt --version`:**\r\n```\r\n0.17.0\r\n```\r\n\r\n**The operating system you're using:**\r\nmacos, linux\r\n\r\n**The output of `python --version`:**\r\n`Python 3.6.2`\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nfrom setuptools import setup\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print('Error: dbt requires setuptools v40.1.0 or higher.')\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" '\n 'and try again')\n sys.exit(1)\n\n\ndef read(fname):\n return open(os.path.join(os.path.dirname(__file__), fname)).read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"0.17.1a1\"\ndescription = \"\"\"dbt (data build tool) is a command line tool that helps \\\nanalysts and engineers transform data in their warehouse more effectively\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=description,\n author=\"Fishtown Analytics\",\n author_email=\"[email protected]\",\n url=\"https://github.com/fishtown-analytics/dbt\",\n packages=find_namespace_packages(include=['dbt', 'dbt.*']),\n package_data={\n 'dbt': [\n 'include/index.html',\n 'include/global_project/dbt_project.yml',\n 'include/global_project/docs/*.md',\n 'include/global_project/macros/*.sql',\n 'include/global_project/macros/**/*.sql',\n 'include/global_project/macros/**/**/*.sql',\n 'py.typed',\n ]\n },\n test_suite='test',\n entry_points={\n 'console_scripts': [\n 'dbt = dbt.main:main',\n ],\n },\n scripts=[\n 'scripts/dbt',\n ],\n install_requires=[\n 'Jinja2==2.11.2',\n 'PyYAML>=3.11',\n 'sqlparse>=0.2.3,<0.4',\n 'networkx>=2.3,<3',\n 'minimal-snowplow-tracker==0.0.2',\n 'colorama>=0.3.9,<0.5',\n 'agate>=1.6,<2',\n 'isodate>=0.6,<0.7',\n 'json-rpc>=1.12,<2',\n 'werkzeug>=0.15,<0.17',\n 'dataclasses==0.6;python_version<\"3.7\"',\n 'hologram==0.0.7',\n 'logbook>=1.5,<1.6',\n 'typing-extensions>=3.7.4,<3.8',\n # the following are all to match snowflake-connector-python\n 'requests>=2.18.0,<2.23.0',\n 'idna<2.9',\n 'cffi>=1.9,<1.14',\n ],\n zip_safe=False,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n\n 'License :: OSI Approved :: Apache Software License',\n\n 'Operating System :: Microsoft :: Windows',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: POSIX :: Linux',\n\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n ],\n python_requires=\">=3.6.2\",\n)\n", "path": "core/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nfrom setuptools import setup\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # 
the user has a downlevel version of setuptools.\n print('Error: dbt requires setuptools v40.1.0 or higher.')\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" '\n 'and try again')\n sys.exit(1)\n\n\ndef read(fname):\n return open(os.path.join(os.path.dirname(__file__), fname)).read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"0.17.1a1\"\ndescription = \"\"\"dbt (data build tool) is a command line tool that helps \\\nanalysts and engineers transform data in their warehouse more effectively\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=description,\n author=\"Fishtown Analytics\",\n author_email=\"[email protected]\",\n url=\"https://github.com/fishtown-analytics/dbt\",\n packages=find_namespace_packages(include=['dbt', 'dbt.*']),\n package_data={\n 'dbt': [\n 'include/index.html',\n 'include/global_project/dbt_project.yml',\n 'include/global_project/docs/*.md',\n 'include/global_project/macros/*.sql',\n 'include/global_project/macros/**/*.sql',\n 'include/global_project/macros/**/**/*.sql',\n 'py.typed',\n ]\n },\n test_suite='test',\n entry_points={\n 'console_scripts': [\n 'dbt = dbt.main:main',\n ],\n },\n scripts=[\n 'scripts/dbt',\n ],\n install_requires=[\n 'Jinja2==2.11.2',\n 'PyYAML>=3.11',\n 'sqlparse>=0.2.3,<0.4',\n 'networkx>=2.3,<3',\n 'minimal-snowplow-tracker==0.0.2',\n 'colorama>=0.3.9,<0.5',\n 'agate>=1.6,<2',\n 'isodate>=0.6,<0.7',\n 'json-rpc>=1.12,<2',\n 'werkzeug>=0.15,<0.17',\n 'dataclasses==0.6;python_version<\"3.7\"',\n 'hologram==0.0.7',\n 'logbook>=1.5,<1.6',\n 'typing-extensions>=3.7.4,<3.8',\n # the following are all to match snowflake-connector-python\n 'requests>=2.18.0,<2.23.0',\n 'idna<2.9',\n 'cffi>=1.9,<1.14',\n ],\n zip_safe=False,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n\n 'License :: OSI Approved :: Apache Software License',\n\n 'Operating System :: Microsoft :: Windows',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: POSIX :: Linux',\n\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n ],\n python_requires=\">=3.6.3\",\n)\n", "path": "core/setup.py"}]} | 1,329 | 87 |
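Because the `NameError` in the issue above reproduces only on CPython 3.6.2 and older, the one-line fix raises the floor in `python_requires`, so pip refuses installation on affected interpreters instead of failing at runtime. A minimal sketch with a hypothetical package name:

```python
from setuptools import setup

setup(
    name="example-package",  # hypothetical; stands in for dbt-core
    version="0.0.1",
    python_requires=">=3.6.3",  # pip rejects 3.6.2 and older at install time
)
```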
gh_patches_debug_31382 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-2814 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Spider pricerite is broken
During the global build at 2021-05-26-14-42-23, spider **pricerite** failed with **0 features** and **2 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/pricerite.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/pricerite.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/pricerite.geojson))
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/pricerite.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 import json
3 import re
4
5 import scrapy
6
7 from locations.items import GeojsonPointItem
8 from locations.hours import OpeningHours
9
10
11 class PriceRiteSpider(scrapy.Spider):
12 name = "pricerite"
13 item_attributes = { 'brand': "PriceRite" }
14 allowed_domains = ["priceritesupermarkets.com"]
15
16 start_urls = (
17 "https://www.priceritesupermarkets.com/locations/",
18 )
19
20 def parse(self, response):
21 script = response.xpath('//script[contains(text(), "var stores")]').extract_first()
22 stores = json.loads(re.search(r'var stores = (.*?);', script).groups()[0])
23
24 for store in stores:
25 properties = {
26 "ref": store["storeNumber"],
27 "name": store["name"],
28 "lat": store["latitude"],
29 "lon": store["longitude"],
30 "addr_full": store["address1"],
31 "city": store["city"],
32 "state": store["state"],
33 "postcode": store["zipCode"],
34 }
35
36 yield GeojsonPointItem(**properties)
37
38
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/locations/spiders/pricerite.py b/locations/spiders/pricerite.py
--- a/locations/spiders/pricerite.py
+++ b/locations/spiders/pricerite.py
@@ -5,32 +5,36 @@
import scrapy
from locations.items import GeojsonPointItem
-from locations.hours import OpeningHours
class PriceRiteSpider(scrapy.Spider):
name = "pricerite"
item_attributes = { 'brand': "PriceRite" }
- allowed_domains = ["priceritesupermarkets.com"]
+ allowed_domains = ["priceritemarketplace.com"]
start_urls = (
- "https://www.priceritesupermarkets.com/locations/",
+ "https://www.priceritemarketplace.com/",
)
def parse(self, response):
- script = response.xpath('//script[contains(text(), "var stores")]').extract_first()
- stores = json.loads(re.search(r'var stores = (.*?);', script).groups()[0])
+ script = response.xpath('//script[contains(text(), "__PRELOADED_STATE__")]/text()').extract_first()
+ script = script[script.index('{'):]
+ stores = json.loads(script)['stores']['availablePlanningStores']['items']
for store in stores:
+ ref = store["retailerStoreId"]
properties = {
- "ref": store["storeNumber"],
+ "ref": ref,
+ "website": f"https://www.priceritemarketplace.com/sm/planning/rsid/{ref}",
"name": store["name"],
- "lat": store["latitude"],
- "lon": store["longitude"],
- "addr_full": store["address1"],
+ "lat": store["location"]["latitude"],
+ "lon": store["location"]["longitude"],
+ "addr_full": store["addressLine1"],
"city": store["city"],
- "state": store["state"],
- "postcode": store["zipCode"],
+ "state": store["countyProvinceState"],
+ "postcode": store["postCode"],
+ "phone": store["phone"],
+ "opening_hours": store["openingHours"],
}
yield GeojsonPointItem(**properties)
| {"golden_diff": "diff --git a/locations/spiders/pricerite.py b/locations/spiders/pricerite.py\n--- a/locations/spiders/pricerite.py\n+++ b/locations/spiders/pricerite.py\n@@ -5,32 +5,36 @@\n import scrapy\n \n from locations.items import GeojsonPointItem\n-from locations.hours import OpeningHours\n \n \n class PriceRiteSpider(scrapy.Spider):\n name = \"pricerite\"\n item_attributes = { 'brand': \"PriceRite\" }\n- allowed_domains = [\"priceritesupermarkets.com\"]\n+ allowed_domains = [\"priceritemarketplace.com\"]\n \n start_urls = (\n- \"https://www.priceritesupermarkets.com/locations/\",\n+ \"https://www.priceritemarketplace.com/\",\n )\n \n def parse(self, response):\n- script = response.xpath('//script[contains(text(), \"var stores\")]').extract_first()\n- stores = json.loads(re.search(r'var stores = (.*?);', script).groups()[0])\n+ script = response.xpath('//script[contains(text(), \"__PRELOADED_STATE__\")]/text()').extract_first()\n+ script = script[script.index('{'):]\n+ stores = json.loads(script)['stores']['availablePlanningStores']['items']\n \n for store in stores:\n+ ref = store[\"retailerStoreId\"]\n properties = {\n- \"ref\": store[\"storeNumber\"],\n+ \"ref\": ref,\n+ \"website\": f\"https://www.priceritemarketplace.com/sm/planning/rsid/{ref}\",\n \"name\": store[\"name\"],\n- \"lat\": store[\"latitude\"],\n- \"lon\": store[\"longitude\"],\n- \"addr_full\": store[\"address1\"],\n+ \"lat\": store[\"location\"][\"latitude\"],\n+ \"lon\": store[\"location\"][\"longitude\"],\n+ \"addr_full\": store[\"addressLine1\"],\n \"city\": store[\"city\"],\n- \"state\": store[\"state\"],\n- \"postcode\": store[\"zipCode\"],\n+ \"state\": store[\"countyProvinceState\"],\n+ \"postcode\": store[\"postCode\"],\n+ \"phone\": store[\"phone\"],\n+ \"opening_hours\": store[\"openingHours\"],\n }\n \n yield GeojsonPointItem(**properties)\n", "issue": "Spider pricerite is broken\nDuring the global build at 2021-05-26-14-42-23, spider **pricerite** failed with **0 features** and **2 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/logs/pricerite.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/pricerite.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-26-14-42-23/output/pricerite.geojson))\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport json\nimport re\n\nimport scrapy\n\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\n\n\nclass PriceRiteSpider(scrapy.Spider):\n name = \"pricerite\"\n item_attributes = { 'brand': \"PriceRite\" }\n allowed_domains = [\"priceritesupermarkets.com\"]\n\n start_urls = (\n \"https://www.priceritesupermarkets.com/locations/\",\n )\n\n def parse(self, response):\n script = response.xpath('//script[contains(text(), \"var stores\")]').extract_first()\n stores = json.loads(re.search(r'var stores = (.*?);', script).groups()[0])\n\n for store in stores:\n properties = {\n \"ref\": store[\"storeNumber\"],\n \"name\": store[\"name\"],\n \"lat\": store[\"latitude\"],\n \"lon\": store[\"longitude\"],\n \"addr_full\": store[\"address1\"],\n \"city\": store[\"city\"],\n \"state\": store[\"state\"],\n \"postcode\": store[\"zipCode\"],\n }\n\n yield GeojsonPointItem(**properties)\n\n", "path": "locations/spiders/pricerite.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nimport json\nimport re\n\nimport scrapy\n\nfrom locations.items import GeojsonPointItem\n\n\nclass 
PriceRiteSpider(scrapy.Spider):\n name = \"pricerite\"\n item_attributes = { 'brand': \"PriceRite\" }\n allowed_domains = [\"priceritemarketplace.com\"]\n\n start_urls = (\n \"https://www.priceritemarketplace.com/\",\n )\n\n def parse(self, response):\n script = response.xpath('//script[contains(text(), \"__PRELOADED_STATE__\")]/text()').extract_first()\n script = script[script.index('{'):]\n stores = json.loads(script)['stores']['availablePlanningStores']['items']\n\n for store in stores:\n ref = store[\"retailerStoreId\"]\n properties = {\n \"ref\": ref,\n \"website\": f\"https://www.priceritemarketplace.com/sm/planning/rsid/{ref}\",\n \"name\": store[\"name\"],\n \"lat\": store[\"location\"][\"latitude\"],\n \"lon\": store[\"location\"][\"longitude\"],\n \"addr_full\": store[\"addressLine1\"],\n \"city\": store[\"city\"],\n \"state\": store[\"countyProvinceState\"],\n \"postcode\": store[\"postCode\"],\n \"phone\": store[\"phone\"],\n \"opening_hours\": store[\"openingHours\"],\n }\n\n yield GeojsonPointItem(**properties)\n\n", "path": "locations/spiders/pricerite.py"}]} | 758 | 489 |
gh_patches_debug_64324 | rasdani/github-patches | git_diff | pex-tool__pex-630 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 1.6.0
On the docket:
+ (long-term fix) unhandled AttributeError during pex bootstrapping with PEX_PATH #598
+ Vendor setuptools / wheel. #607
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = '1.5.3'
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = '1.5.3'
+__version__ = '1.6.0'
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = '1.5.3'\n+__version__ = '1.6.0'\n", "issue": "Release 1.6.0\nOn the docket:\r\n+ (longterm fix) unhandled AttributeError during pex bootstrapping with PEX_PATH #598\r\n+ Vendor setuptools / wheel. #607\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '1.5.3'\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '1.6.0'\n", "path": "pex/version.py"}]} | 354 | 94 |
gh_patches_debug_692 | rasdani/github-patches | git_diff | hylang__hy-2312 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
New release
It's time for a new release soon. Here are the things I'd like to get done, or at least try to get done, first. If you think you'll make a PR soon that you'd also like to get in for this release, mention that, too. Volunteers to take these tasks on are also welcome.
- ~~#2291~~; ~~#2292~~ - These are more difficult than I thought. I don't think I'm going to make the release wait for them.
- Install bytecode (for Hy and for Hyrule): hylang/hyrule#42; at least partly addresses #1747
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/conf.py`
Content:
```
1 # This file is execfile()d with the current directory set to its containing dir.
2
3 import html
4 import os
5 import re
6 import sys
7 import time
8
9 sys.path.insert(0, os.path.abspath(".."))
10
11 extensions = [
12 "sphinx.ext.napoleon",
13 "sphinx.ext.intersphinx",
14 "sphinx.ext.autodoc",
15 "sphinx.ext.viewcode",
16 "sphinxcontrib.hydomain",
17 ]
18
19 from get_version import __version__ as hy_version
20
21 # Read the Docs might dirty its checkout, so strip the dirty flag.
22 hy_version = re.sub(r"[+.]dirty\Z", "", hy_version)
23
24 templates_path = ["_templates"]
25 source_suffix = ".rst"
26
27 master_doc = "index"
28
29 # General information about the project.
30 project = "hy"
31 copyright = "%s the authors" % time.strftime("%Y")
32
33 # The version info for the project you're documenting, acts as replacement for
34 # |version| and |release|, also used in various other places throughout the
35 # built documents.
36 #
37 # The short X.Y version.
38 version = ".".join(hy_version.split(".")[:-1])
39 # The full version, including alpha/beta/rc tags.
40 release = hy_version
41 hy_descriptive_version = html.escape(hy_version)
42 if "+" in hy_version:
43 hy_descriptive_version += " <strong style='color: red;'>(unstable)</strong>"
44
45 exclude_patterns = ["_build", "coreteam.rst"]
46 add_module_names = True
47
48 pygments_style = "sphinx"
49
50 import sphinx_rtd_theme
51
52 html_theme = "sphinx_rtd_theme"
53 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
54
55 # Add any paths that contain custom static files (such as style sheets) here,
56 # relative to this directory. They are copied after the builtin static files,
57 # so a file named "default.css" will overwrite the builtin "default.css".
58 html_static_path = ["_static"]
59
60 html_use_smartypants = False
61 html_show_sphinx = False
62
63 html_context = dict(
64 hy_descriptive_version=hy_descriptive_version,
65 has_active_alpha=True,
66 )
67
68 highlight_language = "clojure"
69
70 intersphinx_mapping = dict(
71 py=("https://docs.python.org/3/", None),
72 py3_10=("https://docs.python.org/3.10/", None),
73 hyrule=("https://hyrule.readthedocs.io/en/master/", None),
74 )
75 # ** Generate Cheatsheet
76 import json
77 from itertools import zip_longest
78 from pathlib import Path
79
80
81 def refize(spec):
82 role = ":hy:func:"
83 if isinstance(spec, dict):
84 _name = spec["name"]
85 uri = spec["uri"]
86 if spec.get("internal"):
87 role = ":ref:"
88 else:
89 uri = spec
90 _name = str.split(uri, ".")[-1]
91 return "{}`{} <{}>`".format(role, _name, uri)
92
93
94 def format_refs(refs, indent):
95 args = [iter(map(refize, refs))]
96 ref_groups = zip_longest(*args, fillvalue="")
97 return str.join(
98 " \\\n" + " " * (indent + 3),
99 [str.join(" ", ref_group) for ref_group in ref_groups],
100 )
101
102
103 def format_row(category, divider_loc):
104 return "{title: <{width}} | {methods}".format(
105 width=divider_loc,
106 title=category["name"],
107 methods=format_refs(category["methods"], divider_loc),
108 )
109
110
111 def format_table(table_spec):
112 table_name = table_spec["name"]
113 categories = table_spec["categories"]
114 longest_cat_name = max(len(category["name"]) for category in categories)
115 table = [
116 table_name,
117 "-" * len(table_name),
118 "",
119 "=" * longest_cat_name + " " + "=" * 25,
120 *(format_row(category, longest_cat_name) for category in categories),
121 "=" * longest_cat_name + " " + "=" * 25,
122 "",
123 ]
124 return "\n".join(table)
125
126
127 # Modifications to the cheatsheet should be added in `cheatsheet.json`
128 cheatsheet_spec = json.loads(Path("./docs/cheatsheet.json").read_text())
129 cheatsheet = [
130 "..",
131 " DO NOT MODIFY THIS FILE. IT IS AUTO GENERATED BY ``conf.py``",
132 " If you need to change or add methods, modify ``cheatsheet_spec`` in ``conf.py``",
133 "",
134 ".. _cheatsheet:",
135 "",
136 "Cheatsheet",
137 "==========",
138 "",
139 *map(format_table, cheatsheet_spec),
140 ]
141 Path("./docs/cheatsheet.rst").write_text("\n".join(cheatsheet))
142
143
144 # ** Sphinx App Setup
145
146
147 def setup(app):
148 app.add_css_file("overrides.css")
149
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -61,9 +61,7 @@
html_show_sphinx = False
html_context = dict(
- hy_descriptive_version=hy_descriptive_version,
- has_active_alpha=True,
-)
+ hy_descriptive_version=hy_descriptive_version)
highlight_language = "clojure"
| {"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -61,9 +61,7 @@\n html_show_sphinx = False\n \n html_context = dict(\n- hy_descriptive_version=hy_descriptive_version,\n- has_active_alpha=True,\n-)\n+ hy_descriptive_version=hy_descriptive_version)\n \n highlight_language = \"clojure\"\n", "issue": "New release\nIt's time for a new release soon. Here are the things I'd like to get done, or at least try to get done, first. If you think you'll make a PR soon that you'd also like to get in for this release, mention that, too. Volunteers to take these tasks on are also welcome.\r\n\r\n- ~#2291~; ~#2292~ - These are more difficult than I thought. I don't think I'm going to make the release wait for them.\r\n- Install bytecode (for Hy and for Hyrule): hylang/hyrule#42; at least partly addresses #1747\n", "before_files": [{"content": "# This file is execfile()d with the current directory set to its containing dir.\n\nimport html\nimport os\nimport re\nimport sys\nimport time\n\nsys.path.insert(0, os.path.abspath(\"..\"))\n\nextensions = [\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.viewcode\",\n \"sphinxcontrib.hydomain\",\n]\n\nfrom get_version import __version__ as hy_version\n\n# Read the Docs might dirty its checkout, so strip the dirty flag.\nhy_version = re.sub(r\"[+.]dirty\\Z\", \"\", hy_version)\n\ntemplates_path = [\"_templates\"]\nsource_suffix = \".rst\"\n\nmaster_doc = \"index\"\n\n# General information about the project.\nproject = \"hy\"\ncopyright = \"%s the authors\" % time.strftime(\"%Y\")\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = \".\".join(hy_version.split(\".\")[:-1])\n# The full version, including alpha/beta/rc tags.\nrelease = hy_version\nhy_descriptive_version = html.escape(hy_version)\nif \"+\" in hy_version:\n hy_descriptive_version += \" <strong style='color: red;'>(unstable)</strong>\"\n\nexclude_patterns = [\"_build\", \"coreteam.rst\"]\nadd_module_names = True\n\npygments_style = \"sphinx\"\n\nimport sphinx_rtd_theme\n\nhtml_theme = \"sphinx_rtd_theme\"\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\nhtml_use_smartypants = False\nhtml_show_sphinx = False\n\nhtml_context = dict(\n hy_descriptive_version=hy_descriptive_version,\n has_active_alpha=True,\n)\n\nhighlight_language = \"clojure\"\n\nintersphinx_mapping = dict(\n py=(\"https://docs.python.org/3/\", None),\n py3_10=(\"https://docs.python.org/3.10/\", None),\n hyrule=(\"https://hyrule.readthedocs.io/en/master/\", None),\n)\n# ** Generate Cheatsheet\nimport json\nfrom itertools import zip_longest\nfrom pathlib import Path\n\n\ndef refize(spec):\n role = \":hy:func:\"\n if isinstance(spec, dict):\n _name = spec[\"name\"]\n uri = spec[\"uri\"]\n if spec.get(\"internal\"):\n role = \":ref:\"\n else:\n uri = spec\n _name = str.split(uri, \".\")[-1]\n return \"{}`{} <{}>`\".format(role, _name, uri)\n\n\ndef format_refs(refs, indent):\n args = [iter(map(refize, refs))]\n ref_groups = zip_longest(*args, fillvalue=\"\")\n return str.join(\n \" \\\\\\n\" + \" \" * (indent + 3),\n [str.join(\" \", ref_group) for ref_group in ref_groups],\n )\n\n\ndef format_row(category, divider_loc):\n return \"{title: <{width}} | {methods}\".format(\n width=divider_loc,\n title=category[\"name\"],\n methods=format_refs(category[\"methods\"], divider_loc),\n )\n\n\ndef format_table(table_spec):\n table_name = table_spec[\"name\"]\n categories = table_spec[\"categories\"]\n longest_cat_name = max(len(category[\"name\"]) for category in categories)\n table = [\n table_name,\n \"-\" * len(table_name),\n \"\",\n \"=\" * longest_cat_name + \" \" + \"=\" * 25,\n *(format_row(category, longest_cat_name) for category in categories),\n \"=\" * longest_cat_name + \" \" + \"=\" * 25,\n \"\",\n ]\n return \"\\n\".join(table)\n\n\n# Modifications to the cheatsheet should be added in `cheatsheet.json`\ncheatsheet_spec = json.loads(Path(\"./docs/cheatsheet.json\").read_text())\ncheatsheet = [\n \"..\",\n \" DO NOT MODIFY THIS FILE. IT IS AUTO GENERATED BY ``conf.py``\",\n \" If you need to change or add methods, modify ``cheatsheet_spec`` in ``conf.py``\",\n \"\",\n \".. 
_cheatsheet:\",\n \"\",\n \"Cheatsheet\",\n \"==========\",\n \"\",\n *map(format_table, cheatsheet_spec),\n]\nPath(\"./docs/cheatsheet.rst\").write_text(\"\\n\".join(cheatsheet))\n\n\n# ** Sphinx App Setup\n\n\ndef setup(app):\n app.add_css_file(\"overrides.css\")\n", "path": "docs/conf.py"}], "after_files": [{"content": "# This file is execfile()d with the current directory set to its containing dir.\n\nimport html\nimport os\nimport re\nimport sys\nimport time\n\nsys.path.insert(0, os.path.abspath(\"..\"))\n\nextensions = [\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.viewcode\",\n \"sphinxcontrib.hydomain\",\n]\n\nfrom get_version import __version__ as hy_version\n\n# Read the Docs might dirty its checkout, so strip the dirty flag.\nhy_version = re.sub(r\"[+.]dirty\\Z\", \"\", hy_version)\n\ntemplates_path = [\"_templates\"]\nsource_suffix = \".rst\"\n\nmaster_doc = \"index\"\n\n# General information about the project.\nproject = \"hy\"\ncopyright = \"%s the authors\" % time.strftime(\"%Y\")\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = \".\".join(hy_version.split(\".\")[:-1])\n# The full version, including alpha/beta/rc tags.\nrelease = hy_version\nhy_descriptive_version = html.escape(hy_version)\nif \"+\" in hy_version:\n hy_descriptive_version += \" <strong style='color: red;'>(unstable)</strong>\"\n\nexclude_patterns = [\"_build\", \"coreteam.rst\"]\nadd_module_names = True\n\npygments_style = \"sphinx\"\n\nimport sphinx_rtd_theme\n\nhtml_theme = \"sphinx_rtd_theme\"\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\nhtml_use_smartypants = False\nhtml_show_sphinx = False\n\nhtml_context = dict(\n hy_descriptive_version=hy_descriptive_version)\n\nhighlight_language = \"clojure\"\n\nintersphinx_mapping = dict(\n py=(\"https://docs.python.org/3/\", None),\n py3_10=(\"https://docs.python.org/3.10/\", None),\n hyrule=(\"https://hyrule.readthedocs.io/en/master/\", None),\n)\n# ** Generate Cheatsheet\nimport json\nfrom itertools import zip_longest\nfrom pathlib import Path\n\n\ndef refize(spec):\n role = \":hy:func:\"\n if isinstance(spec, dict):\n _name = spec[\"name\"]\n uri = spec[\"uri\"]\n if spec.get(\"internal\"):\n role = \":ref:\"\n else:\n uri = spec\n _name = str.split(uri, \".\")[-1]\n return \"{}`{} <{}>`\".format(role, _name, uri)\n\n\ndef format_refs(refs, indent):\n args = [iter(map(refize, refs))]\n ref_groups = zip_longest(*args, fillvalue=\"\")\n return str.join(\n \" \\\\\\n\" + \" \" * (indent + 3),\n [str.join(\" \", ref_group) for ref_group in ref_groups],\n )\n\n\ndef format_row(category, divider_loc):\n return \"{title: <{width}} | {methods}\".format(\n width=divider_loc,\n title=category[\"name\"],\n methods=format_refs(category[\"methods\"], divider_loc),\n )\n\n\ndef format_table(table_spec):\n table_name = table_spec[\"name\"]\n categories = table_spec[\"categories\"]\n longest_cat_name = max(len(category[\"name\"]) for category in categories)\n table = [\n table_name,\n \"-\" * len(table_name),\n \"\",\n \"=\" * longest_cat_name + \" \" + \"=\" * 25,\n *(format_row(category, longest_cat_name) for category in categories),\n \"=\" * longest_cat_name + \" \" + \"=\" * 25,\n \"\",\n ]\n return \"\\n\".join(table)\n\n\n# Modifications to the cheatsheet should be added in `cheatsheet.json`\ncheatsheet_spec = json.loads(Path(\"./docs/cheatsheet.json\").read_text())\ncheatsheet = [\n \"..\",\n \" DO NOT MODIFY THIS FILE. IT IS AUTO GENERATED BY ``conf.py``\",\n \" If you need to change or add methods, modify ``cheatsheet_spec`` in ``conf.py``\",\n \"\",\n \".. _cheatsheet:\",\n \"\",\n \"Cheatsheet\",\n \"==========\",\n \"\",\n *map(format_table, cheatsheet_spec),\n]\nPath(\"./docs/cheatsheet.rst\").write_text(\"\\n\".join(cheatsheet))\n\n\n# ** Sphinx App Setup\n\n\ndef setup(app):\n app.add_css_file(\"overrides.css\")\n", "path": "docs/conf.py"}]} | 1,780 | 89 |
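For context on why dropping `has_active_alpha` is safe: every key in Sphinx's `html_context` becomes a top-level variable inside theme templates, and Jinja2 treats a missing variable as falsy. The template below is a hypothetical stand-in for the real `sphinx_rtd_theme` layout, shown only to demonstrate the fallback:

```python
from jinja2 import Template  # already a Sphinx dependency

banner = Template(
    "{% if has_active_alpha %}alpha notice{% else %}{{ hy_descriptive_version }}{% endif %}"
)

print(banner.render(hy_descriptive_version="0.24.0"))                         # 0.24.0
print(banner.render(hy_descriptive_version="0.24.0", has_active_alpha=True))  # alpha notice
```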
gh_patches_debug_28222 | rasdani/github-patches | git_diff | scikit-hep__awkward-1650 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ak.fields (v2) passes a RecordArray's internal fields by reference
Okay, so I hadn't noticed that Awkward v2's fields are passed by reference, which exposes them to the danger that someone might modify them downstream:
v1:
```python
>>> array = awkward.Array([{"x": 1, "y": 1.1}])
>>> fields = awkward.fields(array)
>>> array
<Array [{x: 1, y: 1.1}] type='1 * {"x": int64, "y": float64}'>
>>> fields
['x', 'y']
>>> fields[0] = "XXX"
>>> fields
['XXX', 'y']
>>> array
<Array [{x: 1, y: 1.1}] type='1 * {"x": int64, "y": float64}'>
```
v2:
```python
>>> array = awkward._v2.Array([{"x": 1, "y": 1.1}])
>>> fields = awkward._v2.fields(array)
>>> array
<Array [{x: 1, y: 1.1}] type='1 * {x: int64, y: float64}'>
>>> fields
['x', 'y']
>>> fields[0] = "XXX"
>>> fields
['XXX', 'y']
>>> array
<Array [{XXX: 1, y: 1.1}] type='1 * {XXX: int64, y: float64}'>
```
It could be fixed [here, in Awkward](https://github.com/scikit-hep/awkward/blob/352b0dead74846ad2a56d385be4694ec87072a08/src/awkward/_v2/contents/recordarray.py#L162), or maybe [here](https://github.com/scikit-hep/awkward/blob/352b0dead74846ad2a56d385be4694ec87072a08/src/awkward/_v2/operations/ak_fields.py#L30) (to only suffer the list-copy when handing it off to a user, so that internal uses can still be by reference).
I'll use this comment to open an issue in Awkward. Once `awkward.fields` is guarded, your `.copy()` can be removed, though leaving it in place has no consequence beyond a small performance cost.
_Originally posted by @jpivarski in https://github.com/scikit-hep/vector/pull/226#discussion_r958660705_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/awkward/_v2/operations/ak_fields.py`
Content:
```
1 # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
2
3 import awkward as ak
4
5 np = ak.nplike.NumpyMetadata.instance()
6
7
8 def fields(array):
9 """
10 Extracts record fields or tuple slot numbers from `array` (many types
11 supported, including all Awkward Arrays and Records).
12
13 If the array contains nested records, only the outermost record is
14 queried. If it contains tuples instead of records, this function outputs
15 string representations of integers, such as `"0"`, `"1"`, `"2"`, etc.
16 The records or tuples may be within multiple layers of nested lists.
17
18 If the array contains neither tuples nor records, this returns an empty
19 list.
20 """
21 with ak._v2._util.OperationErrorContext(
22 "ak._v2.fields",
23 dict(array=array),
24 ):
25 return _impl(array)
26
27
28 def _impl(array):
29 layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)
30 return layout.fields
31
```
Path: `src/awkward/_v2/operations/ak_parameters.py`
Content:
```
1 # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
2
3 import awkward as ak
4
5 np = ak.nplike.NumpyMetadata.instance()
6
7
8 def parameters(array):
9 """
10 Extracts parameters from the outermost array node of `array` (many types
11 supported, including all Awkward Arrays and Records).
12
13 Parameters are a dict from str to JSON-like objects, usually strings.
14 Every #ak.layout.Content node has a different set of parameters. Some
15 key names are special, such as `"__record__"` and `"__array__"` that name
16 particular records and arrays as capable of supporting special behaviors.
17
18 See #ak.Array and #ak.behavior for a more complete description of
19 behaviors.
20 """
21 with ak._v2._util.OperationErrorContext(
22 "ak._v2.parameters",
23 dict(array=array),
24 ):
25 return _impl(array)
26
27
28 def _impl(array):
29 if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):
30 return array.layout.parameters
31
32 elif isinstance(
33 array,
34 (ak._v2.contents.Content, ak._v2.record.Record),
35 ):
36 return array.parameters
37
38 elif isinstance(array, ak._v2.highlevel.ArrayBuilder):
39 return array.snapshot().layout.parameters
40
41 elif isinstance(array, ak.layout.ArrayBuilder):
42 return array.snapshot().parameters
43
44 else:
45 return {}
46
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/awkward/_v2/operations/ak_fields.py b/src/awkward/_v2/operations/ak_fields.py
--- a/src/awkward/_v2/operations/ak_fields.py
+++ b/src/awkward/_v2/operations/ak_fields.py
@@ -27,4 +27,4 @@
def _impl(array):
layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)
- return layout.fields
+ return layout.fields.copy()
diff --git a/src/awkward/_v2/operations/ak_parameters.py b/src/awkward/_v2/operations/ak_parameters.py
--- a/src/awkward/_v2/operations/ak_parameters.py
+++ b/src/awkward/_v2/operations/ak_parameters.py
@@ -1,5 +1,8 @@
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
+import copy
+import numbers
+
import awkward as ak
np = ak.nplike.NumpyMetadata.instance()
@@ -27,13 +30,13 @@
def _impl(array):
if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):
- return array.layout.parameters
+ return _copy(array.layout.parameters)
elif isinstance(
array,
(ak._v2.contents.Content, ak._v2.record.Record),
):
- return array.parameters
+ return _copy(array.parameters)
elif isinstance(array, ak._v2.highlevel.ArrayBuilder):
return array.snapshot().layout.parameters
@@ -43,3 +46,10 @@
else:
return {}
+
+
+def _copy(what):
+ if all(isinstance(x, (str, numbers.Real)) for x in what.values()):
+ return what.copy()
+ else:
+ return copy.deepcopy(what)
| {"golden_diff": "diff --git a/src/awkward/_v2/operations/ak_fields.py b/src/awkward/_v2/operations/ak_fields.py\n--- a/src/awkward/_v2/operations/ak_fields.py\n+++ b/src/awkward/_v2/operations/ak_fields.py\n@@ -27,4 +27,4 @@\n \n def _impl(array):\n layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)\n- return layout.fields\n+ return layout.fields.copy()\ndiff --git a/src/awkward/_v2/operations/ak_parameters.py b/src/awkward/_v2/operations/ak_parameters.py\n--- a/src/awkward/_v2/operations/ak_parameters.py\n+++ b/src/awkward/_v2/operations/ak_parameters.py\n@@ -1,5 +1,8 @@\n # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n \n+import copy\n+import numbers\n+\n import awkward as ak\n \n np = ak.nplike.NumpyMetadata.instance()\n@@ -27,13 +30,13 @@\n \n def _impl(array):\n if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):\n- return array.layout.parameters\n+ return _copy(array.layout.parameters)\n \n elif isinstance(\n array,\n (ak._v2.contents.Content, ak._v2.record.Record),\n ):\n- return array.parameters\n+ return _copy(array.parameters)\n \n elif isinstance(array, ak._v2.highlevel.ArrayBuilder):\n return array.snapshot().layout.parameters\n@@ -43,3 +46,10 @@\n \n else:\n return {}\n+\n+\n+def _copy(what):\n+ if all(isinstance(x, (str, numbers.Real)) for x in what.values()):\n+ return what.copy()\n+ else:\n+ return copy.deepcopy(what)\n", "issue": "ak.fields (v2) passes a RecordArray's internal fields by reference\nOkay, so I hadn't noticed that Awkward v2's fields are passed by reference, which exposes them to the danger that someone might modify them downstream:\r\n\r\nv1:\r\n\r\n```python\r\n>>> array = awkward.Array([{\"x\": 1, \"y\": 1.1}])\r\n>>> fields = awkward.fields(array)\r\n>>> array\r\n<Array [{x: 1, y: 1.1}] type='1 * {\"x\": int64, \"y\": float64}'>\r\n>>> fields\r\n['x', 'y']\r\n>>> fields[0] = \"XXX\"\r\n>>> fields\r\n['XXX', 'y']\r\n>>> array\r\n<Array [{x: 1, y: 1.1}] type='1 * {\"x\": int64, \"y\": float64}'>\r\n```\r\n\r\nv2:\r\n\r\n```python\r\n>>> array = awkward._v2.Array([{\"x\": 1, \"y\": 1.1}])\r\n>>> fields = awkward._v2.fields(array)\r\n>>> array\r\n<Array [{x: 1, y: 1.1}] type='1 * {x: int64, y: float64}'>\r\n>>> fields\r\n['x', 'y']\r\n>>> fields[0] = \"XXX\"\r\n>>> fields\r\n['XXX', 'y']\r\n>>> array\r\n<Array [{XXX: 1, y: 1.1}] type='1 * {XXX: int64, y: float64}'>\r\n```\r\n\r\nIt could be fixed [here, in Awkward](https://github.com/scikit-hep/awkward/blob/352b0dead74846ad2a56d385be4694ec87072a08/src/awkward/_v2/contents/recordarray.py#L162), or maybe [here](https://github.com/scikit-hep/awkward/blob/352b0dead74846ad2a56d385be4694ec87072a08/src/awkward/_v2/operations/ak_fields.py#L30) (to only suffer the list-copy when handing it off to a user, so that internal uses can still be by reference).\r\n\r\nI'll use this comment to open an issue in Awkward. 
Once `awkward.fields` is guarded, your `.copy()` can be removed, but it can also not be removed with no consequences but a little performance.\r\n\r\n_Originally posted by @jpivarski in https://github.com/scikit-hep/vector/pull/226#discussion_r958660705_\n", "before_files": [{"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport awkward as ak\n\nnp = ak.nplike.NumpyMetadata.instance()\n\n\ndef fields(array):\n \"\"\"\n Extracts record fields or tuple slot numbers from `array` (many types\n supported, including all Awkward Arrays and Records).\n\n If the array contains nested records, only the outermost record is\n queried. If it contains tuples instead of records, this function outputs\n string representations of integers, such as `\"0\"`, `\"1\"`, `\"2\"`, etc.\n The records or tuples may be within multiple layers of nested lists.\n\n If the array contains neither tuples nor records, this returns an empty\n list.\n \"\"\"\n with ak._v2._util.OperationErrorContext(\n \"ak._v2.fields\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)\n return layout.fields\n", "path": "src/awkward/_v2/operations/ak_fields.py"}, {"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport awkward as ak\n\nnp = ak.nplike.NumpyMetadata.instance()\n\n\ndef parameters(array):\n \"\"\"\n Extracts parameters from the outermost array node of `array` (many types\n supported, including all Awkward Arrays and Records).\n\n Parameters are a dict from str to JSON-like objects, usually strings.\n Every #ak.layout.Content node has a different set of parameters. Some\n key names are special, such as `\"__record__\"` and `\"__array__\"` that name\n particular records and arrays as capable of supporting special behaviors.\n\n See #ak.Array and #ak.behavior for a more complete description of\n behaviors.\n \"\"\"\n with ak._v2._util.OperationErrorContext(\n \"ak._v2.parameters\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):\n return array.layout.parameters\n\n elif isinstance(\n array,\n (ak._v2.contents.Content, ak._v2.record.Record),\n ):\n return array.parameters\n\n elif isinstance(array, ak._v2.highlevel.ArrayBuilder):\n return array.snapshot().layout.parameters\n\n elif isinstance(array, ak.layout.ArrayBuilder):\n return array.snapshot().parameters\n\n else:\n return {}\n", "path": "src/awkward/_v2/operations/ak_parameters.py"}], "after_files": [{"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport awkward as ak\n\nnp = ak.nplike.NumpyMetadata.instance()\n\n\ndef fields(array):\n \"\"\"\n Extracts record fields or tuple slot numbers from `array` (many types\n supported, including all Awkward Arrays and Records).\n\n If the array contains nested records, only the outermost record is\n queried. 
If it contains tuples instead of records, this function outputs\n string representations of integers, such as `\"0\"`, `\"1\"`, `\"2\"`, etc.\n The records or tuples may be within multiple layers of nested lists.\n\n If the array contains neither tuples nor records, this returns an empty\n list.\n \"\"\"\n with ak._v2._util.OperationErrorContext(\n \"ak._v2.fields\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n layout = ak._v2.operations.to_layout(array, allow_record=True, allow_other=False)\n return layout.fields.copy()\n", "path": "src/awkward/_v2/operations/ak_fields.py"}, {"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport copy\nimport numbers\n\nimport awkward as ak\n\nnp = ak.nplike.NumpyMetadata.instance()\n\n\ndef parameters(array):\n \"\"\"\n Extracts parameters from the outermost array node of `array` (many types\n supported, including all Awkward Arrays and Records).\n\n Parameters are a dict from str to JSON-like objects, usually strings.\n Every #ak.layout.Content node has a different set of parameters. Some\n key names are special, such as `\"__record__\"` and `\"__array__\"` that name\n particular records and arrays as capable of supporting special behaviors.\n\n See #ak.Array and #ak.behavior for a more complete description of\n behaviors.\n \"\"\"\n with ak._v2._util.OperationErrorContext(\n \"ak._v2.parameters\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n if isinstance(array, (ak._v2.highlevel.Array, ak._v2.highlevel.Record)):\n return _copy(array.layout.parameters)\n\n elif isinstance(\n array,\n (ak._v2.contents.Content, ak._v2.record.Record),\n ):\n return _copy(array.parameters)\n\n elif isinstance(array, ak._v2.highlevel.ArrayBuilder):\n return array.snapshot().layout.parameters\n\n elif isinstance(array, ak.layout.ArrayBuilder):\n return array.snapshot().parameters\n\n else:\n return {}\n\n\ndef _copy(what):\n if all(isinstance(x, (str, numbers.Real)) for x in what.values()):\n return what.copy()\n else:\n return copy.deepcopy(what)\n", "path": "src/awkward/_v2/operations/ak_parameters.py"}]} | 1,562 | 428 |
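The aliasing bug and its fix are easy to model without Awkward itself; the toy class below is a stand-in under stated assumptions, not Awkward's real internals. Returning the internal list directly lets callers mutate it, while returning `.copy()` at the API boundary does not (the parameters case uses `copy.deepcopy` for the same reason when values are nested):

```python
class Layout:
    def __init__(self, fields):
        self._fields = fields

    @property
    def fields(self):
        return self._fields          # by reference: caller edits leak back in

    @property
    def fields_copied(self):
        return self._fields.copy()   # the one-line fix from the patch


layout = Layout(["x", "y"])

leaked = layout.fields
leaked[0] = "XXX"
print(layout.fields)         # ['XXX', 'y'] -- internal state was mutated

safe = layout.fields_copied
safe[0] = "YYY"
print(layout.fields_copied)  # ['XXX', 'y'] -- caller's edit did not leak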
gh_patches_debug_7366 | rasdani/github-patches | git_diff | aws-cloudformation__cfn-lint-2665 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
cfn-lint throws error when !ToJsonString contains int value
### CloudFormation Lint Version
0.76.2
### What operating system are you using?
Ubuntu
### Describe the bug
Unexpected internal error during linting of rule E1031, involving `ToJsonString` applied to a numerical value
```
2023-04-06 20:20:31,922 - cfnlint - DEBUG - Completed linting of file: templates/lambda.yml
E0002 Unknown exception while processing rule E1031: Traceback (most recent call last):
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 320, in run_check
return check(*args)
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 44, in wrapper
results = match_function(self, filename, cfn, *args, **kwargs)
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 202, in matchall
return self.match(cfn) # pylint: disable=E1102
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/functions/ToJsonString.py", line 39, in match
LanguageExtensions.validate_pseudo_parameters(
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py", line 32, in validate_pseudo_parameters
ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py", line 32, in <listcomp>
ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
TypeError: argument of type 'int' is not iterable
cfn-secrets-stack.yml:1:1
E0002 Unknown exception while processing rule E1031: Traceback (most recent call last):
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 320, in run_check
return check(*args)
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 44, in wrapper
results = match_function(self, filename, cfn, *args, **kwargs)
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py", line 202, in matchall
return self.match(cfn) # pylint: disable=E1102
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/functions/ToJsonString.py", line 39, in match
LanguageExtensions.validate_pseudo_parameters(
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py", line 32, in validate_pseudo_parameters
ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
File "/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py", line 32, in <listcomp>
ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
TypeError: argument of type 'int' is not iterable
cfn-secrets-stack.yml:1:1
```
### Expected behavior
A string-quoted int should work as well as a bare int; both are valid JSON
### Reproduction template
This works
```yaml
Resources:
DeploymentProperties:
Properties:
Description: "testing"
Name: 'Test'
SecretString: !ToJsonString
SomeNumber: '3'
Type: AWS::SecretsManager::Secret
Transform: AWS::LanguageExtensions
```
This does not, with the above error
```yaml
Resources:
DeploymentProperties:
Properties:
Description: "testing"
Name: 'Test'
SecretString: !ToJsonString
SomeNumber: 3
Type: AWS::SecretsManager::Secret
Transform: AWS::LanguageExtensions
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cfnlint/languageExtensions.py`
Content:
```
1 from cfnlint.rules import RuleMatch
2
3
4 class LanguageExtensions:
5 """Class for a CloudFormation languageExtensions"""
6
7 def validate_transform_is_declared(
8 self, has_language_extensions_transform, matches, tree, intrinsic_function
9 ):
10 if not has_language_extensions_transform:
11 message = (
12 "Missing Transform: Declare the AWS::LanguageExtensions Transform globally to enable use"
13 " of the intrinsic function " + intrinsic_function + " at {0}"
14 )
15 matches.append(RuleMatch(tree[:], message.format("/".join(map(str, tree)))))
16 return matches
17
18 def validate_type(self, fn_object_val, matches, tree, intrinsic_function):
19 if not isinstance(fn_object_val, dict) and not isinstance(fn_object_val, list):
20 message = intrinsic_function + " needs a map or a list at {0}"
21 matches.append(RuleMatch(tree[:], message.format("/".join(map(str, tree)))))
22 elif len(fn_object_val) == 0:
23 message = "Invalid value for " + intrinsic_function + " for {0}"
24 matches.append(RuleMatch(tree[:], message.format("/".join(map(str, tree)))))
25 return matches
26
27 def validate_pseudo_parameters(
28 self, fn_object_val, matches, tree, pseudo_params, intrinsic_function
29 ):
30 if isinstance(fn_object_val, dict):
31 ref = "Ref"
32 ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
33 for ref in ref_list:
34 if ref in pseudo_params:
35 message = (
36 intrinsic_function
37 + " does not support the pseudo parameter "
38 + ref
39 + " for {0}"
40 )
41 matches.append(
42 RuleMatch(tree[:], message.format("/".join(map(str, tree))))
43 )
44 return matches
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/cfnlint/languageExtensions.py b/src/cfnlint/languageExtensions.py
--- a/src/cfnlint/languageExtensions.py
+++ b/src/cfnlint/languageExtensions.py
@@ -29,7 +29,11 @@
):
if isinstance(fn_object_val, dict):
ref = "Ref"
- ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]
+ ref_list = [
+ val[ref]
+ for _, val in fn_object_val.items()
+ if hasattr(val, "__iter__") and ref in val
+ ]
for ref in ref_list:
if ref in pseudo_params:
message = (
| {"golden_diff": "diff --git a/src/cfnlint/languageExtensions.py b/src/cfnlint/languageExtensions.py\n--- a/src/cfnlint/languageExtensions.py\n+++ b/src/cfnlint/languageExtensions.py\n@@ -29,7 +29,11 @@\n ):\n if isinstance(fn_object_val, dict):\n ref = \"Ref\"\n- ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\n+ ref_list = [\n+ val[ref]\n+ for _, val in fn_object_val.items()\n+ if hasattr(val, \"__iter__\") and ref in val\n+ ]\n for ref in ref_list:\n if ref in pseudo_params:\n message = (\n", "issue": "cfn-lint throws error when !ToJsonString contains int value\n### CloudFormation Lint Version\n\n0.76.2\n\n### What operating system are you using?\n\nUbuntu\n\n### Describe the bug\n\nUnexpected internal error during linting of rule E1031, involving `ToJsonString` of numerical value\r\n\r\n```\r\n2023-04-06 20:20:31,922 - cfnlint - DEBUG - Completed linting of file: templates/lambda.yml\r\nE0002 Unknown exception while processing rule E1031: Traceback (most recent call last):\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 320, in run_check\r\n return check(*args)\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 44, in wrapper\r\n results = match_function(self, filename, cfn, *args, **kwargs)\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 202, in matchall\r\n return self.match(cfn) # pylint: disable=E1102\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/functions/ToJsonString.py\", line 39, in match\r\n LanguageExtensions.validate_pseudo_parameters(\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py\", line 32, in validate_pseudo_parameters\r\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py\", line 32, in <listcomp>\r\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\r\nTypeError: argument of type 'int' is not iterable\r\n\r\ncfn-secrets-stack.yml:1:1\r\n\r\nE0002 Unknown exception while processing rule E1031: Traceback (most recent call last):\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 320, in run_check\r\n return check(*args)\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 44, in wrapper\r\n results = match_function(self, filename, cfn, *args, **kwargs)\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/__init__.py\", line 202, in matchall\r\n return self.match(cfn) # pylint: disable=E1102\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/rules/functions/ToJsonString.py\", line 39, in match\r\n LanguageExtensions.validate_pseudo_parameters(\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py\", line 32, in validate_pseudo_parameters\r\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\r\n File \"/home/kftse/anaconda3/envs/aws/lib/python3.10/site-packages/cfnlint/languageExtensions.py\", line 32, in <listcomp>\r\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\r\nTypeError: argument of type 'int' is not iterable\r\n\r\ncfn-secrets-stack.yml:1:1\r\n```\n\n### Expected 
behavior\n\nString quoted int should work as well as int, both are valid json\n\n### Reproduction template\n\nThis works\r\n```yaml\r\nResources:\r\n DeploymentProperties:\r\n Properties:\r\n Description: \"testing\"\r\n Name: 'Test'\r\n SecretString: !ToJsonString\r\n SomeNumber: '3'\r\n Type: AWS::SecretsManager::Secret\r\nTransform: AWS::LanguageExtensions\r\n```\r\n\r\nThis does not, with the above error\r\n```yaml\r\nResources:\r\n DeploymentProperties:\r\n Properties:\r\n Description: \"testing\"\r\n Name: 'Test'\r\n SecretString: !ToJsonString\r\n SomeNumber: 3\r\n Type: AWS::SecretsManager::Secret\r\nTransform: AWS::LanguageExtensions\r\n```\r\n\n", "before_files": [{"content": "from cfnlint.rules import RuleMatch\n\n\nclass LanguageExtensions:\n \"\"\"Class for a CloudFormation languageExtensions\"\"\"\n\n def validate_transform_is_declared(\n self, has_language_extensions_transform, matches, tree, intrinsic_function\n ):\n if not has_language_extensions_transform:\n message = (\n \"Missing Transform: Declare the AWS::LanguageExtensions Transform globally to enable use\"\n \" of the intrinsic function \" + intrinsic_function + \" at {0}\"\n )\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n return matches\n\n def validate_type(self, fn_object_val, matches, tree, intrinsic_function):\n if not isinstance(fn_object_val, dict) and not isinstance(fn_object_val, list):\n message = intrinsic_function + \" needs a map or a list at {0}\"\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n elif len(fn_object_val) == 0:\n message = \"Invalid value for \" + intrinsic_function + \" for {0}\"\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n return matches\n\n def validate_pseudo_parameters(\n self, fn_object_val, matches, tree, pseudo_params, intrinsic_function\n ):\n if isinstance(fn_object_val, dict):\n ref = \"Ref\"\n ref_list = [val[ref] for key, val in fn_object_val.items() if ref in val]\n for ref in ref_list:\n if ref in pseudo_params:\n message = (\n intrinsic_function\n + \" does not support the pseudo parameter \"\n + ref\n + \" for {0}\"\n )\n matches.append(\n RuleMatch(tree[:], message.format(\"/\".join(map(str, tree))))\n )\n return matches\n", "path": "src/cfnlint/languageExtensions.py"}], "after_files": [{"content": "from cfnlint.rules import RuleMatch\n\n\nclass LanguageExtensions:\n \"\"\"Class for a CloudFormation languageExtensions\"\"\"\n\n def validate_transform_is_declared(\n self, has_language_extensions_transform, matches, tree, intrinsic_function\n ):\n if not has_language_extensions_transform:\n message = (\n \"Missing Transform: Declare the AWS::LanguageExtensions Transform globally to enable use\"\n \" of the intrinsic function \" + intrinsic_function + \" at {0}\"\n )\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n return matches\n\n def validate_type(self, fn_object_val, matches, tree, intrinsic_function):\n if not isinstance(fn_object_val, dict) and not isinstance(fn_object_val, list):\n message = intrinsic_function + \" needs a map or a list at {0}\"\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n elif len(fn_object_val) == 0:\n message = \"Invalid value for \" + intrinsic_function + \" for {0}\"\n matches.append(RuleMatch(tree[:], message.format(\"/\".join(map(str, tree)))))\n return matches\n\n def validate_pseudo_parameters(\n self, fn_object_val, matches, tree, pseudo_params, 
intrinsic_function\n ):\n if isinstance(fn_object_val, dict):\n ref = \"Ref\"\n ref_list = [\n val[ref]\n for _, val in fn_object_val.items()\n if hasattr(val, \"__iter__\") and ref in val\n ]\n for ref in ref_list:\n if ref in pseudo_params:\n message = (\n intrinsic_function\n + \" does not support the pseudo parameter \"\n + ref\n + \" for {0}\"\n )\n matches.append(\n RuleMatch(tree[:], message.format(\"/\".join(map(str, tree))))\n )\n return matches\n", "path": "src/cfnlint/languageExtensions.py"}]} | 1,760 | 154 |
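The crash comes from Python's membership test: `"Ref" in val` raises `TypeError` when `val` is an `int`, because ints are not containers. The patch's `hasattr(val, "__iter__")` guard skips scalars before testing membership; a standalone sketch follows, with sample mappings that are illustrative rather than real templates:

```python
def refs_in(mapping):
    # Skip scalar values first; `"Ref" in 3` would raise TypeError.
    return [
        val["Ref"]
        for val in mapping.values()
        if hasattr(val, "__iter__") and "Ref" in val
    ]

print(refs_in({"SomeNumber": 3}))                   # [] instead of a crash
print(refs_in({"Who": {"Ref": "AWS::AccountId"}}))  # ['AWS::AccountId']
```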
gh_patches_debug_3322 | rasdani/github-patches | git_diff | holoviz__panel-3100 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Django autoload_handle broken
#### ALL software version info
Panel = 0.13.0a25
Bokeh = 2.4.2
Django = 2.2.14
When loading a Panel app embedded in Django, the `AutoloadJsConsumer` call just hangs. After stepping through the code, it
appears there is an error that causes it to enter an infinite loop:
```python
> /Users/rditlsc9/miniconda/envs/tethys-vtime/lib/python3.7/site-packages/panel/io/django.py(37)autoload_handle()
-> js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)
TypeError: autoload_js_script() missing 1 required positional argument: 'absolute_url'
```
It appears that #2919 changed the signature of `autoload_js_script`, but the call to it in `panel/io/django.py:autoload_handle` wasn't updated accordingly.
As a side note - is there a better way to get this type of error to log? I wasn't able to see any indication of an error until I stepped through the code in a debugger.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `panel/io/django.py`
Content:
```
1 from urllib.parse import urlparse
2
3 from bokeh.server.django.consumers import DocConsumer, AutoloadJsConsumer
4
5 from .resources import Resources
6 from .server import (
7 autoload_js_script, server_html_page_for_session
8 )
9
10 async def doc_handle(self, body):
11 session = await self._get_session()
12 resources = Resources.from_bokeh(self.application.resources())
13 page = server_html_page_for_session(
14 session, resources=resources, title=session.document.title,
15 template=session.document.template,
16 template_variables=session.document.template_variables
17 )
18 await self.send_response(200, page.encode(), headers=[(b"Content-Type", b"text/html")])
19
20
21 async def autoload_handle(self, body):
22 session = await self._get_session()
23
24 element_id = self.get_argument("bokeh-autoload-element", default=None)
25 if not element_id:
26 raise RuntimeError("No bokeh-autoload-element query parameter")
27
28 app_path = self.get_argument("bokeh-app-path", default="/")
29 absolute_url = self.get_argument("bokeh-absolute-url", default=None)
30
31 if absolute_url:
32 server_url = '{uri.scheme}://{uri.netloc}/'.format(uri=urlparse(absolute_url))
33 else:
34 server_url = None
35
36 resources = self.resources(server_url)
37 js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)
38
39 headers = [
40 (b"Access-Control-Allow-Headers", b"*"),
41 (b"Access-Control-Allow-Methods", b"PUT, GET, OPTIONS"),
42 (b"Access-Control-Allow-Origin", b"*"),
43 (b"Content-Type", b"application/javascript")
44 ]
45 await self.send_response(200, js.encode(), headers=headers)
46
47
48 DocConsumer.handle = doc_handle
49 AutoloadJsConsumer.handle = autoload_handle
50
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/panel/io/django.py b/panel/io/django.py
--- a/panel/io/django.py
+++ b/panel/io/django.py
@@ -34,7 +34,7 @@
server_url = None
resources = self.resources(server_url)
- js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)
+ js = autoload_js_script(session.document, resources, session.token, element_id, app_path, absolute_url)
headers = [
(b"Access-Control-Allow-Headers", b"*"),
| {"golden_diff": "diff --git a/panel/io/django.py b/panel/io/django.py\n--- a/panel/io/django.py\n+++ b/panel/io/django.py\n@@ -34,7 +34,7 @@\n server_url = None\n \n resources = self.resources(server_url)\n- js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)\n+ js = autoload_js_script(session.document, resources, session.token, element_id, app_path, absolute_url)\n \n headers = [\n (b\"Access-Control-Allow-Headers\", b\"*\"),\n", "issue": "Django autoload_handle broken\n#### ALL software version info\r\nPanel = 0.13.0a25\r\nBokeh = 2.4.2\r\nDjango = 2.2.14\r\n\r\nWhen loading a Panel app embedded in Django, the `AutoloadJsConsumer` call just hangs. After stepping through the code it \r\nappears there is an error, which causes it to enter an eternal loop:\r\n\r\n```python\r\n> /Users/rditlsc9/miniconda/envs/tethys-vtime/lib/python3.7/site-packages/panel/io/django.py(37)autoload_handle()\r\n-> js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)\r\n\r\nTypeError: autoload_js_script() missing 1 required positional argument: 'absolute_url'\r\n```\r\n\r\nIt appears that #2919 changed the signature of `autoload_js_script`, but the call to it in `panel/io/django.py:autoload_handle` wasn't updated accordingly.\r\n\r\n\r\nAs a side note - is there a better way to get this type of error to log? I wasn't able to see any indication of an error until I stepped through the code in a debugger.\r\n\n", "before_files": [{"content": "from urllib.parse import urlparse\n\nfrom bokeh.server.django.consumers import DocConsumer, AutoloadJsConsumer\n\nfrom .resources import Resources\nfrom .server import (\n autoload_js_script, server_html_page_for_session\n)\n\nasync def doc_handle(self, body):\n session = await self._get_session()\n resources = Resources.from_bokeh(self.application.resources())\n page = server_html_page_for_session(\n session, resources=resources, title=session.document.title,\n template=session.document.template,\n template_variables=session.document.template_variables\n )\n await self.send_response(200, page.encode(), headers=[(b\"Content-Type\", b\"text/html\")])\n\n\nasync def autoload_handle(self, body):\n session = await self._get_session()\n\n element_id = self.get_argument(\"bokeh-autoload-element\", default=None)\n if not element_id:\n raise RuntimeError(\"No bokeh-autoload-element query parameter\")\n\n app_path = self.get_argument(\"bokeh-app-path\", default=\"/\")\n absolute_url = self.get_argument(\"bokeh-absolute-url\", default=None)\n\n if absolute_url:\n server_url = '{uri.scheme}://{uri.netloc}/'.format(uri=urlparse(absolute_url))\n else:\n server_url = None\n\n resources = self.resources(server_url)\n js = autoload_js_script(resources, session.token, element_id, app_path, absolute_url)\n\n headers = [\n (b\"Access-Control-Allow-Headers\", b\"*\"),\n (b\"Access-Control-Allow-Methods\", b\"PUT, GET, OPTIONS\"),\n (b\"Access-Control-Allow-Origin\", b\"*\"),\n (b\"Content-Type\", b\"application/javascript\")\n ]\n await self.send_response(200, js.encode(), headers=headers)\n\n\nDocConsumer.handle = doc_handle\nAutoloadJsConsumer.handle = autoload_handle\n", "path": "panel/io/django.py"}], "after_files": [{"content": "from urllib.parse import urlparse\n\nfrom bokeh.server.django.consumers import DocConsumer, AutoloadJsConsumer\n\nfrom .resources import Resources\nfrom .server import (\n autoload_js_script, server_html_page_for_session\n)\n\nasync def doc_handle(self, body):\n session = await self._get_session()\n 
resources = Resources.from_bokeh(self.application.resources())\n page = server_html_page_for_session(\n session, resources=resources, title=session.document.title,\n template=session.document.template,\n template_variables=session.document.template_variables\n )\n await self.send_response(200, page.encode(), headers=[(b\"Content-Type\", b\"text/html\")])\n\n\nasync def autoload_handle(self, body):\n session = await self._get_session()\n\n element_id = self.get_argument(\"bokeh-autoload-element\", default=None)\n if not element_id:\n raise RuntimeError(\"No bokeh-autoload-element query parameter\")\n\n app_path = self.get_argument(\"bokeh-app-path\", default=\"/\")\n absolute_url = self.get_argument(\"bokeh-absolute-url\", default=None)\n\n if absolute_url:\n server_url = '{uri.scheme}://{uri.netloc}/'.format(uri=urlparse(absolute_url))\n else:\n server_url = None\n\n resources = self.resources(server_url)\n js = autoload_js_script(session.document, resources, session.token, element_id, app_path, absolute_url)\n\n headers = [\n (b\"Access-Control-Allow-Headers\", b\"*\"),\n (b\"Access-Control-Allow-Methods\", b\"PUT, GET, OPTIONS\"),\n (b\"Access-Control-Allow-Origin\", b\"*\"),\n (b\"Content-Type\", b\"application/javascript\")\n ]\n await self.send_response(200, js.encode(), headers=headers)\n\n\nDocConsumer.handle = doc_handle\nAutoloadJsConsumer.handle = autoload_handle\n", "path": "panel/io/django.py"}]} | 994 | 124 |
gh_patches_debug_875 | rasdani/github-patches | git_diff | dbt-labs__dbt-core-5507 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[CT-876] Could we also now remove our upper bound on `MarkupSafe`, which we put in place earlier this year due to incompatibility with Jinja2?
Remove our upper bound on `MarkupSafe`, which we put in place earlier this year due to incompatibility with Jinja2 (#4745). Also bump the minimum requirement to match [Jinja2's requirements](https://github.com/pallets/jinja/blob/1c4066a4fad5aaeb2ac55809d1d38477cd23a0f6/setup.py#L6).
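As a quick sanity check, Jinja2 publishes its own MarkupSafe constraint in its package metadata, so dbt-core can simply defer to it. A minimal sketch (Python 3.8+ stdlib; the printed value is an assumption for a Jinja2 3.1.x install):
```python
# Inspect the MarkupSafe requirement declared by the installed Jinja2.
from importlib.metadata import requires

markupsafe_reqs = [r for r in requires("Jinja2") or [] if "MarkupSafe" in r]
print(markupsafe_reqs)  # e.g. ['MarkupSafe>=2.0'] for Jinja2 3.1.x
```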
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/setup.py`
Content:
```
1 #!/usr/bin/env python
2 import os
3 import sys
4
5 if sys.version_info < (3, 7, 2):
6 print("Error: dbt does not support this version of Python.")
7 print("Please upgrade to Python 3.7.2 or higher.")
8 sys.exit(1)
9
10
11 from setuptools import setup
12
13 try:
14 from setuptools import find_namespace_packages
15 except ImportError:
16 # the user has a downlevel version of setuptools.
17 print("Error: dbt requires setuptools v40.1.0 or higher.")
18 print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again")
19 sys.exit(1)
20
21
22 this_directory = os.path.abspath(os.path.dirname(__file__))
23 with open(os.path.join(this_directory, "README.md")) as f:
24 long_description = f.read()
25
26
27 package_name = "dbt-core"
28 package_version = "1.3.0a1"
29 description = """With dbt, data analysts and engineers can build analytics \
30 the way engineers build applications."""
31
32
33 setup(
34 name=package_name,
35 version=package_version,
36 description=description,
37 long_description=long_description,
38 long_description_content_type="text/markdown",
39 author="dbt Labs",
40 author_email="[email protected]",
41 url="https://github.com/dbt-labs/dbt-core",
42 packages=find_namespace_packages(include=["dbt", "dbt.*"]),
43 include_package_data=True,
44 test_suite="test",
45 entry_points={
46 "console_scripts": [
47 "dbt = dbt.main:main",
48 ],
49 },
50 install_requires=[
51 "Jinja2==3.1.2",
52 "MarkupSafe>=0.23,<2.1",
53 "agate>=1.6,<1.6.4",
54 "click>=7.0,<9",
55 "colorama>=0.3.9,<0.4.6",
56 "hologram>=0.0.14,<=0.0.15",
57 "isodate>=0.6,<0.7",
58 "logbook>=1.5,<1.6",
59 "mashumaro[msgpack]==3.0.3",
60 "minimal-snowplow-tracker==0.0.2",
61 "networkx>=2.3,<2.8.1;python_version<'3.8'",
62 "networkx>=2.3,<3;python_version>='3.8'",
63 "packaging>=20.9,<22.0",
64 "sqlparse>=0.2.3,<0.5",
65 "dbt-extractor~=0.4.1",
66 "typing-extensions>=3.7.4",
67 "werkzeug>=1,<3",
68 # the following are all to match snowflake-connector-python
69 "requests<3.0.0",
70 "idna>=2.5,<4",
71 "cffi>=1.9,<2.0.0",
72 "pyyaml>=6.0",
73 ],
74 zip_safe=False,
75 classifiers=[
76 "Development Status :: 5 - Production/Stable",
77 "License :: OSI Approved :: Apache Software License",
78 "Operating System :: Microsoft :: Windows",
79 "Operating System :: MacOS :: MacOS X",
80 "Operating System :: POSIX :: Linux",
81 "Programming Language :: Python :: 3.7",
82 "Programming Language :: Python :: 3.8",
83 "Programming Language :: Python :: 3.9",
84 "Programming Language :: Python :: 3.10",
85 ],
86 python_requires=">=3.7.2",
87 )
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/core/setup.py b/core/setup.py
--- a/core/setup.py
+++ b/core/setup.py
@@ -49,7 +49,6 @@
},
install_requires=[
"Jinja2==3.1.2",
- "MarkupSafe>=0.23,<2.1",
"agate>=1.6,<1.6.4",
"click>=7.0,<9",
"colorama>=0.3.9,<0.4.6",
| {"golden_diff": "diff --git a/core/setup.py b/core/setup.py\n--- a/core/setup.py\n+++ b/core/setup.py\n@@ -49,7 +49,6 @@\n },\n install_requires=[\n \"Jinja2==3.1.2\",\n- \"MarkupSafe>=0.23,<2.1\",\n \"agate>=1.6,<1.6.4\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.6\",\n", "issue": "[CT-876] Could we also now remove our upper bound on `MarkupSafe`, which we put in place earlier this year due to incompatibility with Jinja2?\nRemove our upper bound on `MarkupSafe`, which we put in place earlier this year due to incompatibility with Jinja2(#4745). Also bump minimum requirement to match [Jinja2's requirements](https://github.com/pallets/jinja/blob/1c4066a4fad5aaeb2ac55809d1d38477cd23a0f6/setup.py#L6).\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif sys.version_info < (3, 7, 2):\n print(\"Error: dbt does not support this version of Python.\")\n print(\"Please upgrade to Python 3.7.2 or higher.\")\n sys.exit(1)\n\n\nfrom setuptools import setup\n\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print(\"Error: dbt requires setuptools v40.1.0 or higher.\")\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" ' \"and try again\")\n sys.exit(1)\n\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, \"README.md\")) as f:\n long_description = f.read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"1.3.0a1\"\ndescription = \"\"\"With dbt, data analysts and engineers can build analytics \\\nthe way engineers build applications.\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"dbt Labs\",\n author_email=\"[email protected]\",\n url=\"https://github.com/dbt-labs/dbt-core\",\n packages=find_namespace_packages(include=[\"dbt\", \"dbt.*\"]),\n include_package_data=True,\n test_suite=\"test\",\n entry_points={\n \"console_scripts\": [\n \"dbt = dbt.main:main\",\n ],\n },\n install_requires=[\n \"Jinja2==3.1.2\",\n \"MarkupSafe>=0.23,<2.1\",\n \"agate>=1.6,<1.6.4\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.6\",\n \"hologram>=0.0.14,<=0.0.15\",\n \"isodate>=0.6,<0.7\",\n \"logbook>=1.5,<1.6\",\n \"mashumaro[msgpack]==3.0.3\",\n \"minimal-snowplow-tracker==0.0.2\",\n \"networkx>=2.3,<2.8.1;python_version<'3.8'\",\n \"networkx>=2.3,<3;python_version>='3.8'\",\n \"packaging>=20.9,<22.0\",\n \"sqlparse>=0.2.3,<0.5\",\n \"dbt-extractor~=0.4.1\",\n \"typing-extensions>=3.7.4\",\n \"werkzeug>=1,<3\",\n # the following are all to match snowflake-connector-python\n \"requests<3.0.0\",\n \"idna>=2.5,<4\",\n \"cffi>=1.9,<2.0.0\",\n \"pyyaml>=6.0\",\n ],\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: Microsoft :: Windows\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n ],\n python_requires=\">=3.7.2\",\n)\n", "path": "core/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif sys.version_info < (3, 7, 2):\n print(\"Error: dbt does not support this version of Python.\")\n print(\"Please upgrade to 
Python 3.7.2 or higher.\")\n sys.exit(1)\n\n\nfrom setuptools import setup\n\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print(\"Error: dbt requires setuptools v40.1.0 or higher.\")\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" ' \"and try again\")\n sys.exit(1)\n\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, \"README.md\")) as f:\n long_description = f.read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"1.3.0a1\"\ndescription = \"\"\"With dbt, data analysts and engineers can build analytics \\\nthe way engineers build applications.\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"dbt Labs\",\n author_email=\"[email protected]\",\n url=\"https://github.com/dbt-labs/dbt-core\",\n packages=find_namespace_packages(include=[\"dbt\", \"dbt.*\"]),\n include_package_data=True,\n test_suite=\"test\",\n entry_points={\n \"console_scripts\": [\n \"dbt = dbt.main:main\",\n ],\n },\n install_requires=[\n \"Jinja2==3.1.2\",\n \"agate>=1.6,<1.6.4\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.6\",\n \"hologram>=0.0.14,<=0.0.15\",\n \"isodate>=0.6,<0.7\",\n \"logbook>=1.5,<1.6\",\n \"mashumaro==2.9\",\n \"minimal-snowplow-tracker==0.0.2\",\n \"networkx>=2.3,<2.8.1;python_version<'3.8'\",\n \"networkx>=2.3,<3;python_version>='3.8'\",\n \"packaging>=20.9,<22.0\",\n \"sqlparse>=0.2.3,<0.5\",\n \"dbt-extractor~=0.4.1\",\n \"typing-extensions>=3.7.4\",\n \"werkzeug>=1,<3\",\n # the following are all to match snowflake-connector-python\n \"requests<3.0.0\",\n \"idna>=2.5,<4\",\n \"cffi>=1.9,<2.0.0\",\n ],\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: Microsoft :: Windows\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n ],\n python_requires=\">=3.7.2\",\n)\n", "path": "core/setup.py"}]} | 1,343 | 110 |
gh_patches_debug_29931 | rasdani/github-patches | git_diff | deepset-ai__haystack-5811 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Change `SentenceTransformersTextEmbedder` to non-batch mode
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `haystack/preview/components/embedders/sentence_transformers_text_embedder.py`
Content:
```
1 from typing import List, Optional, Union, Dict, Any
2
3 from haystack.preview import component, default_to_dict, default_from_dict
4 from haystack.preview.embedding_backends.sentence_transformers_backend import (
5 _SentenceTransformersEmbeddingBackendFactory,
6 )
7
8
9 @component
10 class SentenceTransformersTextEmbedder:
11 """
12 A component for embedding strings using Sentence Transformers models.
13 """
14
15 def __init__(
16 self,
17 model_name_or_path: str = "sentence-transformers/all-mpnet-base-v2",
18 device: Optional[str] = None,
19 use_auth_token: Union[bool, str, None] = None,
20 prefix: str = "",
21 suffix: str = "",
22 batch_size: int = 32,
23 progress_bar: bool = True,
24 normalize_embeddings: bool = False,
25 ):
26 """
27 Create a SentenceTransformersTextEmbedder component.
28
29 :param model_name_or_path: Local path or name of the model in Hugging Face's model hub, such as ``'sentence-transformers/all-mpnet-base-v2'``.
30 :param device: Device (like 'cuda' / 'cpu') that should be used for computation. If None, checks if a GPU can be used.
31 :param use_auth_token: The API token used to download private models from Hugging Face.
32 If this parameter is set to `True`, then the token generated when running
33 `transformers-cli login` (stored in ~/.huggingface) will be used.
34 :param prefix: A string to add to the beginning of each text.
35 :param suffix: A string to add to the end of each text.
36 :param batch_size: Number of strings to encode at once.
37 :param progress_bar: If true, displays progress bar during embedding.
38 :param normalize_embeddings: If set to true, returned vectors will have length 1.
39 """
40
41 self.model_name_or_path = model_name_or_path
42 # TODO: remove device parameter and use Haystack's device management once migrated
43 self.device = device or "cpu"
44 self.use_auth_token = use_auth_token
45 self.prefix = prefix
46 self.suffix = suffix
47 self.batch_size = batch_size
48 self.progress_bar = progress_bar
49 self.normalize_embeddings = normalize_embeddings
50
51 def to_dict(self) -> Dict[str, Any]:
52 """
53 Serialize this component to a dictionary.
54 """
55 return default_to_dict(
56 self,
57 model_name_or_path=self.model_name_or_path,
58 device=self.device,
59 use_auth_token=self.use_auth_token,
60 prefix=self.prefix,
61 suffix=self.suffix,
62 batch_size=self.batch_size,
63 progress_bar=self.progress_bar,
64 normalize_embeddings=self.normalize_embeddings,
65 )
66
67 @classmethod
68 def from_dict(cls, data: Dict[str, Any]) -> "SentenceTransformersTextEmbedder":
69 """
70 Deserialize this component from a dictionary.
71 """
72 return default_from_dict(cls, data)
73
74 def warm_up(self):
75 """
76 Load the embedding backend.
77 """
78 if not hasattr(self, "embedding_backend"):
79 self.embedding_backend = _SentenceTransformersEmbeddingBackendFactory.get_embedding_backend(
80 model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token
81 )
82
83 @component.output_types(embeddings=List[List[float]])
84 def run(self, texts: List[str]):
85 """Embed a list of strings."""
86 if not isinstance(texts, list) or not isinstance(texts[0], str):
87 raise TypeError(
88 "SentenceTransformersTextEmbedder expects a list of strings as input."
89 "In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder."
90 )
91 if not hasattr(self, "embedding_backend"):
92 raise RuntimeError("The embedding model has not been loaded. Please call warm_up() before running.")
93
94 texts_to_embed = [self.prefix + text + self.suffix for text in texts]
95 embeddings = self.embedding_backend.embed(
96 texts_to_embed,
97 batch_size=self.batch_size,
98 show_progress_bar=self.progress_bar,
99 normalize_embeddings=self.normalize_embeddings,
100 )
101 return {"embeddings": embeddings}
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/haystack/preview/components/embedders/sentence_transformers_text_embedder.py b/haystack/preview/components/embedders/sentence_transformers_text_embedder.py
--- a/haystack/preview/components/embedders/sentence_transformers_text_embedder.py
+++ b/haystack/preview/components/embedders/sentence_transformers_text_embedder.py
@@ -80,22 +80,22 @@
model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token
)
- @component.output_types(embeddings=List[List[float]])
- def run(self, texts: List[str]):
- """Embed a list of strings."""
- if not isinstance(texts, list) or not isinstance(texts[0], str):
+ @component.output_types(embedding=List[float])
+ def run(self, text: str):
+ """Embed a string."""
+ if not isinstance(text, str):
raise TypeError(
- "SentenceTransformersTextEmbedder expects a list of strings as input."
+ "SentenceTransformersTextEmbedder expects a string as input."
"In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder."
)
if not hasattr(self, "embedding_backend"):
raise RuntimeError("The embedding model has not been loaded. Please call warm_up() before running.")
- texts_to_embed = [self.prefix + text + self.suffix for text in texts]
- embeddings = self.embedding_backend.embed(
- texts_to_embed,
+ text_to_embed = self.prefix + text + self.suffix
+ embedding = self.embedding_backend.embed(
+ [text_to_embed],
batch_size=self.batch_size,
show_progress_bar=self.progress_bar,
normalize_embeddings=self.normalize_embeddings,
- )
- return {"embeddings": embeddings}
+ )[0]
+ return {"embedding": embedding}
| {"golden_diff": "diff --git a/haystack/preview/components/embedders/sentence_transformers_text_embedder.py b/haystack/preview/components/embedders/sentence_transformers_text_embedder.py\n--- a/haystack/preview/components/embedders/sentence_transformers_text_embedder.py\n+++ b/haystack/preview/components/embedders/sentence_transformers_text_embedder.py\n@@ -80,22 +80,22 @@\n model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token\n )\n \n- @component.output_types(embeddings=List[List[float]])\n- def run(self, texts: List[str]):\n- \"\"\"Embed a list of strings.\"\"\"\n- if not isinstance(texts, list) or not isinstance(texts[0], str):\n+ @component.output_types(embedding=List[float])\n+ def run(self, text: str):\n+ \"\"\"Embed a string.\"\"\"\n+ if not isinstance(text, str):\n raise TypeError(\n- \"SentenceTransformersTextEmbedder expects a list of strings as input.\"\n+ \"SentenceTransformersTextEmbedder expects a string as input.\"\n \"In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder.\"\n )\n if not hasattr(self, \"embedding_backend\"):\n raise RuntimeError(\"The embedding model has not been loaded. Please call warm_up() before running.\")\n \n- texts_to_embed = [self.prefix + text + self.suffix for text in texts]\n- embeddings = self.embedding_backend.embed(\n- texts_to_embed,\n+ text_to_embed = self.prefix + text + self.suffix\n+ embedding = self.embedding_backend.embed(\n+ [text_to_embed],\n batch_size=self.batch_size,\n show_progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n- )\n- return {\"embeddings\": embeddings}\n+ )[0]\n+ return {\"embedding\": embedding}\n", "issue": "Change `SentenceTransformersTextEmbedder` to non-batch mode\n\n", "before_files": [{"content": "from typing import List, Optional, Union, Dict, Any\n\nfrom haystack.preview import component, default_to_dict, default_from_dict\nfrom haystack.preview.embedding_backends.sentence_transformers_backend import (\n _SentenceTransformersEmbeddingBackendFactory,\n)\n\n\n@component\nclass SentenceTransformersTextEmbedder:\n \"\"\"\n A component for embedding strings using Sentence Transformers models.\n \"\"\"\n\n def __init__(\n self,\n model_name_or_path: str = \"sentence-transformers/all-mpnet-base-v2\",\n device: Optional[str] = None,\n use_auth_token: Union[bool, str, None] = None,\n prefix: str = \"\",\n suffix: str = \"\",\n batch_size: int = 32,\n progress_bar: bool = True,\n normalize_embeddings: bool = False,\n ):\n \"\"\"\n Create a SentenceTransformersTextEmbedder component.\n\n :param model_name_or_path: Local path or name of the model in Hugging Face's model hub, such as ``'sentence-transformers/all-mpnet-base-v2'``.\n :param device: Device (like 'cuda' / 'cpu') that should be used for computation. 
If None, checks if a GPU can be used.\n :param use_auth_token: The API token used to download private models from Hugging Face.\n If this parameter is set to `True`, then the token generated when running\n `transformers-cli login` (stored in ~/.huggingface) will be used.\n :param prefix: A string to add to the beginning of each text.\n :param suffix: A string to add to the end of each text.\n :param batch_size: Number of strings to encode at once.\n :param progress_bar: If true, displays progress bar during embedding.\n :param normalize_embeddings: If set to true, returned vectors will have length 1.\n \"\"\"\n\n self.model_name_or_path = model_name_or_path\n # TODO: remove device parameter and use Haystack's device management once migrated\n self.device = device or \"cpu\"\n self.use_auth_token = use_auth_token\n self.prefix = prefix\n self.suffix = suffix\n self.batch_size = batch_size\n self.progress_bar = progress_bar\n self.normalize_embeddings = normalize_embeddings\n\n def to_dict(self) -> Dict[str, Any]:\n \"\"\"\n Serialize this component to a dictionary.\n \"\"\"\n return default_to_dict(\n self,\n model_name_or_path=self.model_name_or_path,\n device=self.device,\n use_auth_token=self.use_auth_token,\n prefix=self.prefix,\n suffix=self.suffix,\n batch_size=self.batch_size,\n progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n )\n\n @classmethod\n def from_dict(cls, data: Dict[str, Any]) -> \"SentenceTransformersTextEmbedder\":\n \"\"\"\n Deserialize this component from a dictionary.\n \"\"\"\n return default_from_dict(cls, data)\n\n def warm_up(self):\n \"\"\"\n Load the embedding backend.\n \"\"\"\n if not hasattr(self, \"embedding_backend\"):\n self.embedding_backend = _SentenceTransformersEmbeddingBackendFactory.get_embedding_backend(\n model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token\n )\n\n @component.output_types(embeddings=List[List[float]])\n def run(self, texts: List[str]):\n \"\"\"Embed a list of strings.\"\"\"\n if not isinstance(texts, list) or not isinstance(texts[0], str):\n raise TypeError(\n \"SentenceTransformersTextEmbedder expects a list of strings as input.\"\n \"In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder.\"\n )\n if not hasattr(self, \"embedding_backend\"):\n raise RuntimeError(\"The embedding model has not been loaded. 
Please call warm_up() before running.\")\n\n texts_to_embed = [self.prefix + text + self.suffix for text in texts]\n embeddings = self.embedding_backend.embed(\n texts_to_embed,\n batch_size=self.batch_size,\n show_progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n )\n return {\"embeddings\": embeddings}\n", "path": "haystack/preview/components/embedders/sentence_transformers_text_embedder.py"}], "after_files": [{"content": "from typing import List, Optional, Union, Dict, Any\n\nfrom haystack.preview import component, default_to_dict, default_from_dict\nfrom haystack.preview.embedding_backends.sentence_transformers_backend import (\n _SentenceTransformersEmbeddingBackendFactory,\n)\n\n\n@component\nclass SentenceTransformersTextEmbedder:\n \"\"\"\n A component for embedding strings using Sentence Transformers models.\n \"\"\"\n\n def __init__(\n self,\n model_name_or_path: str = \"sentence-transformers/all-mpnet-base-v2\",\n device: Optional[str] = None,\n use_auth_token: Union[bool, str, None] = None,\n prefix: str = \"\",\n suffix: str = \"\",\n batch_size: int = 32,\n progress_bar: bool = True,\n normalize_embeddings: bool = False,\n ):\n \"\"\"\n Create a SentenceTransformersTextEmbedder component.\n\n :param model_name_or_path: Local path or name of the model in Hugging Face's model hub, such as ``'sentence-transformers/all-mpnet-base-v2'``.\n :param device: Device (like 'cuda' / 'cpu') that should be used for computation. If None, checks if a GPU can be used.\n :param use_auth_token: The API token used to download private models from Hugging Face.\n If this parameter is set to `True`, then the token generated when running\n `transformers-cli login` (stored in ~/.huggingface) will be used.\n :param prefix: A string to add to the beginning of each text.\n :param suffix: A string to add to the end of each text.\n :param batch_size: Number of strings to encode at once.\n :param progress_bar: If true, displays progress bar during embedding.\n :param normalize_embeddings: If set to true, returned vectors will have length 1.\n \"\"\"\n\n self.model_name_or_path = model_name_or_path\n # TODO: remove device parameter and use Haystack's device management once migrated\n self.device = device or \"cpu\"\n self.use_auth_token = use_auth_token\n self.prefix = prefix\n self.suffix = suffix\n self.batch_size = batch_size\n self.progress_bar = progress_bar\n self.normalize_embeddings = normalize_embeddings\n\n def to_dict(self) -> Dict[str, Any]:\n \"\"\"\n Serialize this component to a dictionary.\n \"\"\"\n return default_to_dict(\n self,\n model_name_or_path=self.model_name_or_path,\n device=self.device,\n use_auth_token=self.use_auth_token,\n prefix=self.prefix,\n suffix=self.suffix,\n batch_size=self.batch_size,\n progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n )\n\n @classmethod\n def from_dict(cls, data: Dict[str, Any]) -> \"SentenceTransformersTextEmbedder\":\n \"\"\"\n Deserialize this component from a dictionary.\n \"\"\"\n return default_from_dict(cls, data)\n\n def warm_up(self):\n \"\"\"\n Load the embedding backend.\n \"\"\"\n if not hasattr(self, \"embedding_backend\"):\n self.embedding_backend = _SentenceTransformersEmbeddingBackendFactory.get_embedding_backend(\n model_name_or_path=self.model_name_or_path, device=self.device, use_auth_token=self.use_auth_token\n )\n\n @component.output_types(embedding=List[float])\n def run(self, text: str):\n \"\"\"Embed a string.\"\"\"\n if not isinstance(text, str):\n raise 
TypeError(\n \"SentenceTransformersTextEmbedder expects a string as input.\"\n \"In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder.\"\n )\n if not hasattr(self, \"embedding_backend\"):\n raise RuntimeError(\"The embedding model has not been loaded. Please call warm_up() before running.\")\n\n text_to_embed = self.prefix + text + self.suffix\n embedding = self.embedding_backend.embed(\n [text_to_embed],\n batch_size=self.batch_size,\n show_progress_bar=self.progress_bar,\n normalize_embeddings=self.normalize_embeddings,\n )[0]\n return {\"embedding\": embedding}\n", "path": "haystack/preview/components/embedders/sentence_transformers_text_embedder.py"}]} | 1,367 | 412 |
gh_patches_debug_6558 | rasdani/github-patches | git_diff | strawberry-graphql__strawberry-615 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
strawberry.utils.typing.get_optional_annotation fails when provided an `Optional[Union]`
`strawberry.utils.typing.get_optional_annotation` fails when provided an `Optional[Union]`
```pycon
>>> from typing import Optional, Union
>>> from strawberry.utils.typing import get_optional_annotation
>>> get_optional_annotation(Optional[Union[int, str]])
<class 'int'>
```
This should return `Union[int, str]` instead.
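A minimal sketch of a fix, assuming the `copy_with` method available on `typing`'s generic aliases in CPython 3.7+ (it rebuilds the Union whenever more than one non-None member remains):
```python
from typing import Optional, Type, Union

def get_optional_annotation(annotation: Type) -> Type:
    types = annotation.__args__
    non_none_types = tuple(x for x in types if x is not type(None))
    if len(non_none_types) > 1:
        # Several members remain: return a copy of the Union itself.
        return annotation.copy_with(non_none_types)
    return non_none_types[0]

print(get_optional_annotation(Optional[Union[int, str]]))  # typing.Union[int, str]
print(get_optional_annotation(Optional[int]))              # <class 'int'>
```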
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `strawberry/utils/typing.py`
Content:
```
1 import typing
2 from collections.abc import AsyncGenerator, Callable
3 from typing import Type, TypeVar
4
5
6 try:
7 from typing import ForwardRef # type: ignore
8 except ImportError: # pragma: no cover
9 # ForwardRef is private in python 3.6 and 3.7
10 from typing import _ForwardRef as ForwardRef # type: ignore
11
12
13 def is_list(annotation: Type) -> bool:
14 """Returns True if annotation is a List"""
15
16 annotation_origin = getattr(annotation, "__origin__", None)
17
18 return annotation_origin == list
19
20
21 def is_union(annotation: Type) -> bool:
22 """Returns True if annotation is a Union"""
23
24 annotation_origin = getattr(annotation, "__origin__", None)
25
26 return annotation_origin == typing.Union
27
28
29 def is_optional(annotation: Type) -> bool:
30 """Returns True if the annotation is Optional[SomeType]"""
31
32 # Optionals are represented as unions
33
34 if not is_union(annotation):
35 return False
36
37 types = annotation.__args__
38
39 # A Union to be optional needs to have at least one None type
40 return any([x == None.__class__ for x in types]) # noqa:E711
41
42
43 def get_optional_annotation(annotation: Type) -> Type:
44 types = annotation.__args__
45 non_none_types = [x for x in types if x != None.__class__] # noqa:E711
46
47 return non_none_types[0]
48
49
50 def get_list_annotation(annotation: Type) -> Type:
51 return annotation.__args__[0]
52
53
54 def is_async_generator(annotation: Type) -> bool:
55 return getattr(annotation, "__origin__", None) == AsyncGenerator
56
57
58 def get_async_generator_annotation(annotation: Type) -> Type:
59 return annotation.__args__[0]
60
61
62 def is_generic(annotation: Type) -> bool:
63 """Returns True if the annotation is or extends a generic."""
64 return (
65 isinstance(annotation, type)
66 and issubclass(annotation, typing.Generic) # type:ignore
67 or isinstance(annotation, typing._GenericAlias) # type:ignore
68 and annotation.__origin__
69 not in (
70 list,
71 typing.Union,
72 tuple,
73 typing.ClassVar,
74 AsyncGenerator,
75 )
76 )
77
78
79 def is_type_var(annotation: Type) -> bool:
80 """Returns True if the annotation is a TypeVar."""
81
82 return isinstance(annotation, TypeVar) # type:ignore
83
84
85 def has_type_var(annotation: Type) -> bool:
86 """
87 Returns True if the annotation or any of
88 its argument have a TypeVar as argument.
89 """
90 return any(
91 is_type_var(arg) or has_type_var(arg)
92 for arg in getattr(annotation, "__args__", [])
93 )
94
95
96 def get_parameters(annotation: Type):
97 if (
98 isinstance(annotation, typing._GenericAlias) # type:ignore
99 or isinstance(annotation, type)
100 and issubclass(annotation, typing.Generic) # type:ignore
101 and annotation is not typing.Generic
102 ):
103 return annotation.__parameters__
104 else:
105 return () # pragma: no cover
106
107
108 def get_origin(annotation: Type):
109 if isinstance(annotation, typing._GenericAlias): # type:ignore
110 return (
111 annotation.__origin__
112 if annotation.__origin__ is not typing.ClassVar
113 else None
114 )
115
116 if annotation is typing.Generic: # pragma: no cover
117 return typing.Generic
118
119 return None # pragma: no cover
120
121
122 def get_args(annotation: Type):
123 if isinstance(annotation, typing._GenericAlias): # type:ignore
124 res = annotation.__args__
125
126 if (
127 get_origin(annotation) is Callable and res[0] is not Ellipsis
128 ): # pragma: no cover
129 res = (list(res[:-1]), res[-1])
130
131 return res
132
133 return ()
134
135
136 def is_forward_ref(annotation: Type) -> bool:
137 return isinstance(annotation, ForwardRef)
138
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/strawberry/utils/typing.py b/strawberry/utils/typing.py
--- a/strawberry/utils/typing.py
+++ b/strawberry/utils/typing.py
@@ -42,7 +42,14 @@
def get_optional_annotation(annotation: Type) -> Type:
types = annotation.__args__
- non_none_types = [x for x in types if x != None.__class__] # noqa:E711
+
+ non_none_types = tuple(x for x in types if x != None.__class__) # noqa:E711
+
+ # if we have multiple non none types we want to return a copy of this
+ # type (normally a Union type).
+
+ if len(non_none_types) > 1:
+ return annotation.copy_with(non_none_types)
return non_none_types[0]
| {"golden_diff": "diff --git a/strawberry/utils/typing.py b/strawberry/utils/typing.py\n--- a/strawberry/utils/typing.py\n+++ b/strawberry/utils/typing.py\n@@ -42,7 +42,14 @@\n \n def get_optional_annotation(annotation: Type) -> Type:\n types = annotation.__args__\n- non_none_types = [x for x in types if x != None.__class__] # noqa:E711\n+\n+ non_none_types = tuple(x for x in types if x != None.__class__) # noqa:E711\n+\n+ # if we have multiple non none types we want to return a copy of this\n+ # type (normally a Union type).\n+\n+ if len(non_none_types) > 1:\n+ return annotation.copy_with(non_none_types)\n \n return non_none_types[0]\n", "issue": "strawberry.utils.typing.get_optional_annotation fails when provided an `Optional[Union]`\n`strawberry.utils.typing.get_optional_annotation` fails when provided an `Optional[Union]`\r\n\r\n```pycon\r\n>>> from typing import Optional, Union\r\n>>> from strawberry.utils.typing import get_optional_annotation\r\n\r\n>>> get_optional_annotation(Optional[Union[int, str]])\r\n<class 'int'>\r\n```\r\nThis should return `Union[int, str]` instead \n", "before_files": [{"content": "import typing\nfrom collections.abc import AsyncGenerator, Callable\nfrom typing import Type, TypeVar\n\n\ntry:\n from typing import ForwardRef # type: ignore\nexcept ImportError: # pragma: no cover\n # ForwardRef is private in python 3.6 and 3.7\n from typing import _ForwardRef as ForwardRef # type: ignore\n\n\ndef is_list(annotation: Type) -> bool:\n \"\"\"Returns True if annotation is a List\"\"\"\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == list\n\n\ndef is_union(annotation: Type) -> bool:\n \"\"\"Returns True if annotation is a Union\"\"\"\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == typing.Union\n\n\ndef is_optional(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is Optional[SomeType]\"\"\"\n\n # Optionals are represented as unions\n\n if not is_union(annotation):\n return False\n\n types = annotation.__args__\n\n # A Union to be optional needs to have at least one None type\n return any([x == None.__class__ for x in types]) # noqa:E711\n\n\ndef get_optional_annotation(annotation: Type) -> Type:\n types = annotation.__args__\n non_none_types = [x for x in types if x != None.__class__] # noqa:E711\n\n return non_none_types[0]\n\n\ndef get_list_annotation(annotation: Type) -> Type:\n return annotation.__args__[0]\n\n\ndef is_async_generator(annotation: Type) -> bool:\n return getattr(annotation, \"__origin__\", None) == AsyncGenerator\n\n\ndef get_async_generator_annotation(annotation: Type) -> Type:\n return annotation.__args__[0]\n\n\ndef is_generic(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is or extends a generic.\"\"\"\n return (\n isinstance(annotation, type)\n and issubclass(annotation, typing.Generic) # type:ignore\n or isinstance(annotation, typing._GenericAlias) # type:ignore\n and annotation.__origin__\n not in (\n list,\n typing.Union,\n tuple,\n typing.ClassVar,\n AsyncGenerator,\n )\n )\n\n\ndef is_type_var(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is a TypeVar.\"\"\"\n\n return isinstance(annotation, TypeVar) # type:ignore\n\n\ndef has_type_var(annotation: Type) -> bool:\n \"\"\"\n Returns True if the annotation or any of\n its argument have a TypeVar as argument.\n \"\"\"\n return any(\n is_type_var(arg) or has_type_var(arg)\n for arg in getattr(annotation, \"__args__\", [])\n )\n\n\ndef 
get_parameters(annotation: Type):\n if (\n isinstance(annotation, typing._GenericAlias) # type:ignore\n or isinstance(annotation, type)\n and issubclass(annotation, typing.Generic) # type:ignore\n and annotation is not typing.Generic\n ):\n return annotation.__parameters__\n else:\n return () # pragma: no cover\n\n\ndef get_origin(annotation: Type):\n if isinstance(annotation, typing._GenericAlias): # type:ignore\n return (\n annotation.__origin__\n if annotation.__origin__ is not typing.ClassVar\n else None\n )\n\n if annotation is typing.Generic: # pragma: no cover\n return typing.Generic\n\n return None # pragma: no cover\n\n\ndef get_args(annotation: Type):\n if isinstance(annotation, typing._GenericAlias): # type:ignore\n res = annotation.__args__\n\n if (\n get_origin(annotation) is Callable and res[0] is not Ellipsis\n ): # pragma: no cover\n res = (list(res[:-1]), res[-1])\n\n return res\n\n return ()\n\n\ndef is_forward_ref(annotation: Type) -> bool:\n return isinstance(annotation, ForwardRef)\n", "path": "strawberry/utils/typing.py"}], "after_files": [{"content": "import typing\nfrom collections.abc import AsyncGenerator, Callable\nfrom typing import Type, TypeVar\n\n\ntry:\n from typing import ForwardRef # type: ignore\nexcept ImportError: # pragma: no cover\n # ForwardRef is private in python 3.6 and 3.7\n from typing import _ForwardRef as ForwardRef # type: ignore\n\n\ndef is_list(annotation: Type) -> bool:\n \"\"\"Returns True if annotation is a List\"\"\"\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == list\n\n\ndef is_union(annotation: Type) -> bool:\n \"\"\"Returns True if annotation is a Union\"\"\"\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == typing.Union\n\n\ndef is_optional(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is Optional[SomeType]\"\"\"\n\n # Optionals are represented as unions\n\n if not is_union(annotation):\n return False\n\n types = annotation.__args__\n\n # A Union to be optional needs to have at least one None type\n return any([x == None.__class__ for x in types]) # noqa:E711\n\n\ndef get_optional_annotation(annotation: Type) -> Type:\n types = annotation.__args__\n\n non_none_types = tuple(x for x in types if x != None.__class__) # noqa:E711\n\n # if we have multiple non none types we want to return a copy of this\n # type (normally a Union type).\n\n if len(non_none_types) > 1:\n return annotation.copy_with(non_none_types)\n\n return non_none_types[0]\n\n\ndef get_list_annotation(annotation: Type) -> Type:\n return annotation.__args__[0]\n\n\ndef is_async_generator(annotation: Type) -> bool:\n return getattr(annotation, \"__origin__\", None) == AsyncGenerator\n\n\ndef get_async_generator_annotation(annotation: Type) -> Type:\n return annotation.__args__[0]\n\n\ndef is_generic(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is or extends a generic.\"\"\"\n return (\n isinstance(annotation, type)\n and issubclass(annotation, typing.Generic) # type:ignore\n or isinstance(annotation, typing._GenericAlias) # type:ignore\n and annotation.__origin__\n not in (\n list,\n typing.Union,\n tuple,\n typing.ClassVar,\n AsyncGenerator,\n )\n )\n\n\ndef is_type_var(annotation: Type) -> bool:\n \"\"\"Returns True if the annotation is a TypeVar.\"\"\"\n\n return isinstance(annotation, TypeVar) # type:ignore\n\n\ndef has_type_var(annotation: Type) -> bool:\n \"\"\"\n Returns True if the annotation or any of\n its argument have a 
TypeVar as argument.\n \"\"\"\n return any(\n is_type_var(arg) or has_type_var(arg)\n for arg in getattr(annotation, \"__args__\", [])\n )\n\n\ndef get_parameters(annotation: Type):\n if (\n isinstance(annotation, typing._GenericAlias) # type:ignore\n or isinstance(annotation, type)\n and issubclass(annotation, typing.Generic) # type:ignore\n and annotation is not typing.Generic\n ):\n return annotation.__parameters__\n else:\n return () # pragma: no cover\n\n\ndef get_origin(annotation: Type):\n if isinstance(annotation, typing._GenericAlias): # type:ignore\n return (\n annotation.__origin__\n if annotation.__origin__ is not typing.ClassVar\n else None\n )\n\n if annotation is typing.Generic: # pragma: no cover\n return typing.Generic\n\n return None # pragma: no cover\n\n\ndef get_args(annotation: Type):\n if isinstance(annotation, typing._GenericAlias): # type:ignore\n res = annotation.__args__\n\n if (\n get_origin(annotation) is Callable and res[0] is not Ellipsis\n ): # pragma: no cover\n res = (list(res[:-1]), res[-1])\n\n return res\n\n return ()\n\n\ndef is_forward_ref(annotation: Type) -> bool:\n return isinstance(annotation, ForwardRef)\n", "path": "strawberry/utils/typing.py"}]} | 1,526 | 194 |
gh_patches_debug_32082 | rasdani/github-patches | git_diff | aws__aws-cli-900 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Provide a better error message for invalid endpoint URLs
The error message could provide more context about what exactly went wrong with the request. For example:
```
$ aws s3api list-buckets --endpoint-url example.com
Invalid URL u'/': No schema supplied
```
A better error message would be something like:
```
$ aws s3api list-buckets --endpoint-url example.com
Bad value for --endpoint-url "example.com": scheme is missing. Must be of the form http://<hostname>/ or https://<hostname>/
```
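A minimal sketch of the validation this implies, using the stdlib `urlparse` (shown in Python 3 form; the function name and message wording are taken from the suggestion above, not from the codebase):
```python
from urllib.parse import urlparse

def resolve_endpoint_url(value: str) -> str:
    # An endpoint URL must carry an explicit scheme for the HTTP layer.
    if not urlparse(value).scheme:
        raise ValueError(
            'Bad value for --endpoint-url "%s": scheme is missing. Must be '
            'of the form http://<hostname>/ or https://<hostname>/' % value
        )
    return value

print(resolve_endpoint_url("https://example.com"))  # passes through
resolve_endpoint_url("example.com")                 # raises ValueError
```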
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `awscli/compat.py`
Content:
```
1 # Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2
3 # Licensed under the Apache License, Version 2.0 (the "License"). You
4 # may not use this file except in compliance with the License. A copy of
5 # the License is located at
6
7 # http://aws.amazon.com/apache2.0/
8
9 # or in the "license" file accompanying this file. This file is
10 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 # ANY KIND, either express or implied. See the License for the specific
12 # language governing permissions and limitations under the License.
13 import sys
14 import six
15
16 if six.PY3:
17 import locale
18
19 def get_stdout_text_writer():
20 return sys.stdout
21
22 def compat_open(filename, mode='r', encoding=None):
23 """Back-port open() that accepts an encoding argument.
24
25 In python3 this uses the built in open() and in python2 this
26 uses the io.open() function.
27
28 If the file is not being opened in binary mode, then we'll
29 use locale.getpreferredencoding() to find the preferred
30 encoding.
31
32 """
33 if 'b' not in mode:
34 encoding = locale.getpreferredencoding()
35 return open(filename, mode, encoding=encoding)
36
37 else:
38 import codecs
39 import locale
40 import io
41
42 def get_stdout_text_writer():
43 # In python3, all the sys.stdout/sys.stderr streams are in text
44 # mode. This means they expect unicode, and will encode the
45 # unicode automatically before actually writing to stdout/stderr.
46 # In python2, that's not the case. In order to provide a consistent
47 # interface, we can create a wrapper around sys.stdout that will take
48 # unicode, and automatically encode it to the preferred encoding.
49 # That way consumers can just call get_stdout_text_writer() and write
50 # unicode to the returned stream. Note that get_stdout_text_writer
51 # just returns sys.stdout in the PY3 section above because python3
52 # handles this.
53 return codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
54
55 def compat_open(filename, mode='r', encoding=None):
56 # See docstring for compat_open in the PY3 section above.
57 if 'b' not in mode:
58 encoding = locale.getpreferredencoding()
59 return io.open(filename, mode, encoding=encoding)
60
```
Path: `awscli/customizations/globalargs.py`
Content:
```
1 # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License"). You
4 # may not use this file except in compliance with the License. A copy of
5 # the License is located at
6 #
7 # http://aws.amazon.com/apache2.0/
8 #
9 # or in the "license" file accompanying this file. This file is
10 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 # ANY KIND, either express or implied. See the License for the specific
12 # language governing permissions and limitations under the License.
13 import sys
14 import os
15
16 import jmespath
17
18
19 def register_parse_global_args(cli):
20 cli.register('top-level-args-parsed', resolve_types)
21 cli.register('top-level-args-parsed', no_sign_request)
22
23
24 def resolve_types(parsed_args, **kwargs):
25 # This emulates the "type" arg from argparse, but does so in a way
26 # that plugins can also hook into this process.
27 _resolve_arg(parsed_args, 'query')
28 _resolve_arg(parsed_args, 'verify_ssl')
29
30
31 def _resolve_arg(parsed_args, name):
32 value = getattr(parsed_args, name, None)
33 if value is not None:
34 new_value = getattr(sys.modules[__name__], '_resolve_%s' % name)(value)
35 setattr(parsed_args, name, new_value)
36
37
38 def _resolve_query(value):
39 try:
40 return jmespath.compile(value)
41 except Exception as e:
42 raise ValueError("Bad value for --query %s: %s" % (value, str(e)))
43
44
45 def _resolve_verify_ssl(value):
46 verify = None
47 if not value:
48 verify = False
49 else:
50 verify = os.environ.get('AWS_CA_BUNDLE')
51 return verify
52
53
54 def no_sign_request(parsed_args, session, **kwargs):
55 if not parsed_args.sign_request:
56 # In order to make signing disabled for all requests
57 # we need to set the signature_version to None for
58 # any service created. This ensures that get_endpoint()
59 # will not look for auth.
60 session.register('service-created', disable_signing)
61
62
63 def disable_signing(service, **kwargs):
64 service.signature_version = None
65
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/awscli/compat.py b/awscli/compat.py
--- a/awscli/compat.py
+++ b/awscli/compat.py
@@ -15,6 +15,7 @@
if six.PY3:
import locale
+ import urllib.parse as urlparse
def get_stdout_text_writer():
return sys.stdout
@@ -38,6 +39,7 @@
import codecs
import locale
import io
+ import urlparse
def get_stdout_text_writer():
# In python3, all the sys.stdout/sys.stderr streams are in text
diff --git a/awscli/customizations/globalargs.py b/awscli/customizations/globalargs.py
--- a/awscli/customizations/globalargs.py
+++ b/awscli/customizations/globalargs.py
@@ -15,6 +15,8 @@
import jmespath
+from awscli.compat import urlparse
+
def register_parse_global_args(cli):
cli.register('top-level-args-parsed', resolve_types)
@@ -26,6 +28,7 @@
# that plugins can also hook into this process.
_resolve_arg(parsed_args, 'query')
_resolve_arg(parsed_args, 'verify_ssl')
+ _resolve_arg(parsed_args, 'endpoint_url')
def _resolve_arg(parsed_args, name):
@@ -51,6 +54,17 @@
return verify
+def _resolve_endpoint_url(value):
+ parsed = urlparse.urlparse(value)
+ # Our http library requires you specify an endpoint url
+ # that contains a scheme, so we'll verify that up front.
+ if not parsed.scheme:
+ raise ValueError('Bad value for --endpoint-url "%s": scheme is '
+ 'missing. Must be of the form '
+ 'http://<hostname>/ or https://<hostname>/' % value)
+ return value
+
+
def no_sign_request(parsed_args, session, **kwargs):
if not parsed_args.sign_request:
# In order to make signing disabled for all requests
| {"golden_diff": "diff --git a/awscli/compat.py b/awscli/compat.py\n--- a/awscli/compat.py\n+++ b/awscli/compat.py\n@@ -15,6 +15,7 @@\n \n if six.PY3:\n import locale\n+ import urllib.parse as urlparse\n \n def get_stdout_text_writer():\n return sys.stdout\n@@ -38,6 +39,7 @@\n import codecs\n import locale\n import io\n+ import urlparse\n \n def get_stdout_text_writer():\n # In python3, all the sys.stdout/sys.stderr streams are in text\ndiff --git a/awscli/customizations/globalargs.py b/awscli/customizations/globalargs.py\n--- a/awscli/customizations/globalargs.py\n+++ b/awscli/customizations/globalargs.py\n@@ -15,6 +15,8 @@\n \n import jmespath\n \n+from awscli.compat import urlparse\n+\n \n def register_parse_global_args(cli):\n cli.register('top-level-args-parsed', resolve_types)\n@@ -26,6 +28,7 @@\n # that plugins can also hook into this process.\n _resolve_arg(parsed_args, 'query')\n _resolve_arg(parsed_args, 'verify_ssl')\n+ _resolve_arg(parsed_args, 'endpoint_url')\n \n \n def _resolve_arg(parsed_args, name):\n@@ -51,6 +54,17 @@\n return verify\n \n \n+def _resolve_endpoint_url(value):\n+ parsed = urlparse.urlparse(value)\n+ # Our http library requires you specify an endpoint url\n+ # that contains a scheme, so we'll verify that up front.\n+ if not parsed.scheme:\n+ raise ValueError('Bad value for --endpoint-url \"%s\": scheme is '\n+ 'missing. Must be of the form '\n+ 'http://<hostname>/ or https://<hostname>/' % value)\n+ return value\n+\n+\n def no_sign_request(parsed_args, session, **kwargs):\n if not parsed_args.sign_request:\n # In order to make signing disabled for all requests\n", "issue": "Provide better error message for invalid endpoint urls\nThe error message could provide more context about what exactly went wrong with the request. For example:\n\n```\n$ aws s3api list-buckets --endpoint-url example.com\n\nInvalid URL u'/': No schema supplied\n```\n\nA better error message would be something like:\n\n```\n$ aws s3api list-buckets --endpoint-url example.com\n\nBad value for --endpoint-url \"example.com\": scheme is missing. Must be of the form http://<hostname>/ or https://<hostname>/\n```\n\n", "before_files": [{"content": "# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n\n# http://aws.amazon.com/apache2.0/\n\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\nimport sys\nimport six\n\nif six.PY3:\n import locale\n\n def get_stdout_text_writer():\n return sys.stdout\n\n def compat_open(filename, mode='r', encoding=None):\n \"\"\"Back-port open() that accepts an encoding argument.\n\n In python3 this uses the built in open() and in python2 this\n uses the io.open() function.\n\n If the file is not being opened in binary mode, then we'll\n use locale.getpreferredencoding() to find the preferred\n encoding.\n\n \"\"\"\n if 'b' not in mode:\n encoding = locale.getpreferredencoding()\n return open(filename, mode, encoding=encoding)\n\nelse:\n import codecs\n import locale\n import io\n\n def get_stdout_text_writer():\n # In python3, all the sys.stdout/sys.stderr streams are in text\n # mode. 
This means they expect unicode, and will encode the\n # unicode automatically before actually writing to stdout/stderr.\n # In python2, that's not the case. In order to provide a consistent\n # interface, we can create a wrapper around sys.stdout that will take\n # unicode, and automatically encode it to the preferred encoding.\n # That way consumers can just call get_stdout_text_writer() and write\n # unicode to the returned stream. Note that get_stdout_text_writer\n # just returns sys.stdout in the PY3 section above because python3\n # handles this.\n return codecs.getwriter(locale.getpreferredencoding())(sys.stdout)\n\n def compat_open(filename, mode='r', encoding=None):\n # See docstring for compat_open in the PY3 section above.\n if 'b' not in mode:\n encoding = locale.getpreferredencoding()\n return io.open(filename, mode, encoding=encoding)\n", "path": "awscli/compat.py"}, {"content": "# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\nimport sys\nimport os\n\nimport jmespath\n\n\ndef register_parse_global_args(cli):\n cli.register('top-level-args-parsed', resolve_types)\n cli.register('top-level-args-parsed', no_sign_request)\n\n\ndef resolve_types(parsed_args, **kwargs):\n # This emulates the \"type\" arg from argparse, but does so in a way\n # that plugins can also hook into this process.\n _resolve_arg(parsed_args, 'query')\n _resolve_arg(parsed_args, 'verify_ssl')\n\n\ndef _resolve_arg(parsed_args, name):\n value = getattr(parsed_args, name, None)\n if value is not None:\n new_value = getattr(sys.modules[__name__], '_resolve_%s' % name)(value)\n setattr(parsed_args, name, new_value)\n\n\ndef _resolve_query(value):\n try:\n return jmespath.compile(value)\n except Exception as e:\n raise ValueError(\"Bad value for --query %s: %s\" % (value, str(e)))\n\n\ndef _resolve_verify_ssl(value):\n verify = None\n if not value:\n verify = False\n else:\n verify = os.environ.get('AWS_CA_BUNDLE')\n return verify\n\n\ndef no_sign_request(parsed_args, session, **kwargs):\n if not parsed_args.sign_request:\n # In order to make signing disabled for all requests\n # we need to set the signature_version to None for\n # any service created. This ensures that get_endpoint()\n # will not look for auth.\n session.register('service-created', disable_signing)\n\n\ndef disable_signing(service, **kwargs):\n service.signature_version = None\n", "path": "awscli/customizations/globalargs.py"}], "after_files": [{"content": "# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n\n# http://aws.amazon.com/apache2.0/\n\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. 
See the License for the specific\n# language governing permissions and limitations under the License.\nimport sys\nimport six\n\nif six.PY3:\n import locale\n import urllib.parse as urlparse\n\n def get_stdout_text_writer():\n return sys.stdout\n\n def compat_open(filename, mode='r', encoding=None):\n \"\"\"Back-port open() that accepts an encoding argument.\n\n In python3 this uses the built in open() and in python2 this\n uses the io.open() function.\n\n If the file is not being opened in binary mode, then we'll\n use locale.getpreferredencoding() to find the preferred\n encoding.\n\n \"\"\"\n if 'b' not in mode:\n encoding = locale.getpreferredencoding()\n return open(filename, mode, encoding=encoding)\n\nelse:\n import codecs\n import locale\n import io\n import urlparse\n\n def get_stdout_text_writer():\n # In python3, all the sys.stdout/sys.stderr streams are in text\n # mode. This means they expect unicode, and will encode the\n # unicode automatically before actually writing to stdout/stderr.\n # In python2, that's not the case. In order to provide a consistent\n # interface, we can create a wrapper around sys.stdout that will take\n # unicode, and automatically encode it to the preferred encoding.\n # That way consumers can just call get_stdout_text_writer() and write\n # unicode to the returned stream. Note that get_stdout_text_writer\n # just returns sys.stdout in the PY3 section above because python3\n # handles this.\n return codecs.getwriter(locale.getpreferredencoding())(sys.stdout)\n\n def compat_open(filename, mode='r', encoding=None):\n # See docstring for compat_open in the PY3 section above.\n if 'b' not in mode:\n encoding = locale.getpreferredencoding()\n return io.open(filename, mode, encoding=encoding)\n", "path": "awscli/compat.py"}, {"content": "# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. 
See the License for the specific\n# language governing permissions and limitations under the License.\nimport sys\nimport os\n\nimport jmespath\n\nfrom awscli.compat import urlparse\n\n\ndef register_parse_global_args(cli):\n cli.register('top-level-args-parsed', resolve_types)\n cli.register('top-level-args-parsed', no_sign_request)\n\n\ndef resolve_types(parsed_args, **kwargs):\n # This emulates the \"type\" arg from argparse, but does so in a way\n # that plugins can also hook into this process.\n _resolve_arg(parsed_args, 'query')\n _resolve_arg(parsed_args, 'verify_ssl')\n _resolve_arg(parsed_args, 'endpoint_url')\n\n\ndef _resolve_arg(parsed_args, name):\n value = getattr(parsed_args, name, None)\n if value is not None:\n new_value = getattr(sys.modules[__name__], '_resolve_%s' % name)(value)\n setattr(parsed_args, name, new_value)\n\n\ndef _resolve_query(value):\n try:\n return jmespath.compile(value)\n except Exception as e:\n raise ValueError(\"Bad value for --query %s: %s\" % (value, str(e)))\n\n\ndef _resolve_verify_ssl(value):\n verify = None\n if not value:\n verify = False\n else:\n verify = os.environ.get('AWS_CA_BUNDLE')\n return verify\n\n\ndef _resolve_endpoint_url(value):\n parsed = urlparse.urlparse(value)\n # Our http library requires you specify an endpoint url\n # that contains a scheme, so we'll verify that up front.\n if not parsed.scheme:\n raise ValueError('Bad value for --endpoint-url \"%s\": scheme is '\n 'missing. Must be of the form '\n 'http://<hostname>/ or https://<hostname>/' % value)\n return value\n\n\ndef no_sign_request(parsed_args, session, **kwargs):\n if not parsed_args.sign_request:\n # In order to make signing disabled for all requests\n # we need to set the signature_version to None for\n # any service created. This ensures that get_endpoint()\n # will not look for auth.\n session.register('service-created', disable_signing)\n\n\ndef disable_signing(service, **kwargs):\n service.signature_version = None\n", "path": "awscli/customizations/globalargs.py"}]} | 1,648 | 440 |
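For reference, the scheme check this patch introduces can be exercised on its own. The sketch below is a Python 3 approximation: it uses the standard library's `urllib.parse` directly instead of the `awscli.compat.urlparse` shim added in the diff, and the sample URLs are made up.

```python
from urllib.parse import urlparse

def _resolve_endpoint_url(value):
    # Require an explicit scheme up front, as in the patched validator.
    if not urlparse(value).scheme:
        raise ValueError('Bad value for --endpoint-url "%s": scheme is '
                         'missing. Must be of the form '
                         'http://<hostname>/ or https://<hostname>/' % value)
    return value

_resolve_endpoint_url("https://example.com")  # accepted
_resolve_endpoint_url("example.com")          # raises ValueError
```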
gh_patches_debug_7632 | rasdani/github-patches | git_diff | aws__aws-cli-4308 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
aws emr create-cluster help command returns error
How to reproduce:

1. Upgrade to awscli 1.16.190, 1.16.194, or 1.16.196.
At the moment it's enough to install via pip on either macOS (1.16.194) or Linux (1.16.196), or via Homebrew (1.16.190) on macOS:
```
# on Ubuntu 16.04 linux
$ pip install --upgrade awscli
<... output skipped - but it was successful, no errors ...>
$ aws --version
aws-cli/1.16.196 Python/2.7.12 Linux/4.4.0-97-generic botocore/1.12.186
$ aws emr create-cluster help
[Errno 2] No such file or directory: '/usr/local/lib/python2.7/dist-packages/awscli/examples/emr/create-cluster-synopsis.txt'
# or on macOS, for example using the copy installed via Homebrew
$ brew install awscli
<... output skipped - but it was successful, no errors ...>
$ aws --version
aws-cli/1.16.190 Python/3.7.4 Darwin/18.6.0 botocore/1.12.180
$ aws emr create-cluster help
[Errno 2] No such file or directory: '/usr/local/Cellar/awscli/1.16.190/libexec/lib/python3.7/site-packages/awscli/examples/emr/create-cluster-synopsis.txt'
# or on macOS using aws installed via pip3
$ aws emr create-cluster help
[Errno 2] No such file or directory: '/usr/local/lib/python3.7/site-packages/awscli/examples/emr/create-cluster-synopsis.txt'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 import codecs
3 import os.path
4 import re
5 import sys
6
7 from setuptools import setup, find_packages
8
9
10 here = os.path.abspath(os.path.dirname(__file__))
11
12
13 def read(*parts):
14 return codecs.open(os.path.join(here, *parts), 'r').read()
15
16
17 def find_version(*file_paths):
18 version_file = read(*file_paths)
19 version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
20 version_file, re.M)
21 if version_match:
22 return version_match.group(1)
23 raise RuntimeError("Unable to find version string.")
24
25
26 requires = ['botocore==1.12.187',
27 'colorama>=0.2.5,<=0.3.9',
28 'docutils>=0.10',
29 'rsa>=3.1.2,<=3.5.0',
30 's3transfer>=0.2.0,<0.3.0']
31
32
33 if sys.version_info[:2] == (2, 6):
34 # For python2.6 we have to require argparse since it
35 # was not in stdlib until 2.7.
36 requires.append('argparse>=1.1')
37
38 # For Python 2.6, we have to require a different verion of PyYAML since the latest
39 # versions dropped support for Python 2.6.
40 requires.append('PyYAML>=3.10,<=3.13')
41 else:
42 requires.append('PyYAML>=3.10,<=5.1')
43
44
45 setup_options = dict(
46 name='awscli',
47 version=find_version("awscli", "__init__.py"),
48 description='Universal Command Line Environment for AWS.',
49 long_description=read('README.rst'),
50 author='Amazon Web Services',
51 url='http://aws.amazon.com/cli/',
52 scripts=['bin/aws', 'bin/aws.cmd',
53 'bin/aws_completer', 'bin/aws_zsh_completer.sh',
54 'bin/aws_bash_completer'],
55 packages=find_packages(exclude=['tests*']),
56 package_data={'awscli': ['data/*.json', 'examples/*/*.rst',
57 'examples/*/*/*.rst', 'topics/*.rst',
58 'topics/*.json']},
59 install_requires=requires,
60 extras_require={
61 ':python_version=="2.6"': [
62 'argparse>=1.1',
63 ]
64 },
65 license="Apache License 2.0",
66 classifiers=[
67 'Development Status :: 5 - Production/Stable',
68 'Intended Audience :: Developers',
69 'Intended Audience :: System Administrators',
70 'Natural Language :: English',
71 'License :: OSI Approved :: Apache Software License',
72 'Programming Language :: Python',
73 'Programming Language :: Python :: 2',
74 'Programming Language :: Python :: 2.6',
75 'Programming Language :: Python :: 2.7',
76 'Programming Language :: Python :: 3',
77 'Programming Language :: Python :: 3.3',
78 'Programming Language :: Python :: 3.4',
79 'Programming Language :: Python :: 3.5',
80 'Programming Language :: Python :: 3.6',
81 'Programming Language :: Python :: 3.7',
82 ],
83 )
84
85 if 'py2exe' in sys.argv:
86 # This will actually give us a py2exe command.
87 import py2exe
88 # And we have some py2exe specific options.
89 setup_options['options'] = {
90 'py2exe': {
91 'optimize': 0,
92 'skip_archive': True,
93 'dll_excludes': ['crypt32.dll'],
94 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',
95 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],
96 }
97 }
98 setup_options['console'] = ['bin/aws']
99
100
101 setup(**setup_options)
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -54,6 +54,7 @@
'bin/aws_bash_completer'],
packages=find_packages(exclude=['tests*']),
package_data={'awscli': ['data/*.json', 'examples/*/*.rst',
+ 'examples/*/*.txt', 'examples/*/*/*.txt',
'examples/*/*/*.rst', 'topics/*.rst',
'topics/*.json']},
install_requires=requires,
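A quick way to sanity-check the added globs is to match them against the file the issue reports as missing. This sketch uses `fnmatch` as a loose stand-in for setuptools' own pattern matching, so treat it as an approximation rather than a faithful reimplementation.

```python
from fnmatch import fnmatch

# package_data globs for the 'awscli' package after the patch above.
patterns = ["data/*.json", "examples/*/*.rst", "examples/*/*.txt",
            "examples/*/*/*.txt", "examples/*/*/*.rst",
            "topics/*.rst", "topics/*.json"]

# The data file the traceback in the issue points at.
missing_file = "examples/emr/create-cluster-synopsis.txt"

# Only the two newly added *.txt patterns match it.
assert any(fnmatch(missing_file, p) for p in patterns)
```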
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -54,6 +54,7 @@\n 'bin/aws_bash_completer'],\n packages=find_packages(exclude=['tests*']),\n package_data={'awscli': ['data/*.json', 'examples/*/*.rst',\n+ 'examples/*/*.txt', 'examples/*/*/*.txt',\n 'examples/*/*/*.rst', 'topics/*.rst',\n 'topics/*.json']},\n install_requires=requires,\n", "issue": "aws emr create-cluster help command returns error\nhow to reproduce \r\n\r\n1. upgrade to awscli 1.16.190 or 1.16.194 or 1.16.196\r\nat the moment it's enough to install via pip either on macOS(1.16.194) or on linux(1.16.196), or using Homebrew(1.16.190) on macOS\r\n```\r\n# on Ubuntu 16.04 linux \r\n$ pip install --upgrade awscli\r\n<... output skipped - but it was successful, no errors ...>\r\n\r\n$ aws --version\r\naws-cli/1.16.196 Python/2.7.12 Linux/4.4.0-97-generic botocore/1.12.186\r\n\r\n$ aws emr create-cluster help\r\n\r\n[Errno 2] No such file or directory: '/usr/local/lib/python2.7/dist-packages/awscli/examples/emr/create-cluster-synopsis.txt'\r\n\r\n\r\n\r\n#or on macOS just for example using the one installed via Homebrew\r\n$ brew install awscli\r\n<... output skipped - but it was successful, no errors ...>\r\n\r\n$ aws --version\r\naws-cli/1.16.190 Python/3.7.4 Darwin/18.6.0 botocore/1.12.180\r\n\r\n$ aws emr create-cluster help\r\n[Errno 2] No such file or directory: '/usr/local/Cellar/awscli/1.16.190/libexec/lib/python3.7/site-packages/awscli/examples/emr/create-cluster-synopsis.txt'\r\n\r\n#or on macOS using aws installed via pip3\r\n$ aws emr create-cluster help\r\n\r\n[Errno 2] No such file or directory: '/usr/local/lib/python3.7/site-packages/awscli/examples/emr/create-cluster-synopsis.txt'\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\nimport codecs\nimport os.path\nimport re\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read(*parts):\n return codecs.open(os.path.join(here, *parts), 'r').read()\n\n\ndef find_version(*file_paths):\n version_file = read(*file_paths)\n version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\nrequires = ['botocore==1.12.187',\n 'colorama>=0.2.5,<=0.3.9',\n 'docutils>=0.10',\n 'rsa>=3.1.2,<=3.5.0',\n 's3transfer>=0.2.0,<0.3.0']\n\n\nif sys.version_info[:2] == (2, 6):\n # For python2.6 we have to require argparse since it\n # was not in stdlib until 2.7.\n requires.append('argparse>=1.1')\n\n # For Python 2.6, we have to require a different verion of PyYAML since the latest\n # versions dropped support for Python 2.6.\n requires.append('PyYAML>=3.10,<=3.13')\nelse:\n requires.append('PyYAML>=3.10,<=5.1')\n\n\nsetup_options = dict(\n name='awscli',\n version=find_version(\"awscli\", \"__init__.py\"),\n description='Universal Command Line Environment for AWS.',\n long_description=read('README.rst'),\n author='Amazon Web Services',\n url='http://aws.amazon.com/cli/',\n scripts=['bin/aws', 'bin/aws.cmd',\n 'bin/aws_completer', 'bin/aws_zsh_completer.sh',\n 'bin/aws_bash_completer'],\n packages=find_packages(exclude=['tests*']),\n package_data={'awscli': ['data/*.json', 'examples/*/*.rst',\n 'examples/*/*/*.rst', 'topics/*.rst',\n 'topics/*.json']},\n install_requires=requires,\n extras_require={\n ':python_version==\"2.6\"': [\n 'argparse>=1.1',\n ]\n },\n license=\"Apache License 2.0\",\n classifiers=[\n 'Development 
Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n ],\n)\n\nif 'py2exe' in sys.argv:\n # This will actually give us a py2exe command.\n import py2exe\n # And we have some py2exe specific options.\n setup_options['options'] = {\n 'py2exe': {\n 'optimize': 0,\n 'skip_archive': True,\n 'dll_excludes': ['crypt32.dll'],\n 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',\n 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],\n }\n }\n setup_options['console'] = ['bin/aws']\n\n\nsetup(**setup_options)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport codecs\nimport os.path\nimport re\nimport sys\n\nfrom setuptools import setup, find_packages\n\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n\ndef read(*parts):\n return codecs.open(os.path.join(here, *parts), 'r').read()\n\n\ndef find_version(*file_paths):\n version_file = read(*file_paths)\n version_match = re.search(r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\nrequires = ['botocore==1.12.187',\n 'colorama>=0.2.5,<=0.3.9',\n 'docutils>=0.10',\n 'rsa>=3.1.2,<=3.5.0',\n 's3transfer>=0.2.0,<0.3.0']\n\n\nif sys.version_info[:2] == (2, 6):\n # For python2.6 we have to require argparse since it\n # was not in stdlib until 2.7.\n requires.append('argparse>=1.1')\n\n # For Python 2.6, we have to require a different verion of PyYAML since the latest\n # versions dropped support for Python 2.6.\n requires.append('PyYAML>=3.10,<=3.13')\nelse:\n requires.append('PyYAML>=3.10,<=5.1')\n\n\nsetup_options = dict(\n name='awscli',\n version=find_version(\"awscli\", \"__init__.py\"),\n description='Universal Command Line Environment for AWS.',\n long_description=read('README.rst'),\n author='Amazon Web Services',\n url='http://aws.amazon.com/cli/',\n scripts=['bin/aws', 'bin/aws.cmd',\n 'bin/aws_completer', 'bin/aws_zsh_completer.sh',\n 'bin/aws_bash_completer'],\n packages=find_packages(exclude=['tests*']),\n package_data={'awscli': ['data/*.json', 'examples/*/*.rst',\n 'examples/*/*.txt', 'examples/*/*/*.txt',\n 'examples/*/*/*.rst', 'topics/*.rst',\n 'topics/*.json']},\n install_requires=requires,\n extras_require={\n ':python_version==\"2.6\"': [\n 'argparse>=1.1',\n ]\n },\n license=\"Apache License 2.0\",\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Natural Language :: English',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 
3.6',\n 'Programming Language :: Python :: 3.7',\n ],\n)\n\nif 'py2exe' in sys.argv:\n # This will actually give us a py2exe command.\n import py2exe\n # And we have some py2exe specific options.\n setup_options['options'] = {\n 'py2exe': {\n 'optimize': 0,\n 'skip_archive': True,\n 'dll_excludes': ['crypt32.dll'],\n 'packages': ['docutils', 'urllib', 'httplib', 'HTMLParser',\n 'awscli', 'ConfigParser', 'xml.etree', 'pipes'],\n }\n }\n setup_options['console'] = ['bin/aws']\n\n\nsetup(**setup_options)\n", "path": "setup.py"}]} | 1,723 | 110 |
gh_patches_debug_6935 | rasdani/github-patches | git_diff | googleapis__google-auth-library-python-51 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Create system tests for service account-based credentials
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2014 Google Inc.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from setuptools import find_packages
16 from setuptools import setup
17
18
19 DEPENDENCIES = (
20 'pyasn1>=0.1.7',
21 'pyasn1-modules>=0.0.5',
22 'rsa>=3.1.4',
23 'six>=1.9.0',
24 )
25
26
27 with open('README.rst', 'r') as fh:
28 long_description = fh.read()
29
30 setup(
31 name='google-auth',
32 version='0.0.1',
33 author='Google Cloud Platform',
34 author_email='[email protected]',
35 description='Google Authentication Library',
36 long_description=long_description,
37 url='https://github.com/GoogleCloudPlatform/google-auth-library-python',
38 packages=find_packages(exclude='tests'),
39 namespace_packages=('google',),
40 install_requires=DEPENDENCIES,
41 license='Apache 2.0',
42 keywords='google auth oauth client',
43 classifiers=(
44 'Programming Language :: Python :: 2',
45 'Programming Language :: Python :: 2.7',
46 'Programming Language :: Python :: 3',
47 'Programming Language :: Python :: 3.4',
48 'Programming Language :: Python :: 3.5',
49 'Development Status :: 3 - Alpha',
50 'Intended Audience :: Developers',
51 'License :: OSI Approved :: Apache Software License',
52 'Operating System :: POSIX',
53 'Operating System :: Microsoft :: Windows',
54 'Operating System :: MacOS :: MacOS X',
55 'Operating System :: OS Independent',
56 'Topic :: Internet :: WWW/HTTP',
57 ),
58 )
59
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@
description='Google Authentication Library',
long_description=long_description,
url='https://github.com/GoogleCloudPlatform/google-auth-library-python',
- packages=find_packages(exclude='tests'),
+ packages=find_packages(exclude=('tests', 'system_tests')),
namespace_packages=('google',),
install_requires=DEPENDENCIES,
license='Apache 2.0',
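The shape of the `exclude` argument is the interesting part of this diff: in setuptools, a bare string such as `'tests'` is unpacked character by character into one-letter patterns, so it does not reliably exclude the test packages. A short illustration (the surrounding project layout is assumed, not shown):

```python
from setuptools import find_packages

# Effectively exclude=('t', 'e', 's', 't', 's'): a package named 'tests'
# can still end up in the distribution.
broken = find_packages(exclude='tests')

# A tuple of package names is what the API expects, matching the patch above.
fixed = find_packages(exclude=('tests', 'system_tests'))
```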
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -35,7 +35,7 @@\n description='Google Authentication Library',\n long_description=long_description,\n url='https://github.com/GoogleCloudPlatform/google-auth-library-python',\n- packages=find_packages(exclude='tests'),\n+ packages=find_packages(exclude=('tests', 'system_tests')),\n namespace_packages=('google',),\n install_requires=DEPENDENCIES,\n license='Apache 2.0',\n", "issue": "Create system tests for service account-based credentials\n\n", "before_files": [{"content": "# Copyright 2014 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom setuptools import find_packages\nfrom setuptools import setup\n\n\nDEPENDENCIES = (\n 'pyasn1>=0.1.7',\n 'pyasn1-modules>=0.0.5',\n 'rsa>=3.1.4',\n 'six>=1.9.0',\n)\n\n\nwith open('README.rst', 'r') as fh:\n long_description = fh.read()\n\nsetup(\n name='google-auth',\n version='0.0.1',\n author='Google Cloud Platform',\n author_email='[email protected]',\n description='Google Authentication Library',\n long_description=long_description,\n url='https://github.com/GoogleCloudPlatform/google-auth-library-python',\n packages=find_packages(exclude='tests'),\n namespace_packages=('google',),\n install_requires=DEPENDENCIES,\n license='Apache 2.0',\n keywords='google auth oauth client',\n classifiers=(\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: POSIX',\n 'Operating System :: Microsoft :: Windows',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: OS Independent',\n 'Topic :: Internet :: WWW/HTTP',\n ),\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2014 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom setuptools import find_packages\nfrom setuptools import setup\n\n\nDEPENDENCIES = (\n 'pyasn1>=0.1.7',\n 'pyasn1-modules>=0.0.5',\n 'rsa>=3.1.4',\n 'six>=1.9.0',\n)\n\n\nwith open('README.rst', 'r') as fh:\n long_description = fh.read()\n\nsetup(\n name='google-auth',\n version='0.0.1',\n author='Google Cloud Platform',\n author_email='[email protected]',\n description='Google Authentication Library',\n long_description=long_description,\n 
url='https://github.com/GoogleCloudPlatform/google-auth-library-python',\n packages=find_packages(exclude=('tests', 'system_tests')),\n namespace_packages=('google',),\n install_requires=DEPENDENCIES,\n license='Apache 2.0',\n keywords='google auth oauth client',\n classifiers=(\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: POSIX',\n 'Operating System :: Microsoft :: Windows',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: OS Independent',\n 'Topic :: Internet :: WWW/HTTP',\n ),\n)\n", "path": "setup.py"}]} | 834 | 109 |
gh_patches_debug_18389 | rasdani/github-patches | git_diff | akvo__akvo-rsr-1829 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
IATI import
## Test plan
1. Take any IATI file from the [IATI registry](http://iatiregistry.org)
2. Make sure the reporting organisation is in RSR, with the correct IATI Organisation ID and 'Reportable' set to True.
3. In the old admin, add a new IATI import. Either fill in an external URL, or a local file. The user that is selected will get an email with a summary of the import. _Note that for larger files (e.g. > 100 projects), you might see a timeout. However, the import should continue to run in the background._
4. When the import is done, it should show up with status completed, and the mail with import details should have been sent. _Note that files up to 100 projects take about half a minute._
## Issue description
See product design repository: https://github.com/akvo/akvo-product-design/issues/97
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `akvo/rsr/migrations/0032_auto_20151001_0956.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 from __future__ import unicode_literals
3
4 from django.db import models, migrations
5 import akvo.rsr.models.iati_import
6 from django.conf import settings
7 import akvo.rsr.fields
8
9
10 class Migration(migrations.Migration):
11
12 dependencies = [
13 ('rsr', '0031_auto_20150825_1109'),
14 ]
15
16 operations = [
17 migrations.CreateModel(
18 name='IatiImport',
19 fields=[
20 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
21 ('url', models.URLField(verbose_name='url', blank=True)),
22 ('local_file', models.FileField(upload_to=akvo.rsr.models.iati_import.file_path, verbose_name='local file', blank=True)),
23 ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'retrieving file'), (3, 'import in progress'), (4, 'completed'), (5, 'cancelled')])),
24 ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),
25 ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),
26 ],
27 options={
28 'verbose_name': 'IATI import',
29 'verbose_name_plural': 'IATI imports',
30 },
31 bases=(models.Model,),
32 ),
33 migrations.CreateModel(
34 name='IatiImportLog',
35 fields=[
36 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
37 ('severity', models.PositiveSmallIntegerField(default=1, verbose_name='severity', choices=[(0, 'information'), (1, 'critical error'), (2, 'value not saved'), (3, 'value partly saved')])),
38 ('text', akvo.rsr.fields.ValidXMLTextField(verbose_name='text')),
39 ('iati_import', models.ForeignKey(related_name='iati_import_logs', verbose_name='iati_import', to='rsr.IatiImport')),
40 ('project', models.ForeignKey(related_name='iati_project_import_logs', verbose_name='project', blank=True, to='rsr.Project', null=True)),
41 ],
42 options={
43 'verbose_name': 'IATI import log',
44 'verbose_name_plural': 'IATI import logs',
45 },
46 bases=(models.Model,),
47 ),
48 migrations.CreateModel(
49 name='IatiProjectImport',
50 fields=[
51 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
52 ('action', models.PositiveSmallIntegerField(verbose_name='action', choices=[(1, 'create'), (2, 'update')])),
53 ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'import in progress'), (3, 'completed'), (4, 'cancelled')])),
54 ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),
55 ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),
56 ('iati_import', models.ForeignKey(related_name='iati_project_imports', verbose_name='iati_import', to='rsr.IatiImport')),
57 ('project', models.ForeignKey(related_name='iati_project_imports', verbose_name='project', to='rsr.Project')),
58 ],
59 options={
60 'verbose_name': 'IATI project import',
61 'verbose_name_plural': 'IATI project imports',
62 },
63 bases=(models.Model,),
64 ),
65 migrations.AddField(
66 model_name='iatiimport',
67 name='projects',
68 field=models.ManyToManyField(to='rsr.Project', verbose_name='projects', through='rsr.IatiProjectImport', blank=True),
69 preserve_default=True,
70 ),
71 migrations.AddField(
72 model_name='iatiimport',
73 name='user',
74 field=models.ForeignKey(related_name='iati_imports', verbose_name='user', to=settings.AUTH_USER_MODEL),
75 preserve_default=True,
76 ),
77 migrations.AlterField(
78 model_name='budgetitem',
79 name='amount',
80 field=models.DecimalField(null=True, verbose_name='amount', max_digits=14, decimal_places=2, blank=True),
81 preserve_default=True,
82 ),
83 migrations.AlterField(
84 model_name='partnership',
85 name='funding_amount',
86 field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text="The funding amount of the partner.<br>Note that it's only possible to indicate a funding amount for funding partners.", null=True, verbose_name='funding amount', db_index=True),
87 preserve_default=True,
88 ),
89 migrations.AlterField(
90 model_name='planneddisbursement',
91 name='value',
92 field=models.DecimalField(null=True, verbose_name='value', max_digits=14, decimal_places=2, blank=True),
93 preserve_default=True,
94 ),
95 migrations.AlterField(
96 model_name='project',
97 name='budget',
98 field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, verbose_name='project budget', db_index=True),
99 preserve_default=True,
100 ),
101 migrations.AlterField(
102 model_name='project',
103 name='funds',
104 field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),
105 preserve_default=True,
106 ),
107 migrations.AlterField(
108 model_name='project',
109 name='funds_needed',
110 field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),
111 preserve_default=True,
112 ),
113 migrations.AlterField(
114 model_name='transaction',
115 name='value',
116 field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),
117 preserve_default=True,
118 ),
119 ]
120
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/akvo/rsr/migrations/0032_auto_20151001_0956.py b/akvo/rsr/migrations/0032_auto_20151001_0956.py
--- a/akvo/rsr/migrations/0032_auto_20151001_0956.py
+++ b/akvo/rsr/migrations/0032_auto_20151001_0956.py
@@ -116,4 +116,10 @@
field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),
preserve_default=True,
),
+ migrations.AlterField(
+ model_name='project',
+ name='iati_activity_id',
+ field=akvo.rsr.fields.ValidXMLCharField(null=True, max_length=100, blank=True, help_text='This should be the official unique IATI Identifier for the project. The identifier consists of the IATI organisation identifier and the (organisations internal) project identifier, e.g. NL-KVK-31156201-TZ1234. (100 characters)<br>Note that \'projects\' in this form are the same as \'activities\' in IATI.<br><a href="http://iatistandard.org/activity-standard/iati-activities/iati-activity/iati-identifier" target="_blank">How to create</a>', verbose_name='IATI Project Identifier', db_index=True),
+ preserve_default=True,
+ ),
]
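Because this amends an existing migration rather than adding a new one, it is worth confirming that the recorded migration state still matches the models. A minimal check, assuming a configured RSR Django environment with `DJANGO_SETTINGS_MODULE` set, is to run the autodetector in dry-run mode:

```python
import django
from django.core.management import call_command

django.setup()

# Should report "No changes detected" for the 'rsr' app once the
# AlterField above is part of the migration history.
call_command("makemigrations", "rsr", dry_run=True)
```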
| {"golden_diff": "diff --git a/akvo/rsr/migrations/0032_auto_20151001_0956.py b/akvo/rsr/migrations/0032_auto_20151001_0956.py\n--- a/akvo/rsr/migrations/0032_auto_20151001_0956.py\n+++ b/akvo/rsr/migrations/0032_auto_20151001_0956.py\n@@ -116,4 +116,10 @@\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),\n preserve_default=True,\n ),\n+ migrations.AlterField(\n+ model_name='project',\n+ name='iati_activity_id',\n+ field=akvo.rsr.fields.ValidXMLCharField(null=True, max_length=100, blank=True, help_text='This should be the official unique IATI Identifier for the project. The identifier consists of the IATI organisation identifier and the (organisations internal) project identifier, e.g. NL-KVK-31156201-TZ1234. (100 characters)<br>Note that \\'projects\\' in this form are the same as \\'activities\\' in IATI.<br><a href=\"http://iatistandard.org/activity-standard/iati-activities/iati-activity/iati-identifier\" target=\"_blank\">How to create</a>', verbose_name='IATI Project Identifier', db_index=True),\n+ preserve_default=True,\n+ ),\n ]\n", "issue": "IATI import\n## Test plan\n1. Take any IATI file from the [IATI registry](http://iatiregistry.org)\n2. Make sure the reporting organisation is in RSR, with the correct IATI Organisation ID and 'Reportable' set to True.\n3. In the old admin, add a new IATI import. Either fill in an external URL, or a local file. The user that is selected will get an email with a summary of the import. _Note that for larger files (e.g. > 100 projects), you might see a timeout. However, the import should continue to run in the background._\n4. When the import is done, it should show up with status completed, and the mail with import details should have been sent. 
_Note that files up to 100 projects take about half a minute._\n## Issue description\n\nSee product design repository: https://github.com/akvo/akvo-product-design/issues/97\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\nimport akvo.rsr.models.iati_import\nfrom django.conf import settings\nimport akvo.rsr.fields\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('rsr', '0031_auto_20150825_1109'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='IatiImport',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('url', models.URLField(verbose_name='url', blank=True)),\n ('local_file', models.FileField(upload_to=akvo.rsr.models.iati_import.file_path, verbose_name='local file', blank=True)),\n ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'retrieving file'), (3, 'import in progress'), (4, 'completed'), (5, 'cancelled')])),\n ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),\n ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),\n ],\n options={\n 'verbose_name': 'IATI import',\n 'verbose_name_plural': 'IATI imports',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='IatiImportLog',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('severity', models.PositiveSmallIntegerField(default=1, verbose_name='severity', choices=[(0, 'information'), (1, 'critical error'), (2, 'value not saved'), (3, 'value partly saved')])),\n ('text', akvo.rsr.fields.ValidXMLTextField(verbose_name='text')),\n ('iati_import', models.ForeignKey(related_name='iati_import_logs', verbose_name='iati_import', to='rsr.IatiImport')),\n ('project', models.ForeignKey(related_name='iati_project_import_logs', verbose_name='project', blank=True, to='rsr.Project', null=True)),\n ],\n options={\n 'verbose_name': 'IATI import log',\n 'verbose_name_plural': 'IATI import logs',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='IatiProjectImport',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('action', models.PositiveSmallIntegerField(verbose_name='action', choices=[(1, 'create'), (2, 'update')])),\n ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'import in progress'), (3, 'completed'), (4, 'cancelled')])),\n ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),\n ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),\n ('iati_import', models.ForeignKey(related_name='iati_project_imports', verbose_name='iati_import', to='rsr.IatiImport')),\n ('project', models.ForeignKey(related_name='iati_project_imports', verbose_name='project', to='rsr.Project')),\n ],\n options={\n 'verbose_name': 'IATI project import',\n 'verbose_name_plural': 'IATI project imports',\n },\n bases=(models.Model,),\n ),\n migrations.AddField(\n model_name='iatiimport',\n name='projects',\n field=models.ManyToManyField(to='rsr.Project', verbose_name='projects', through='rsr.IatiProjectImport', blank=True),\n preserve_default=True,\n ),\n migrations.AddField(\n model_name='iatiimport',\n name='user',\n field=models.ForeignKey(related_name='iati_imports', verbose_name='user', 
to=settings.AUTH_USER_MODEL),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='budgetitem',\n name='amount',\n field=models.DecimalField(null=True, verbose_name='amount', max_digits=14, decimal_places=2, blank=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='partnership',\n name='funding_amount',\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text=\"The funding amount of the partner.<br>Note that it's only possible to indicate a funding amount for funding partners.\", null=True, verbose_name='funding amount', db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='planneddisbursement',\n name='value',\n field=models.DecimalField(null=True, verbose_name='value', max_digits=14, decimal_places=2, blank=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='budget',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, verbose_name='project budget', db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='funds',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='funds_needed',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='transaction',\n name='value',\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),\n preserve_default=True,\n ),\n ]\n", "path": "akvo/rsr/migrations/0032_auto_20151001_0956.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\nimport akvo.rsr.models.iati_import\nfrom django.conf import settings\nimport akvo.rsr.fields\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('rsr', '0031_auto_20150825_1109'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='IatiImport',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('url', models.URLField(verbose_name='url', blank=True)),\n ('local_file', models.FileField(upload_to=akvo.rsr.models.iati_import.file_path, verbose_name='local file', blank=True)),\n ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'retrieving file'), (3, 'import in progress'), (4, 'completed'), (5, 'cancelled')])),\n ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),\n ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),\n ],\n options={\n 'verbose_name': 'IATI import',\n 'verbose_name_plural': 'IATI imports',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='IatiImportLog',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('severity', models.PositiveSmallIntegerField(default=1, verbose_name='severity', choices=[(0, 'information'), (1, 'critical error'), (2, 'value not saved'), (3, 'value partly saved')])),\n ('text', akvo.rsr.fields.ValidXMLTextField(verbose_name='text')),\n ('iati_import', models.ForeignKey(related_name='iati_import_logs', verbose_name='iati_import', 
to='rsr.IatiImport')),\n ('project', models.ForeignKey(related_name='iati_project_import_logs', verbose_name='project', blank=True, to='rsr.Project', null=True)),\n ],\n options={\n 'verbose_name': 'IATI import log',\n 'verbose_name_plural': 'IATI import logs',\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='IatiProjectImport',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('action', models.PositiveSmallIntegerField(verbose_name='action', choices=[(1, 'create'), (2, 'update')])),\n ('status', models.PositiveSmallIntegerField(default=1, verbose_name='status', choices=[(1, 'pending'), (2, 'import in progress'), (3, 'completed'), (4, 'cancelled')])),\n ('start_date', models.DateTimeField(null=True, verbose_name='start date', blank=True)),\n ('end_date', models.DateTimeField(null=True, verbose_name='end date', blank=True)),\n ('iati_import', models.ForeignKey(related_name='iati_project_imports', verbose_name='iati_import', to='rsr.IatiImport')),\n ('project', models.ForeignKey(related_name='iati_project_imports', verbose_name='project', to='rsr.Project')),\n ],\n options={\n 'verbose_name': 'IATI project import',\n 'verbose_name_plural': 'IATI project imports',\n },\n bases=(models.Model,),\n ),\n migrations.AddField(\n model_name='iatiimport',\n name='projects',\n field=models.ManyToManyField(to='rsr.Project', verbose_name='projects', through='rsr.IatiProjectImport', blank=True),\n preserve_default=True,\n ),\n migrations.AddField(\n model_name='iatiimport',\n name='user',\n field=models.ForeignKey(related_name='iati_imports', verbose_name='user', to=settings.AUTH_USER_MODEL),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='budgetitem',\n name='amount',\n field=models.DecimalField(null=True, verbose_name='amount', max_digits=14, decimal_places=2, blank=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='partnership',\n name='funding_amount',\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text=\"The funding amount of the partner.<br>Note that it's only possible to indicate a funding amount for funding partners.\", null=True, verbose_name='funding amount', db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='planneddisbursement',\n name='value',\n field=models.DecimalField(null=True, verbose_name='value', max_digits=14, decimal_places=2, blank=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='budget',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, verbose_name='project budget', db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='funds',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='funds_needed',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=14, blank=True, null=True, db_index=True),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='transaction',\n name='value',\n field=models.DecimalField(decimal_places=2, max_digits=14, blank=True, help_text='Enter the transaction amount.', null=True, verbose_name='value'),\n preserve_default=True,\n ),\n migrations.AlterField(\n model_name='project',\n name='iati_activity_id',\n field=akvo.rsr.fields.ValidXMLCharField(null=True, 
max_length=100, blank=True, help_text='This should be the official unique IATI Identifier for the project. The identifier consists of the IATI organisation identifier and the (organisations internal) project identifier, e.g. NL-KVK-31156201-TZ1234. (100 characters)<br>Note that \\'projects\\' in this form are the same as \\'activities\\' in IATI.<br><a href=\"http://iatistandard.org/activity-standard/iati-activities/iati-activity/iati-identifier\" target=\"_blank\">How to create</a>', verbose_name='IATI Project Identifier', db_index=True),\n preserve_default=True,\n ),\n ]\n", "path": "akvo/rsr/migrations/0032_auto_20151001_0956.py"}]} | 2,020 | 374 |
gh_patches_debug_40166 | rasdani/github-patches | git_diff | learningequality__kolibri-2092 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Setup wizard is broken
## Summary
* Submitting the setup wizard returns `{language_code: ["This field is required."]}`
## System information
- Version: 0.6
## How to reproduce
1. Go through setup wizard
## Real-life consequences
Sadness
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kolibri/core/device/serializers.py`
Content:
```
1 from django.db import transaction
2 from django.utils.translation import check_for_language, ugettext_lazy as _
3 from kolibri.auth.constants.facility_presets import choices, mappings
4 from kolibri.auth.constants.role_kinds import ADMIN
5 from kolibri.auth.models import Facility, FacilityUser
6 from kolibri.auth.serializers import FacilitySerializer, FacilityUserSerializer
7 from rest_framework import serializers
8
9 from .models import DevicePermissions, DeviceSettings
10
11
12 class DevicePermissionsSerializer(serializers.ModelSerializer):
13
14 class Meta:
15 model = DevicePermissions
16 fields = (
17 'user', 'is_superuser', 'can_manage_content',
18 )
19
20 class NoFacilityFacilityUserSerializer(FacilityUserSerializer):
21
22 class Meta:
23 model = FacilityUser
24 fields = ('id', 'username', 'full_name', 'password', )
25
26
27 class DeviceProvisionSerializer(serializers.Serializer):
28 facility = FacilitySerializer()
29 preset = serializers.ChoiceField(choices=choices)
30 superuser = NoFacilityFacilityUserSerializer()
31 language_code = serializers.CharField(max_length=15)
32
33 class Meta:
34 fields = ('facility', 'dataset', 'superuser', 'language_code')
35
36 def validate_language_code(self, language_code):
37 """
38 Check that the language_code is supported by Kolibri
39 """
40 if not check_for_language(language_code):
41 raise serializers.ValidationError(_("Language is not supported by Kolibri"))
42 return language_code
43
44 def create(self, validated_data):
45 """
46 Endpoint for initial setup of a device.
47 Expects a value for:
48 default language - the default language of this Kolibri device
49 facility - the required fields for setting up a facility
50 facilitydataset - facility configuration options
51 superuser - the required fields for a facilityuser who will be set as the super user for this device
52 """
53 with transaction.atomic():
54 facility = Facility.objects.create(**validated_data.pop('facility'))
55 preset = validated_data.pop('preset')
56 dataset_data = mappings[preset]
57 for key, value in dataset_data.items():
58 setattr(facility.dataset, key, value)
59 facility.dataset.save()
60 superuser_data = validated_data.pop('superuser')
61 superuser_data['facility'] = facility
62 superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)
63 facility.add_role(superuser, ADMIN)
64 DevicePermissions.objects.create(user=superuser, is_superuser=True)
65 language_code = validated_data.pop('language_code')
66 device_settings, created = DeviceSettings.objects.get_or_create()
67 device_settings.is_provisioned = True
68 device_settings.language_code = language_code
69 device_settings.save()
70 return {
71 "facility": facility,
72 "preset": preset,
73 "superuser": superuser,
74 "language_code": language_code
75 }
76
```
Path: `kolibri/core/device/models.py`
Content:
```
1 from django.conf import settings
2 from django.db import models
3 from kolibri.auth.models import FacilityUser
4
5 from .permissions import UserCanManageDevicePermissions
6
7
8 class DevicePermissions(models.Model):
9 """
10 This class stores metadata about device permissions for FacilityUsers.
11 """
12
13 permissions = UserCanManageDevicePermissions()
14
15 user = models.OneToOneField(FacilityUser, on_delete=models.CASCADE, related_name='devicepermissions', blank=False, null=False, primary_key=True)
16 is_superuser = models.BooleanField(default=False)
17 can_manage_content = models.BooleanField(default=False)
18
19
20 class DeviceSettings(models.Model):
21 """
22 This class stores data about settings particular to this device
23 """
24
25 is_provisioned = models.BooleanField(default=False)
26 language_code = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)
27
28 def save(self, *args, **kwargs):
29 self.pk = 1
30 super(DeviceSettings, self).save(*args, **kwargs)
31
```
Path: `kolibri/core/device/migrations/0001_initial.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Generated by Django 1.9.7 on 2017-08-16 23:05
3 from __future__ import unicode_literals
4
5 import django.db.models.deletion
6 from django.conf import settings
7 from django.db import migrations, models
8
9
10 class Migration(migrations.Migration):
11
12 initial = True
13
14 dependencies = [
15 ('kolibriauth', '0003_auto_20170621_0958'),
16 ]
17
18 operations = [
19 migrations.CreateModel(
20 name='DevicePermissions',
21 fields=[
22 ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='devicepermissions', serialize=False, to=settings.AUTH_USER_MODEL)),
23 ('is_superuser', models.BooleanField(default=False)),
24 ('can_manage_content', models.BooleanField(default=False)),
25 ],
26 ),
27 migrations.CreateModel(
28 name='DeviceSettings',
29 fields=[
30 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
31 ('is_provisioned', models.BooleanField(default=False)),
32 ('language_code', models.CharField(default='en', max_length=15)),
33 ],
34 ),
35 ]
36
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kolibri/core/device/migrations/0001_initial.py b/kolibri/core/device/migrations/0001_initial.py
--- a/kolibri/core/device/migrations/0001_initial.py
+++ b/kolibri/core/device/migrations/0001_initial.py
@@ -29,7 +29,7 @@
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_provisioned', models.BooleanField(default=False)),
- ('language_code', models.CharField(default='en', max_length=15)),
+ ('language_id', models.CharField(default='en', max_length=15)),
],
),
]
diff --git a/kolibri/core/device/models.py b/kolibri/core/device/models.py
--- a/kolibri/core/device/models.py
+++ b/kolibri/core/device/models.py
@@ -23,7 +23,7 @@
"""
is_provisioned = models.BooleanField(default=False)
- language_code = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)
+ language_id = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)
def save(self, *args, **kwargs):
self.pk = 1
diff --git a/kolibri/core/device/serializers.py b/kolibri/core/device/serializers.py
--- a/kolibri/core/device/serializers.py
+++ b/kolibri/core/device/serializers.py
@@ -28,18 +28,18 @@
facility = FacilitySerializer()
preset = serializers.ChoiceField(choices=choices)
superuser = NoFacilityFacilityUserSerializer()
- language_code = serializers.CharField(max_length=15)
+ language_id = serializers.CharField(max_length=15)
class Meta:
- fields = ('facility', 'dataset', 'superuser', 'language_code')
+ fields = ('facility', 'dataset', 'superuser', 'language_id')
- def validate_language_code(self, language_code):
+ def validate_language_id(self, language_id):
"""
- Check that the language_code is supported by Kolibri
+ Check that the language_id is supported by Kolibri
"""
- if not check_for_language(language_code):
+ if not check_for_language(language_id):
raise serializers.ValidationError(_("Language is not supported by Kolibri"))
- return language_code
+ return language_id
def create(self, validated_data):
"""
@@ -62,14 +62,14 @@
superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)
facility.add_role(superuser, ADMIN)
DevicePermissions.objects.create(user=superuser, is_superuser=True)
- language_code = validated_data.pop('language_code')
+ language_id = validated_data.pop('language_id')
device_settings, created = DeviceSettings.objects.get_or_create()
device_settings.is_provisioned = True
- device_settings.language_code = language_code
+ device_settings.language_id = language_id
device_settings.save()
return {
"facility": facility,
"preset": preset,
"superuser": superuser,
- "language_code": language_code
+ "language_id": language_id
}
| {"golden_diff": "diff --git a/kolibri/core/device/migrations/0001_initial.py b/kolibri/core/device/migrations/0001_initial.py\n--- a/kolibri/core/device/migrations/0001_initial.py\n+++ b/kolibri/core/device/migrations/0001_initial.py\n@@ -29,7 +29,7 @@\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('is_provisioned', models.BooleanField(default=False)),\n- ('language_code', models.CharField(default='en', max_length=15)),\n+ ('language_id', models.CharField(default='en', max_length=15)),\n ],\n ),\n ]\ndiff --git a/kolibri/core/device/models.py b/kolibri/core/device/models.py\n--- a/kolibri/core/device/models.py\n+++ b/kolibri/core/device/models.py\n@@ -23,7 +23,7 @@\n \"\"\"\n \n is_provisioned = models.BooleanField(default=False)\n- language_code = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)\n+ language_id = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)\n \n def save(self, *args, **kwargs):\n self.pk = 1\ndiff --git a/kolibri/core/device/serializers.py b/kolibri/core/device/serializers.py\n--- a/kolibri/core/device/serializers.py\n+++ b/kolibri/core/device/serializers.py\n@@ -28,18 +28,18 @@\n facility = FacilitySerializer()\n preset = serializers.ChoiceField(choices=choices)\n superuser = NoFacilityFacilityUserSerializer()\n- language_code = serializers.CharField(max_length=15)\n+ language_id = serializers.CharField(max_length=15)\n \n class Meta:\n- fields = ('facility', 'dataset', 'superuser', 'language_code')\n+ fields = ('facility', 'dataset', 'superuser', 'language_id')\n \n- def validate_language_code(self, language_code):\n+ def validate_language_id(self, language_id):\n \"\"\"\n- Check that the language_code is supported by Kolibri\n+ Check that the language_id is supported by Kolibri\n \"\"\"\n- if not check_for_language(language_code):\n+ if not check_for_language(language_id):\n raise serializers.ValidationError(_(\"Language is not supported by Kolibri\"))\n- return language_code\n+ return language_id\n \n def create(self, validated_data):\n \"\"\"\n@@ -62,14 +62,14 @@\n superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)\n facility.add_role(superuser, ADMIN)\n DevicePermissions.objects.create(user=superuser, is_superuser=True)\n- language_code = validated_data.pop('language_code')\n+ language_id = validated_data.pop('language_id')\n device_settings, created = DeviceSettings.objects.get_or_create()\n device_settings.is_provisioned = True\n- device_settings.language_code = language_code\n+ device_settings.language_id = language_id\n device_settings.save()\n return {\n \"facility\": facility,\n \"preset\": preset,\n \"superuser\": superuser,\n- \"language_code\": language_code\n+ \"language_id\": language_id\n }\n", "issue": "Setup wizard is broken\n## Summary\r\n\r\n* Submitting the setup wizard returns `{language_code: [\"This field is required.\"]}`\r\n\r\n## System information\r\n\r\n - Version: 0.6\r\n\r\n## How to reproduce\r\n\r\n1. 
Go through setup wizard\r\n\r\n## Real-life consequences\r\n\r\nSadness\n", "before_files": [{"content": "from django.db import transaction\nfrom django.utils.translation import check_for_language, ugettext_lazy as _\nfrom kolibri.auth.constants.facility_presets import choices, mappings\nfrom kolibri.auth.constants.role_kinds import ADMIN\nfrom kolibri.auth.models import Facility, FacilityUser\nfrom kolibri.auth.serializers import FacilitySerializer, FacilityUserSerializer\nfrom rest_framework import serializers\n\nfrom .models import DevicePermissions, DeviceSettings\n\n\nclass DevicePermissionsSerializer(serializers.ModelSerializer):\n\n class Meta:\n model = DevicePermissions\n fields = (\n 'user', 'is_superuser', 'can_manage_content',\n )\n\nclass NoFacilityFacilityUserSerializer(FacilityUserSerializer):\n\n class Meta:\n model = FacilityUser\n fields = ('id', 'username', 'full_name', 'password', )\n\n\nclass DeviceProvisionSerializer(serializers.Serializer):\n facility = FacilitySerializer()\n preset = serializers.ChoiceField(choices=choices)\n superuser = NoFacilityFacilityUserSerializer()\n language_code = serializers.CharField(max_length=15)\n\n class Meta:\n fields = ('facility', 'dataset', 'superuser', 'language_code')\n\n def validate_language_code(self, language_code):\n \"\"\"\n Check that the language_code is supported by Kolibri\n \"\"\"\n if not check_for_language(language_code):\n raise serializers.ValidationError(_(\"Language is not supported by Kolibri\"))\n return language_code\n\n def create(self, validated_data):\n \"\"\"\n Endpoint for initial setup of a device.\n Expects a value for:\n default language - the default language of this Kolibri device\n facility - the required fields for setting up a facility\n facilitydataset - facility configuration options\n superuser - the required fields for a facilityuser who will be set as the super user for this device\n \"\"\"\n with transaction.atomic():\n facility = Facility.objects.create(**validated_data.pop('facility'))\n preset = validated_data.pop('preset')\n dataset_data = mappings[preset]\n for key, value in dataset_data.items():\n setattr(facility.dataset, key, value)\n facility.dataset.save()\n superuser_data = validated_data.pop('superuser')\n superuser_data['facility'] = facility\n superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)\n facility.add_role(superuser, ADMIN)\n DevicePermissions.objects.create(user=superuser, is_superuser=True)\n language_code = validated_data.pop('language_code')\n device_settings, created = DeviceSettings.objects.get_or_create()\n device_settings.is_provisioned = True\n device_settings.language_code = language_code\n device_settings.save()\n return {\n \"facility\": facility,\n \"preset\": preset,\n \"superuser\": superuser,\n \"language_code\": language_code\n }\n", "path": "kolibri/core/device/serializers.py"}, {"content": "from django.conf import settings\nfrom django.db import models\nfrom kolibri.auth.models import FacilityUser\n\nfrom .permissions import UserCanManageDevicePermissions\n\n\nclass DevicePermissions(models.Model):\n \"\"\"\n This class stores metadata about device permissions for FacilityUsers.\n \"\"\"\n\n permissions = UserCanManageDevicePermissions()\n\n user = models.OneToOneField(FacilityUser, on_delete=models.CASCADE, related_name='devicepermissions', blank=False, null=False, primary_key=True)\n is_superuser = models.BooleanField(default=False)\n can_manage_content = models.BooleanField(default=False)\n\n\nclass 
DeviceSettings(models.Model):\n \"\"\"\n This class stores data about settings particular to this device\n \"\"\"\n\n is_provisioned = models.BooleanField(default=False)\n language_code = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)\n\n def save(self, *args, **kwargs):\n self.pk = 1\n super(DeviceSettings, self).save(*args, **kwargs)\n", "path": "kolibri/core/device/models.py"}, {"content": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.7 on 2017-08-16 23:05\nfrom __future__ import unicode_literals\n\nimport django.db.models.deletion\nfrom django.conf import settings\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('kolibriauth', '0003_auto_20170621_0958'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DevicePermissions',\n fields=[\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='devicepermissions', serialize=False, to=settings.AUTH_USER_MODEL)),\n ('is_superuser', models.BooleanField(default=False)),\n ('can_manage_content', models.BooleanField(default=False)),\n ],\n ),\n migrations.CreateModel(\n name='DeviceSettings',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('is_provisioned', models.BooleanField(default=False)),\n ('language_code', models.CharField(default='en', max_length=15)),\n ],\n ),\n ]\n", "path": "kolibri/core/device/migrations/0001_initial.py"}], "after_files": [{"content": "from django.db import transaction\nfrom django.utils.translation import check_for_language, ugettext_lazy as _\nfrom kolibri.auth.constants.facility_presets import choices, mappings\nfrom kolibri.auth.constants.role_kinds import ADMIN\nfrom kolibri.auth.models import Facility, FacilityUser\nfrom kolibri.auth.serializers import FacilitySerializer, FacilityUserSerializer\nfrom rest_framework import serializers\n\nfrom .models import DevicePermissions, DeviceSettings\n\n\nclass DevicePermissionsSerializer(serializers.ModelSerializer):\n\n class Meta:\n model = DevicePermissions\n fields = (\n 'user', 'is_superuser', 'can_manage_content',\n )\n\nclass NoFacilityFacilityUserSerializer(FacilityUserSerializer):\n\n class Meta:\n model = FacilityUser\n fields = ('id', 'username', 'full_name', 'password', )\n\n\nclass DeviceProvisionSerializer(serializers.Serializer):\n facility = FacilitySerializer()\n preset = serializers.ChoiceField(choices=choices)\n superuser = NoFacilityFacilityUserSerializer()\n language_id = serializers.CharField(max_length=15)\n\n class Meta:\n fields = ('facility', 'dataset', 'superuser', 'language_id')\n\n def validate_language_id(self, language_id):\n \"\"\"\n Check that the language_id is supported by Kolibri\n \"\"\"\n if not check_for_language(language_id):\n raise serializers.ValidationError(_(\"Language is not supported by Kolibri\"))\n return language_id\n\n def create(self, validated_data):\n \"\"\"\n Endpoint for initial setup of a device.\n Expects a value for:\n default language - the default language of this Kolibri device\n facility - the required fields for setting up a facility\n facilitydataset - facility configuration options\n superuser - the required fields for a facilityuser who will be set as the super user for this device\n \"\"\"\n with transaction.atomic():\n facility = Facility.objects.create(**validated_data.pop('facility'))\n preset = validated_data.pop('preset')\n dataset_data = mappings[preset]\n for key, value in 
dataset_data.items():\n setattr(facility.dataset, key, value)\n facility.dataset.save()\n superuser_data = validated_data.pop('superuser')\n superuser_data['facility'] = facility\n superuser = FacilityUserSerializer(data=superuser_data).create(superuser_data)\n facility.add_role(superuser, ADMIN)\n DevicePermissions.objects.create(user=superuser, is_superuser=True)\n language_id = validated_data.pop('language_id')\n device_settings, created = DeviceSettings.objects.get_or_create()\n device_settings.is_provisioned = True\n device_settings.language_id = language_id\n device_settings.save()\n return {\n \"facility\": facility,\n \"preset\": preset,\n \"superuser\": superuser,\n \"language_id\": language_id\n }\n", "path": "kolibri/core/device/serializers.py"}, {"content": "from django.conf import settings\nfrom django.db import models\nfrom kolibri.auth.models import FacilityUser\n\nfrom .permissions import UserCanManageDevicePermissions\n\n\nclass DevicePermissions(models.Model):\n \"\"\"\n This class stores metadata about device permissions for FacilityUsers.\n \"\"\"\n\n permissions = UserCanManageDevicePermissions()\n\n user = models.OneToOneField(FacilityUser, on_delete=models.CASCADE, related_name='devicepermissions', blank=False, null=False, primary_key=True)\n is_superuser = models.BooleanField(default=False)\n can_manage_content = models.BooleanField(default=False)\n\n\nclass DeviceSettings(models.Model):\n \"\"\"\n This class stores data about settings particular to this device\n \"\"\"\n\n is_provisioned = models.BooleanField(default=False)\n language_id = models.CharField(max_length=15, default=settings.LANGUAGE_CODE)\n\n def save(self, *args, **kwargs):\n self.pk = 1\n super(DeviceSettings, self).save(*args, **kwargs)\n", "path": "kolibri/core/device/models.py"}, {"content": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.7 on 2017-08-16 23:05\nfrom __future__ import unicode_literals\n\nimport django.db.models.deletion\nfrom django.conf import settings\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('kolibriauth', '0003_auto_20170621_0958'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='DevicePermissions',\n fields=[\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='devicepermissions', serialize=False, to=settings.AUTH_USER_MODEL)),\n ('is_superuser', models.BooleanField(default=False)),\n ('can_manage_content', models.BooleanField(default=False)),\n ],\n ),\n migrations.CreateModel(\n name='DeviceSettings',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('is_provisioned', models.BooleanField(default=False)),\n ('language_id', models.CharField(default='en', max_length=15)),\n ],\n ),\n ]\n", "path": "kolibri/core/device/migrations/0001_initial.py"}]} | 1,679 | 718 |
gh_patches_debug_24573 | rasdani/github-patches | git_diff | TabbycatDebate__tabbycat-1258 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error message for BP voting ballots crashes with 500
Sentry Issue: [BACKEND-2BV](https://sentry.io/organizations/tabbycat/issues/1252961179/?referrer=github_integration)
```
AttributeError: 'tuple' object has no attribute 'replace'
(6 additional frame(s) were not displayed)
...
File "django/views/generic/base.py", line 97, in dispatch
return handler(request, *args, **kwargs)
File "django/views/generic/base.py", line 158, in get
context = self.get_context_data(**kwargs)
File "options/views.py", line 54, in get_context_data
"configuration to use consensus ballots."))
File "django/utils/translation/__init__.py", line 79, in gettext
return _trans.gettext(message)
File "django/utils/translation/trans_real.py", line 282, in gettext
eol_message = message.replace('\r\n', '\n').replace('\r', '\n')
```
--- END ISSUE ---
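The `AttributeError` in the traceback is a classic Python pitfall: comma-separated string literals inside parentheses build a tuple, while adjacent literals without commas concatenate into a single string, and Django's `gettext()` immediately calls `.replace()` on whatever it receives. A minimal reproduction, plain Python with no Django required:

```python
broken = ("Your draw rules specify four teams per-debate but ",
          "your ballot setting specifies that adjudicators ...")  # commas: tuple

fixed = ("Your draw rules specify four teams per-debate but "
         "your ballot setting specifies that adjudicators ...")   # no commas: one str

print(type(broken).__name__, type(fixed).__name__)  # tuple str

# Django's gettext() starts with message.replace('\r\n', '\n'), hence the crash:
try:
    broken.replace('\r\n', '\n')
except AttributeError as exc:
    print(exc)  # 'tuple' object has no attribute 'replace'
```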
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tabbycat/options/views.py`
Content:
```
1 import logging
2
3 from django.contrib import messages
4 from django.http import Http404
5 from django.utils.text import slugify
6 from django.utils.translation import gettext as _
7 from django.views.generic import TemplateView
8 from dynamic_preferences.views import PreferenceFormView
9
10 from actionlog.mixins import LogActionMixin
11 from actionlog.models import ActionLogEntry
12 from tournaments.mixins import TournamentMixin
13 from utils.mixins import AdministratorMixin
14 from utils.misc import reverse_tournament
15
16 from .presets import all_presets, get_preferences_data
17 from .forms import tournament_preference_form_builder
18 from .preferences import tournament_preferences_registry
19
20 logger = logging.getLogger(__name__)
21
22
23 class TournamentConfigIndexView(AdministratorMixin, TournamentMixin, TemplateView):
24 template_name = "preferences_index.html"
25
26 def get_preset_options(self):
27 """Returns a list of all preset classes."""
28 preset_options = []
29
30 for preset_class in all_presets():
31 preset_class.slugified_name = slugify(preset_class.__name__)
32 preset_options.append(preset_class)
33
34 preset_options.sort(key=lambda x: (x.show_in_list, x.name))
35 return preset_options
36
37 def get_context_data(self, **kwargs):
38 kwargs["presets"] = self.get_preset_options()
39 t = self.tournament
40 if t.pref('teams_in_debate') == 'bp':
41 if t.pref('ballots_per_debate_prelim') == 'per-adj' or \
42 t.pref('ballots_per_debate_elim') == 'per-adj':
43 error = _(("Your draw rules specify four teams per-debate but ",
44 "your ballot setting specifies that adjudicators ",
45 "submit independent ballots. These settings ",
46 "<strong>are not compatible and will cause results ",
47 "entry to crash</strong>. You need to go back to ",
48 "the Debate Rules settings and change your ",
49 "configuration to use consensus ballots."))
50 messages.error(self.request, error)
51
52 return super().get_context_data(**kwargs)
53
54
55 class TournamentPreferenceFormView(AdministratorMixin, LogActionMixin, TournamentMixin, PreferenceFormView):
56 registry = tournament_preferences_registry
57 section = None
58 template_name = "preferences_section_set.html"
59
60 action_log_type = ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT
61
62 def form_valid(self, *args, **kwargs):
63 messages.success(self.request, _("Tournament options (%(section)s) saved.") % {'section': self.section.verbose_name})
64 return super().form_valid(*args, **kwargs)
65
66 def get_success_url(self):
67 return reverse_tournament('options-tournament-index', self.tournament)
68
69 def get_form_class(self, *args, **kwargs):
70 section = self.kwargs.get('section', None)
71 form_class = tournament_preference_form_builder(instance=self.tournament, section=section)
72 return form_class
73
74
75 class ConfirmTournamentPreferencesView(AdministratorMixin, TournamentMixin, TemplateView):
76 template_name = "preferences_presets_confirm.html"
77
78 def get_selected_preset(self):
79 preset_name = self.kwargs["preset_name"]
80 # Retrieve the class that matches the name
81 selected_presets = [x for x in all_presets() if slugify(x.__name__) == preset_name]
82 if len(selected_presets) == 0:
83 logger.warning("Could not find preset: %s", preset_name)
84 raise Http404("Preset {!r} not found.".format(preset_name))
85 elif len(selected_presets) > 1:
86 logger.warning("Found more than one preset for %s", preset_name)
87 return selected_presets[0]
88
89 def get_context_data(self, **kwargs):
90 selected_preset = self.get_selected_preset()
91 preset_preferences = get_preferences_data(selected_preset, self.tournament)
92 kwargs["preset_title"] = selected_preset.name
93 kwargs["preset_name"] = self.kwargs["preset_name"]
94 kwargs["changed_preferences"] = [p for p in preset_preferences if p['changed']]
95 kwargs["unchanged_preferences"] = [p for p in preset_preferences if not p['changed']]
96 return super().get_context_data(**kwargs)
97
98 def get_template_names(self):
99 if self.request.method == 'GET':
100 return ["preferences_presets_confirm.html"]
101 else:
102 return ["preferences_presets_complete.html"]
103
104 def save_presets(self):
105 selected_preset = self.get_selected_preset()
106 preset_preferences = get_preferences_data(selected_preset, self.tournament)
107
108 for pref in preset_preferences:
109 self.tournament.preferences[pref['key']] = pref['new_value']
110
111 ActionLogEntry.objects.log(type=ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT,
112 user=self.request.user, tournament=self.tournament, content_object=self.tournament)
113 messages.success(self.request, _("Tournament options saved according to preset "
114 "%(name)s.") % {'name': selected_preset.name})
115
116 def post(self, request, *args, **kwargs):
117 context = self.get_context_data(**kwargs)
118 self.save_presets()
119 return self.render_to_response(context)
120
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/tabbycat/options/views.py b/tabbycat/options/views.py
--- a/tabbycat/options/views.py
+++ b/tabbycat/options/views.py
@@ -40,13 +40,13 @@
if t.pref('teams_in_debate') == 'bp':
if t.pref('ballots_per_debate_prelim') == 'per-adj' or \
t.pref('ballots_per_debate_elim') == 'per-adj':
- error = _(("Your draw rules specify four teams per-debate but ",
- "your ballot setting specifies that adjudicators ",
- "submit independent ballots. These settings ",
- "<strong>are not compatible and will cause results ",
- "entry to crash</strong>. You need to go back to ",
- "the Debate Rules settings and change your ",
- "configuration to use consensus ballots."))
+ error = _("Your draw rules specify four teams per-debate but "
+ "your ballot setting specifies that adjudicators "
+ "submit independent ballots. These settings "
+ "<strong>are not compatible and will cause results "
+ "entry to crash</strong>. You need to go back to "
+ "the Debate Rules settings and change your "
+ "configuration to use consensus ballots.")
messages.error(self.request, error)
return super().get_context_data(**kwargs)
| {"golden_diff": "diff --git a/tabbycat/options/views.py b/tabbycat/options/views.py\n--- a/tabbycat/options/views.py\n+++ b/tabbycat/options/views.py\n@@ -40,13 +40,13 @@\n if t.pref('teams_in_debate') == 'bp':\n if t.pref('ballots_per_debate_prelim') == 'per-adj' or \\\n t.pref('ballots_per_debate_elim') == 'per-adj':\n- error = _((\"Your draw rules specify four teams per-debate but \",\n- \"your ballot setting specifies that adjudicators \",\n- \"submit independent ballots. These settings \",\n- \"<strong>are not compatible and will cause results \",\n- \"entry to crash</strong>. You need to go back to \",\n- \"the Debate Rules settings and change your \",\n- \"configuration to use consensus ballots.\"))\n+ error = _(\"Your draw rules specify four teams per-debate but \"\n+ \"your ballot setting specifies that adjudicators \"\n+ \"submit independent ballots. These settings \"\n+ \"<strong>are not compatible and will cause results \"\n+ \"entry to crash</strong>. You need to go back to \"\n+ \"the Debate Rules settings and change your \"\n+ \"configuration to use consensus ballots.\")\n messages.error(self.request, error)\n \n return super().get_context_data(**kwargs)\n", "issue": "Error message for BP voting ballots crashes with 500\nSentry Issue: [BACKEND-2BV](https://sentry.io/organizations/tabbycat/issues/1252961179/?referrer=github_integration)\n\n```\nAttributeError: 'tuple' object has no attribute 'replace'\n(6 additional frame(s) were not displayed)\n...\n File \"django/views/generic/base.py\", line 97, in dispatch\n return handler(request, *args, **kwargs)\n File \"django/views/generic/base.py\", line 158, in get\n context = self.get_context_data(**kwargs)\n File \"options/views.py\", line 54, in get_context_data\n \"configuration to use consensus ballots.\"))\n File \"django/utils/translation/__init__.py\", line 79, in gettext\n return _trans.gettext(message)\n File \"django/utils/translation/trans_real.py\", line 282, in gettext\n eol_message = message.replace('\\r\\n', '\\n').replace('\\r', '\\n')\n```\n", "before_files": [{"content": "import logging\n\nfrom django.contrib import messages\nfrom django.http import Http404\nfrom django.utils.text import slugify\nfrom django.utils.translation import gettext as _\nfrom django.views.generic import TemplateView\nfrom dynamic_preferences.views import PreferenceFormView\n\nfrom actionlog.mixins import LogActionMixin\nfrom actionlog.models import ActionLogEntry\nfrom tournaments.mixins import TournamentMixin\nfrom utils.mixins import AdministratorMixin\nfrom utils.misc import reverse_tournament\n\nfrom .presets import all_presets, get_preferences_data\nfrom .forms import tournament_preference_form_builder\nfrom .preferences import tournament_preferences_registry\n\nlogger = logging.getLogger(__name__)\n\n\nclass TournamentConfigIndexView(AdministratorMixin, TournamentMixin, TemplateView):\n template_name = \"preferences_index.html\"\n\n def get_preset_options(self):\n \"\"\"Returns a list of all preset classes.\"\"\"\n preset_options = []\n\n for preset_class in all_presets():\n preset_class.slugified_name = slugify(preset_class.__name__)\n preset_options.append(preset_class)\n\n preset_options.sort(key=lambda x: (x.show_in_list, x.name))\n return preset_options\n\n def get_context_data(self, **kwargs):\n kwargs[\"presets\"] = self.get_preset_options()\n t = self.tournament\n if t.pref('teams_in_debate') == 'bp':\n if t.pref('ballots_per_debate_prelim') == 'per-adj' or \\\n t.pref('ballots_per_debate_elim') == 'per-adj':\n error = 
_((\"Your draw rules specify four teams per-debate but \",\n \"your ballot setting specifies that adjudicators \",\n \"submit independent ballots. These settings \",\n \"<strong>are not compatible and will cause results \",\n \"entry to crash</strong>. You need to go back to \",\n \"the Debate Rules settings and change your \",\n \"configuration to use consensus ballots.\"))\n messages.error(self.request, error)\n\n return super().get_context_data(**kwargs)\n\n\nclass TournamentPreferenceFormView(AdministratorMixin, LogActionMixin, TournamentMixin, PreferenceFormView):\n registry = tournament_preferences_registry\n section = None\n template_name = \"preferences_section_set.html\"\n\n action_log_type = ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT\n\n def form_valid(self, *args, **kwargs):\n messages.success(self.request, _(\"Tournament options (%(section)s) saved.\") % {'section': self.section.verbose_name})\n return super().form_valid(*args, **kwargs)\n\n def get_success_url(self):\n return reverse_tournament('options-tournament-index', self.tournament)\n\n def get_form_class(self, *args, **kwargs):\n section = self.kwargs.get('section', None)\n form_class = tournament_preference_form_builder(instance=self.tournament, section=section)\n return form_class\n\n\nclass ConfirmTournamentPreferencesView(AdministratorMixin, TournamentMixin, TemplateView):\n template_name = \"preferences_presets_confirm.html\"\n\n def get_selected_preset(self):\n preset_name = self.kwargs[\"preset_name\"]\n # Retrieve the class that matches the name\n selected_presets = [x for x in all_presets() if slugify(x.__name__) == preset_name]\n if len(selected_presets) == 0:\n logger.warning(\"Could not find preset: %s\", preset_name)\n raise Http404(\"Preset {!r} no found.\".format(preset_name))\n elif len(selected_presets) > 1:\n logger.warning(\"Found more than one preset for %s\", preset_name)\n return selected_presets[0]\n\n def get_context_data(self, **kwargs):\n selected_preset = self.get_selected_preset()\n preset_preferences = get_preferences_data(selected_preset, self.tournament)\n kwargs[\"preset_title\"] = selected_preset.name\n kwargs[\"preset_name\"] = self.kwargs[\"preset_name\"]\n kwargs[\"changed_preferences\"] = [p for p in preset_preferences if p['changed']]\n kwargs[\"unchanged_preferences\"] = [p for p in preset_preferences if not p['changed']]\n return super().get_context_data(**kwargs)\n\n def get_template_names(self):\n if self.request.method == 'GET':\n return [\"preferences_presets_confirm.html\"]\n else:\n return [\"preferences_presets_complete.html\"]\n\n def save_presets(self):\n selected_preset = self.get_selected_preset()\n preset_preferences = get_preferences_data(selected_preset, self.tournament)\n\n for pref in preset_preferences:\n self.tournament.preferences[pref['key']] = pref['new_value']\n\n ActionLogEntry.objects.log(type=ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT,\n user=self.request.user, tournament=self.tournament, content_object=self.tournament)\n messages.success(self.request, _(\"Tournament options saved according to preset \"\n \"%(name)s.\") % {'name': selected_preset.name})\n\n def post(self, request, *args, **kwargs):\n context = self.get_context_data(**kwargs)\n self.save_presets()\n return self.render_to_response(context)\n", "path": "tabbycat/options/views.py"}], "after_files": [{"content": "import logging\n\nfrom django.contrib import messages\nfrom django.http import Http404\nfrom django.utils.text import slugify\nfrom django.utils.translation import gettext as _\nfrom 
django.views.generic import TemplateView\nfrom dynamic_preferences.views import PreferenceFormView\n\nfrom actionlog.mixins import LogActionMixin\nfrom actionlog.models import ActionLogEntry\nfrom tournaments.mixins import TournamentMixin\nfrom utils.mixins import AdministratorMixin\nfrom utils.misc import reverse_tournament\n\nfrom .presets import all_presets, get_preferences_data\nfrom .forms import tournament_preference_form_builder\nfrom .preferences import tournament_preferences_registry\n\nlogger = logging.getLogger(__name__)\n\n\nclass TournamentConfigIndexView(AdministratorMixin, TournamentMixin, TemplateView):\n template_name = \"preferences_index.html\"\n\n def get_preset_options(self):\n \"\"\"Returns a list of all preset classes.\"\"\"\n preset_options = []\n\n for preset_class in all_presets():\n preset_class.slugified_name = slugify(preset_class.__name__)\n preset_options.append(preset_class)\n\n preset_options.sort(key=lambda x: (x.show_in_list, x.name))\n return preset_options\n\n def get_context_data(self, **kwargs):\n kwargs[\"presets\"] = self.get_preset_options()\n t = self.tournament\n if t.pref('teams_in_debate') == 'bp':\n if t.pref('ballots_per_debate_prelim') == 'per-adj' or \\\n t.pref('ballots_per_debate_elim') == 'per-adj':\n error = _(\"Your draw rules specify four teams per-debate but \"\n \"your ballot setting specifies that adjudicators \"\n \"submit independent ballots. These settings \"\n \"<strong>are not compatible and will cause results \"\n \"entry to crash</strong>. You need to go back to \"\n \"the Debate Rules settings and change your \"\n \"configuration to use consensus ballots.\")\n messages.error(self.request, error)\n\n return super().get_context_data(**kwargs)\n\n\nclass TournamentPreferenceFormView(AdministratorMixin, LogActionMixin, TournamentMixin, PreferenceFormView):\n registry = tournament_preferences_registry\n section = None\n template_name = \"preferences_section_set.html\"\n\n action_log_type = ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT\n\n def form_valid(self, *args, **kwargs):\n messages.success(self.request, _(\"Tournament options (%(section)s) saved.\") % {'section': self.section.verbose_name})\n return super().form_valid(*args, **kwargs)\n\n def get_success_url(self):\n return reverse_tournament('options-tournament-index', self.tournament)\n\n def get_form_class(self, *args, **kwargs):\n section = self.kwargs.get('section', None)\n form_class = tournament_preference_form_builder(instance=self.tournament, section=section)\n return form_class\n\n\nclass ConfirmTournamentPreferencesView(AdministratorMixin, TournamentMixin, TemplateView):\n template_name = \"preferences_presets_confirm.html\"\n\n def get_selected_preset(self):\n preset_name = self.kwargs[\"preset_name\"]\n # Retrieve the class that matches the name\n selected_presets = [x for x in all_presets() if slugify(x.__name__) == preset_name]\n if len(selected_presets) == 0:\n logger.warning(\"Could not find preset: %s\", preset_name)\n raise Http404(\"Preset {!r} no found.\".format(preset_name))\n elif len(selected_presets) > 1:\n logger.warning(\"Found more than one preset for %s\", preset_name)\n return selected_presets[0]\n\n def get_context_data(self, **kwargs):\n selected_preset = self.get_selected_preset()\n preset_preferences = get_preferences_data(selected_preset, self.tournament)\n kwargs[\"preset_title\"] = selected_preset.name\n kwargs[\"preset_name\"] = self.kwargs[\"preset_name\"]\n kwargs[\"changed_preferences\"] = [p for p in preset_preferences if p['changed']]\n 
kwargs[\"unchanged_preferences\"] = [p for p in preset_preferences if not p['changed']]\n return super().get_context_data(**kwargs)\n\n def get_template_names(self):\n if self.request.method == 'GET':\n return [\"preferences_presets_confirm.html\"]\n else:\n return [\"preferences_presets_complete.html\"]\n\n def save_presets(self):\n selected_preset = self.get_selected_preset()\n preset_preferences = get_preferences_data(selected_preset, self.tournament)\n\n for pref in preset_preferences:\n self.tournament.preferences[pref['key']] = pref['new_value']\n\n ActionLogEntry.objects.log(type=ActionLogEntry.ACTION_TYPE_OPTIONS_EDIT,\n user=self.request.user, tournament=self.tournament, content_object=self.tournament)\n messages.success(self.request, _(\"Tournament options saved according to preset \"\n \"%(name)s.\") % {'name': selected_preset.name})\n\n def post(self, request, *args, **kwargs):\n context = self.get_context_data(**kwargs)\n self.save_presets()\n return self.render_to_response(context)\n", "path": "tabbycat/options/views.py"}]} | 1,823 | 296 |
gh_patches_debug_21936 | rasdani/github-patches | git_diff | beeware__toga-1373 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use Alpha Version of Pythonnet
**Description**
A few days ago, Pythonnet released an [alpha version](https://pypi.org/project/pythonnet/3.0.0a1/) of Pythonnet 3.0.
At the moment we use a version of Pythonnet pinned to commit hash 8d93c39d instead of an official release.
If we don't want to wait until an official version of Pythonnet is released (and we have no estimate of when that will happen), I think we should at least use the alpha version.
**Describe alternatives you've considered**
An alternative is to keep the hashed version as it is :)
--- END ISSUE ---
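One detail worth spelling out (a gloss, not from the issue): pip skips pre-releases by default, but a specifier that explicitly names a pre-release opts that requirement in, which is why depending on `pythonnet>=3.0.0a1` can resolve without `--pre`. The `packaging` library that pip builds on demonstrates the rule:

```python
# How pip's resolver treats pre-releases, shown via the packaging library.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

v = Version("3.0.0a1")
print(v in SpecifierSet(">=2.5"))                    # False: pre-releases excluded
print(v in SpecifierSet(">=3.0.0a1"))                # True: specifier names one
print(v in SpecifierSet(">=2.5", prereleases=True))  # True: explicit opt-in
```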
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/winforms/setup.py`
Content:
```
1 #!/usr/bin/env python
2 import re
3
4 from setuptools import setup
5
6 # Version handling needs to be programmatic because
7 # we can't import toga_winforms to compute the version;
8 # and to support versioned subpackage dependencies
9 with open('toga_winforms/__init__.py', encoding='utf8') as version_file:
10 version_match = re.search(
11 r"^__version__ = ['\"]([^'\"]*)['\"]",
12 version_file.read(),
13 re.M
14 )
15 if version_match:
16 version = version_match.group(1)
17 else:
18 raise RuntimeError("Unable to find version string.")
19
20 setup(
21 version=version,
22 install_requires=[
23 # The Python.net team hasn't published 2.X wheels for Python 3.9 or 3.10,
24 # and their development effort seems to be focussed on the 3.X branch;
25 # they've indicated they're not planning to make the 2.X branch compatible
26 # with Python 3.10. If we want to be able to support "current" Python,
27 # we need to work off a source release until they formally release 3.0.
28 #
29 # The 8d93c39d hash is, as best as I can work out, what was in the
30 # 3.0.0-preview2021-10-05 release published to nuget - but they didn't
31 # tag anything for that release. That release contained a bug
32 # (https://github.com/pythonnet/pythonnet/issues/1613) that didn't play well
33 # with pip 21.3, so we use 94b1a71c which was released about a month later.
34 'pythonnet @ git+https://github.com/pythonnet/pythonnet@94b1a71c#egg=pythonnet',
35 'toga-core==%s' % version,
36 ],
37 test_suite='tests',
38 test_require=[
39 'toga-dummy==%s' % version,
40 ]
41 )
42
```
--- END FILES ---
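For context on the pin in `install_requires` above: `pythonnet @ git+...` is a PEP 508 direct reference, and, as I understand it, PyPI rejects uploaded distributions whose metadata declares dependencies that way, so a published package effectively cannot keep the git pin. The two forms side by side (shown together only for comparison; a real setup.py would pick one):

```python
install_requires = [
    # PEP 508 direct reference: pins a git revision; fine for source installs,
    # but (reportedly) not accepted in packages uploaded to PyPI.
    'pythonnet @ git+https://github.com/pythonnet/pythonnet@94b1a71c#egg=pythonnet',
    # Plain version specifier: resolvable from PyPI once 3.0.0a1 is published.
    'pythonnet>=3.0.0a1',
]
```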
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/winforms/setup.py b/src/winforms/setup.py
--- a/src/winforms/setup.py
+++ b/src/winforms/setup.py
@@ -24,14 +24,11 @@
# and their development effort seems to be focussed on the 3.X branch;
# they've indicated they're not planning to make the 2.X branch compatible
# with Python 3.10. If we want to be able to support "current" Python,
- # we need to work off a source release until they formally release 3.0.
+ # we need to use the 3.0 branch.
#
- # The 8d93c39d hash is, as best as I can work out, what was in the
- # 3.0.0-preview2021-10-05 release published to nuget - but they didn't
- # tag anything for that release. That release contained a bug
- # (https://github.com/pythonnet/pythonnet/issues/1613) that didn't play well
- # with pip 21.3, so we use 94b1a71c which was released about a month later.
- 'pythonnet @ git+https://github.com/pythonnet/pythonnet@94b1a71c#egg=pythonnet',
+ # At time of writing, the most recent (and only) version of Python.net 3.0
+ # that has been released is the alpha version 3.0.0a1.
+ 'pythonnet>=3.0.0a1',
'toga-core==%s' % version,
],
test_suite='tests',
| {"golden_diff": "diff --git a/src/winforms/setup.py b/src/winforms/setup.py\n--- a/src/winforms/setup.py\n+++ b/src/winforms/setup.py\n@@ -24,14 +24,11 @@\n # and their development effort seems to be focussed on the 3.X branch;\n # they've indicated they're not planning to make the 2.X branch compatible\n # with Python 3.10. If we want to be able to support \"current\" Python,\n- # we need to work off a source release until they formally release 3.0.\n+ # we need to use the 3.0 branch.\n #\n- # The 8d93c39d hash is, as best as I can work out, what was in the\n- # 3.0.0-preview2021-10-05 release published to nuget - but they didn't\n- # tag anything for that release. That release contained a bug\n- # (https://github.com/pythonnet/pythonnet/issues/1613) that didn't play well\n- # with pip 21.3, so we use 94b1a71c which was released about a month later.\n- 'pythonnet @ git+https://github.com/pythonnet/pythonnet@94b1a71c#egg=pythonnet',\n+ # At time of writing, the most recent (and only) version of Python.net 3.0\n+ # that has been released is the alpha version 3.0.0a1.\n+ 'pythonnet>=3.0.0a1',\n 'toga-core==%s' % version,\n ],\n test_suite='tests',\n", "issue": "Use Alpha Version of Pythonnet\n**Description**\r\nPythonnet has released a few days ago an [alpha version](https://pypi.org/project/pythonnet/3.0.0a1/) of Pythonnet 3.0.\r\nATM we use a hashed version (8d93c39d) of Pythonnet instead of an official release.\r\n\r\nIn the case that we don't want to wait until an official version of Pythonnet is released (which we don't have any approximation when this would happen), I think we should at least use the alpha version.\r\n\r\n**Describe alternatives you've considered**\r\nAn alternative is to keep the hashed version as it is :)\n", "before_files": [{"content": "#!/usr/bin/env python\nimport re\n\nfrom setuptools import setup\n\n# Version handline needs to be programatic because\n# we can't import toga_winforms to compute the version;\n# and to support versioned subpackage dependencies\nwith open('toga_winforms/__init__.py', encoding='utf8') as version_file:\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file.read(),\n re.M\n )\n if version_match:\n version = version_match.group(1)\n else:\n raise RuntimeError(\"Unable to find version string.\")\n\nsetup(\n version=version,\n install_requires=[\n # The Python.net team hasn't published 2.X wheels for Python 3.9 or 3.10,\n # and their development effort seems to be focussed on the 3.X branch;\n # they've indicated they're not planning to make the 2.X branch compatible\n # with Python 3.10. If we want to be able to support \"current\" Python,\n # we need to work off a source release until they formally release 3.0.\n #\n # The 8d93c39d hash is, as best as I can work out, what was in the\n # 3.0.0-preview2021-10-05 release published to nuget - but they didn't\n # tag anything for that release. 
That release contained a bug\n # (https://github.com/pythonnet/pythonnet/issues/1613) that didn't play well\n # with pip 21.3, so we use 94b1a71c which was released about a month later.\n 'pythonnet @ git+https://github.com/pythonnet/pythonnet@94b1a71c#egg=pythonnet',\n 'toga-core==%s' % version,\n ],\n test_suite='tests',\n test_require=[\n 'toga-dummy==%s' % version,\n ]\n)\n", "path": "src/winforms/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport re\n\nfrom setuptools import setup\n\n# Version handline needs to be programatic because\n# we can't import toga_winforms to compute the version;\n# and to support versioned subpackage dependencies\nwith open('toga_winforms/__init__.py', encoding='utf8') as version_file:\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\",\n version_file.read(),\n re.M\n )\n if version_match:\n version = version_match.group(1)\n else:\n raise RuntimeError(\"Unable to find version string.\")\n\nsetup(\n version=version,\n install_requires=[\n # The Python.net team hasn't published 2.X wheels for Python 3.9 or 3.10,\n # and their development effort seems to be focussed on the 3.X branch;\n # they've indicated they're not planning to make the 2.X branch compatible\n # with Python 3.10. If we want to be able to support \"current\" Python,\n # we need to use the 3.0 branch.\n #\n # At time of writing, the most recent (and only) version of Python.net 3.0\n # that has been released is the alpha version 3.0.0a1.\n 'pythonnet>=3.0.0a1',\n 'toga-core==%s' % version,\n ],\n test_suite='tests',\n test_require=[\n 'toga-dummy==%s' % version,\n ]\n)\n", "path": "src/winforms/setup.py"}]} | 918 | 381 |
gh_patches_debug_26975 | rasdani/github-patches | git_diff | inventree__InvenTree-4151 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[FR] :checkered_flag: Simple API endpoint to change user metadata
### Please verify that this feature request has NOT been suggested before.
- [X] I checked and didn't find similar feature request
### Problem statement
We currently provide an API endpoint for editing user data via a form. The endpoints for fetching and for changing user data are different and take different parameters.
### Suggested solution
I think it would be better to provide a RetrieveUpdateAPI endpoint under `/api/user/me` (something of a convention). This endpoint could both return and patch all the metadata for a user.
### Describe alternatives you've considered
N/A
### Examples of other systems
_No response_
### Do you want to develop this?
- [X] I want to develop this.
--- END ISSUE ---
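A minimal sketch of the suggested endpoint, matching the shape the accepted diff below uses (a retrieve-update view whose `get_object()` ignores URL kwargs and returns the caller); names are illustrative and DRF's stock generics stand in for the project's own mixins:

```python
from django.urls import re_path
from rest_framework import generics, permissions

from InvenTree.serializers import UserSerializer  # as imported in the files above


class MeUserDetail(generics.RetrieveUpdateAPIView):
    """GET returns, and PATCH/PUT updates, the authenticated user's own record."""

    serializer_class = UserSerializer
    permission_classes = (permissions.IsAuthenticated,)

    def get_object(self):
        # No pk lookup: /api/user/me always resolves to the requesting user.
        return self.request.user


urlpatterns = [
    re_path(r'^me/', MeUserDetail.as_view(), name='api-user-me'),
]
```

A client then issues `GET /api/user/me/` to read its own record and `PATCH` the same URL to change it, without ever needing to know its own pk.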
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `InvenTree/users/api.py`
Content:
```
1 """DRF API definition for the 'users' app"""
2
3 from django.contrib.auth.models import User
4 from django.core.exceptions import ObjectDoesNotExist
5 from django.urls import include, path, re_path
6
7 from django_filters.rest_framework import DjangoFilterBackend
8 from rest_framework import filters, permissions, status
9 from rest_framework.authtoken.models import Token
10 from rest_framework.response import Response
11 from rest_framework.views import APIView
12
13 from InvenTree.mixins import ListAPI, RetrieveAPI
14 from InvenTree.serializers import UserSerializer
15 from users.models import Owner, RuleSet, check_user_role
16 from users.serializers import OwnerSerializer
17
18
19 class OwnerList(ListAPI):
20 """List API endpoint for Owner model.
21
22 Cannot create.
23 """
24
25 queryset = Owner.objects.all()
26 serializer_class = OwnerSerializer
27
28 def filter_queryset(self, queryset):
29 """Implement text search for the "owner" model.
30
31 Note that an "owner" can be either a group, or a user,
32 so we cannot do a direct text search.
33
34 A "hack" here is to post-process the queryset and simply
35 remove any values which do not match.
36
37 It is not necessarily "efficient" to do it this way,
38 but until we determine a better way, this is what we have...
39 """
40 search_term = str(self.request.query_params.get('search', '')).lower()
41
42 queryset = super().filter_queryset(queryset)
43
44 if not search_term:
45 return queryset
46
47 results = []
48
49 # Extract search term f
50
51 for result in queryset.all():
52 if search_term in result.name().lower():
53 results.append(result)
54
55 return results
56
57
58 class OwnerDetail(RetrieveAPI):
59 """Detail API endpoint for Owner model.
60
61 Cannot edit or delete
62 """
63
64 queryset = Owner.objects.all()
65 serializer_class = OwnerSerializer
66
67
68 class RoleDetails(APIView):
69 """API endpoint which lists the available role permissions for the current user.
70
71 (Requires authentication)
72 """
73
74 permission_classes = [
75 permissions.IsAuthenticated
76 ]
77
78 def get(self, request, *args, **kwargs):
79 """Return the list of roles / permissions available to the current user"""
80 user = request.user
81
82 roles = {}
83
84 for ruleset in RuleSet.RULESET_CHOICES:
85
86 role, text = ruleset
87
88 permissions = []
89
90 for permission in RuleSet.RULESET_PERMISSIONS:
91 if check_user_role(user, role, permission):
92
93 permissions.append(permission)
94
95 if len(permissions) > 0:
96 roles[role] = permissions
97 else:
98 roles[role] = None # pragma: no cover
99
100 data = {
101 'user': user.pk,
102 'username': user.username,
103 'roles': roles,
104 'is_staff': user.is_staff,
105 'is_superuser': user.is_superuser,
106 }
107
108 return Response(data)
109
110
111 class UserDetail(RetrieveAPI):
112 """Detail endpoint for a single user."""
113
114 queryset = User.objects.all()
115 serializer_class = UserSerializer
116 permission_classes = (permissions.IsAuthenticated,)
117
118
119 class UserList(ListAPI):
120 """List endpoint for detail on all users."""
121
122 queryset = User.objects.all()
123 serializer_class = UserSerializer
124 permission_classes = (permissions.IsAuthenticated,)
125
126 filter_backends = [
127 DjangoFilterBackend,
128 filters.SearchFilter,
129 ]
130
131 search_fields = [
132 'first_name',
133 'last_name',
134 'username',
135 ]
136
137
138 class GetAuthToken(APIView):
139 """Return authentication token for an authenticated user."""
140
141 permission_classes = [
142 permissions.IsAuthenticated,
143 ]
144
145 def get(self, request, *args, **kwargs):
146 """Return an API token if the user is authenticated
147
148 - If the user already has a token, return it
149 - Otherwise, create a new token
150 """
151 if request.user.is_authenticated:
152 # Get the user token (or create one if it does not exist)
153 token, created = Token.objects.get_or_create(user=request.user)
154 return Response({
155 'token': token.key,
156 })
157
158 def delete(self, request):
159 """User has requested deletion of API token"""
160 try:
161 request.user.auth_token.delete()
162 return Response({"success": "Successfully logged out."},
163 status=status.HTTP_202_ACCEPTED)
164 except (AttributeError, ObjectDoesNotExist):
165 return Response({"error": "Bad request"},
166 status=status.HTTP_400_BAD_REQUEST)
167
168
169 user_urls = [
170
171 re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),
172 re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),
173
174 re_path(r'^owner/', include([
175 path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),
176 re_path(r'^.*$', OwnerList.as_view(), name='api-owner-list'),
177 ])),
178
179 re_path(r'^(?P<pk>[0-9]+)/?$', UserDetail.as_view(), name='user-detail'),
180 path('', UserList.as_view()),
181 ]
182
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/InvenTree/users/api.py b/InvenTree/users/api.py
--- a/InvenTree/users/api.py
+++ b/InvenTree/users/api.py
@@ -10,7 +10,7 @@
from rest_framework.response import Response
from rest_framework.views import APIView
-from InvenTree.mixins import ListAPI, RetrieveAPI
+from InvenTree.mixins import ListAPI, RetrieveAPI, RetrieveUpdateAPI
from InvenTree.serializers import UserSerializer
from users.models import Owner, RuleSet, check_user_role
from users.serializers import OwnerSerializer
@@ -116,6 +116,14 @@
permission_classes = (permissions.IsAuthenticated,)
+class MeUserDetail(RetrieveUpdateAPI, UserDetail):
+ """Detail endpoint for current user."""
+
+ def get_object(self):
+ """Always return the current user object"""
+ return self.request.user
+
+
class UserList(ListAPI):
"""List endpoint for detail on all users."""
@@ -170,6 +178,7 @@
re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),
re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),
+ re_path(r'^me/', MeUserDetail.as_view(), name='api-user-me'),
re_path(r'^owner/', include([
path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),
| {"golden_diff": "diff --git a/InvenTree/users/api.py b/InvenTree/users/api.py\n--- a/InvenTree/users/api.py\n+++ b/InvenTree/users/api.py\n@@ -10,7 +10,7 @@\n from rest_framework.response import Response\n from rest_framework.views import APIView\n \n-from InvenTree.mixins import ListAPI, RetrieveAPI\n+from InvenTree.mixins import ListAPI, RetrieveAPI, RetrieveUpdateAPI\n from InvenTree.serializers import UserSerializer\n from users.models import Owner, RuleSet, check_user_role\n from users.serializers import OwnerSerializer\n@@ -116,6 +116,14 @@\n permission_classes = (permissions.IsAuthenticated,)\n \n \n+class MeUserDetail(RetrieveUpdateAPI, UserDetail):\n+ \"\"\"Detail endpoint for current user.\"\"\"\n+\n+ def get_object(self):\n+ \"\"\"Always return the current user object\"\"\"\n+ return self.request.user\n+\n+\n class UserList(ListAPI):\n \"\"\"List endpoint for detail on all users.\"\"\"\n \n@@ -170,6 +178,7 @@\n \n re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),\n re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),\n+ re_path(r'^me/', MeUserDetail.as_view(), name='api-user-me'),\n \n re_path(r'^owner/', include([\n path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),\n", "issue": "[FR] :checkered_flag: Simple API endpoint to change user metadata\n### Please verify that this feature request has NOT been suggested before.\n\n- [X] I checked and didn't find similar feature request\n\n### Problem statement\n\nwe currently provide an API endpoint for editing user data via a form. Fetching and changing endpoints are different and take different parameters.\n\n### Suggested solution\n\nI think it would be better to provide a RetrieveUpdateAPI endpoint under `/api/user/me` (somewhat a convention). This endpoint could provide and patch all the metadata for a user.\n\n### Describe alternatives you've considered\n\nN/A\n\n### Examples of other systems\n\n_No response_\n\n### Do you want to develop this?\n\n- [X] I want to develop this.\n", "before_files": [{"content": "\"\"\"DRF API definition for the 'users' app\"\"\"\n\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.urls import include, path, re_path\n\nfrom django_filters.rest_framework import DjangoFilterBackend\nfrom rest_framework import filters, permissions, status\nfrom rest_framework.authtoken.models import Token\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom InvenTree.mixins import ListAPI, RetrieveAPI\nfrom InvenTree.serializers import UserSerializer\nfrom users.models import Owner, RuleSet, check_user_role\nfrom users.serializers import OwnerSerializer\n\n\nclass OwnerList(ListAPI):\n \"\"\"List API endpoint for Owner model.\n\n Cannot create.\n \"\"\"\n\n queryset = Owner.objects.all()\n serializer_class = OwnerSerializer\n\n def filter_queryset(self, queryset):\n \"\"\"Implement text search for the \"owner\" model.\n\n Note that an \"owner\" can be either a group, or a user,\n so we cannot do a direct text search.\n\n A \"hack\" here is to post-process the queryset and simply\n remove any values which do not match.\n\n It is not necessarily \"efficient\" to do it this way,\n but until we determine a better way, this is what we have...\n \"\"\"\n search_term = str(self.request.query_params.get('search', '')).lower()\n\n queryset = super().filter_queryset(queryset)\n\n if not search_term:\n return queryset\n\n results = []\n\n # Extract search term f\n\n for result in 
queryset.all():\n if search_term in result.name().lower():\n results.append(result)\n\n return results\n\n\nclass OwnerDetail(RetrieveAPI):\n \"\"\"Detail API endpoint for Owner model.\n\n Cannot edit or delete\n \"\"\"\n\n queryset = Owner.objects.all()\n serializer_class = OwnerSerializer\n\n\nclass RoleDetails(APIView):\n \"\"\"API endpoint which lists the available role permissions for the current user.\n\n (Requires authentication)\n \"\"\"\n\n permission_classes = [\n permissions.IsAuthenticated\n ]\n\n def get(self, request, *args, **kwargs):\n \"\"\"Return the list of roles / permissions available to the current user\"\"\"\n user = request.user\n\n roles = {}\n\n for ruleset in RuleSet.RULESET_CHOICES:\n\n role, text = ruleset\n\n permissions = []\n\n for permission in RuleSet.RULESET_PERMISSIONS:\n if check_user_role(user, role, permission):\n\n permissions.append(permission)\n\n if len(permissions) > 0:\n roles[role] = permissions\n else:\n roles[role] = None # pragma: no cover\n\n data = {\n 'user': user.pk,\n 'username': user.username,\n 'roles': roles,\n 'is_staff': user.is_staff,\n 'is_superuser': user.is_superuser,\n }\n\n return Response(data)\n\n\nclass UserDetail(RetrieveAPI):\n \"\"\"Detail endpoint for a single user.\"\"\"\n\n queryset = User.objects.all()\n serializer_class = UserSerializer\n permission_classes = (permissions.IsAuthenticated,)\n\n\nclass UserList(ListAPI):\n \"\"\"List endpoint for detail on all users.\"\"\"\n\n queryset = User.objects.all()\n serializer_class = UserSerializer\n permission_classes = (permissions.IsAuthenticated,)\n\n filter_backends = [\n DjangoFilterBackend,\n filters.SearchFilter,\n ]\n\n search_fields = [\n 'first_name',\n 'last_name',\n 'username',\n ]\n\n\nclass GetAuthToken(APIView):\n \"\"\"Return authentication token for an authenticated user.\"\"\"\n\n permission_classes = [\n permissions.IsAuthenticated,\n ]\n\n def get(self, request, *args, **kwargs):\n \"\"\"Return an API token if the user is authenticated\n\n - If the user already has a token, return it\n - Otherwise, create a new token\n \"\"\"\n if request.user.is_authenticated:\n # Get the user token (or create one if it does not exist)\n token, created = Token.objects.get_or_create(user=request.user)\n return Response({\n 'token': token.key,\n })\n\n def delete(self, request):\n \"\"\"User has requested deletion of API token\"\"\"\n try:\n request.user.auth_token.delete()\n return Response({\"success\": \"Successfully logged out.\"},\n status=status.HTTP_202_ACCEPTED)\n except (AttributeError, ObjectDoesNotExist):\n return Response({\"error\": \"Bad request\"},\n status=status.HTTP_400_BAD_REQUEST)\n\n\nuser_urls = [\n\n re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),\n re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),\n\n re_path(r'^owner/', include([\n path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),\n re_path(r'^.*$', OwnerList.as_view(), name='api-owner-list'),\n ])),\n\n re_path(r'^(?P<pk>[0-9]+)/?$', UserDetail.as_view(), name='user-detail'),\n path('', UserList.as_view()),\n]\n", "path": "InvenTree/users/api.py"}], "after_files": [{"content": "\"\"\"DRF API definition for the 'users' app\"\"\"\n\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.urls import include, path, re_path\n\nfrom django_filters.rest_framework import DjangoFilterBackend\nfrom rest_framework import filters, permissions, status\nfrom rest_framework.authtoken.models import 
Token\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom InvenTree.mixins import ListAPI, RetrieveAPI, RetrieveUpdateAPI\nfrom InvenTree.serializers import UserSerializer\nfrom users.models import Owner, RuleSet, check_user_role\nfrom users.serializers import OwnerSerializer\n\n\nclass OwnerList(ListAPI):\n \"\"\"List API endpoint for Owner model.\n\n Cannot create.\n \"\"\"\n\n queryset = Owner.objects.all()\n serializer_class = OwnerSerializer\n\n def filter_queryset(self, queryset):\n \"\"\"Implement text search for the \"owner\" model.\n\n Note that an \"owner\" can be either a group, or a user,\n so we cannot do a direct text search.\n\n A \"hack\" here is to post-process the queryset and simply\n remove any values which do not match.\n\n It is not necessarily \"efficient\" to do it this way,\n but until we determine a better way, this is what we have...\n \"\"\"\n search_term = str(self.request.query_params.get('search', '')).lower()\n\n queryset = super().filter_queryset(queryset)\n\n if not search_term:\n return queryset\n\n results = []\n\n # Extract search term f\n\n for result in queryset.all():\n if search_term in result.name().lower():\n results.append(result)\n\n return results\n\n\nclass OwnerDetail(RetrieveAPI):\n \"\"\"Detail API endpoint for Owner model.\n\n Cannot edit or delete\n \"\"\"\n\n queryset = Owner.objects.all()\n serializer_class = OwnerSerializer\n\n\nclass RoleDetails(APIView):\n \"\"\"API endpoint which lists the available role permissions for the current user.\n\n (Requires authentication)\n \"\"\"\n\n permission_classes = [\n permissions.IsAuthenticated\n ]\n\n def get(self, request, *args, **kwargs):\n \"\"\"Return the list of roles / permissions available to the current user\"\"\"\n user = request.user\n\n roles = {}\n\n for ruleset in RuleSet.RULESET_CHOICES:\n\n role, text = ruleset\n\n permissions = []\n\n for permission in RuleSet.RULESET_PERMISSIONS:\n if check_user_role(user, role, permission):\n\n permissions.append(permission)\n\n if len(permissions) > 0:\n roles[role] = permissions\n else:\n roles[role] = None # pragma: no cover\n\n data = {\n 'user': user.pk,\n 'username': user.username,\n 'roles': roles,\n 'is_staff': user.is_staff,\n 'is_superuser': user.is_superuser,\n }\n\n return Response(data)\n\n\nclass UserDetail(RetrieveAPI):\n \"\"\"Detail endpoint for a single user.\"\"\"\n\n queryset = User.objects.all()\n serializer_class = UserSerializer\n permission_classes = (permissions.IsAuthenticated,)\n\n\nclass MeUserDetail(RetrieveUpdateAPI, UserDetail):\n \"\"\"Detail endpoint for current user.\"\"\"\n\n def get_object(self):\n \"\"\"Always return the current user object\"\"\"\n return self.request.user\n\n\nclass UserList(ListAPI):\n \"\"\"List endpoint for detail on all users.\"\"\"\n\n queryset = User.objects.all()\n serializer_class = UserSerializer\n permission_classes = (permissions.IsAuthenticated,)\n\n filter_backends = [\n DjangoFilterBackend,\n filters.SearchFilter,\n ]\n\n search_fields = [\n 'first_name',\n 'last_name',\n 'username',\n ]\n\n\nclass GetAuthToken(APIView):\n \"\"\"Return authentication token for an authenticated user.\"\"\"\n\n permission_classes = [\n permissions.IsAuthenticated,\n ]\n\n def get(self, request, *args, **kwargs):\n \"\"\"Return an API token if the user is authenticated\n\n - If the user already has a token, return it\n - Otherwise, create a new token\n \"\"\"\n if request.user.is_authenticated:\n # Get the user token (or create one if it does not 
exist)\n token, created = Token.objects.get_or_create(user=request.user)\n return Response({\n 'token': token.key,\n })\n\n def delete(self, request):\n \"\"\"User has requested deletion of API token\"\"\"\n try:\n request.user.auth_token.delete()\n return Response({\"success\": \"Successfully logged out.\"},\n status=status.HTTP_202_ACCEPTED)\n except (AttributeError, ObjectDoesNotExist):\n return Response({\"error\": \"Bad request\"},\n status=status.HTTP_400_BAD_REQUEST)\n\n\nuser_urls = [\n\n re_path(r'roles/?$', RoleDetails.as_view(), name='api-user-roles'),\n re_path(r'token/?$', GetAuthToken.as_view(), name='api-token'),\n re_path(r'^me/', MeUserDetail.as_view(), name='api-user-me'),\n\n re_path(r'^owner/', include([\n path('<int:pk>/', OwnerDetail.as_view(), name='api-owner-detail'),\n re_path(r'^.*$', OwnerList.as_view(), name='api-owner-list'),\n ])),\n\n re_path(r'^(?P<pk>[0-9]+)/?$', UserDetail.as_view(), name='user-detail'),\n path('', UserList.as_view()),\n]\n", "path": "InvenTree/users/api.py"}]} | 1,948 | 321 |
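The golden diff in the record above adds a `/api/user/me/` endpoint backed by DRF's `RetrieveUpdateAPI`, so the current user can fetch and patch their own metadata in one place. A client-side sketch of exercising it; the host, token, and field name are illustrative placeholders, not values taken from the record:

```python
# Sketch only: exercises the /api/user/me/ endpoint added by the patch above.
import requests

headers = {"Authorization": "Token <api_token>"}    # placeholder token
url = "https://inventree.example.com/api/user/me/"  # placeholder host

me = requests.get(url, headers=headers).json()      # retrieve the current user
requests.patch(url, json={"first_name": "Ada"},     # update own metadata
               headers=headers)
```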
gh_patches_debug_23370 | rasdani/github-patches | git_diff | python-gitlab__python-gitlab-1373 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cannot list package files
## Description of the problem, including code/CLI snippet
[Listing package files](https://docs.gitlab.com/ee/api/packages.html#list-package-files) appears to be unsupported. The API endpoint was introduced in GitLab 11.8.
## Expected Behavior
Listing package files should be possible.
## Actual Behavior
Listing package files is not possible.
## Specifications
- python-gitlab version: 2.6.0
- API version you are using (v3/v4): v4
- Gitlab server version (or gitlab.com): gitlab.com
PR incoming.
--- END ISSUE ---
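For reference, the upstream REST endpoint the binding needs to wrap is `GET /projects/:id/packages/:package_id/package_files`, per the GitLab docs linked above. A raw-HTTP sketch of that call; the server URL, token, and IDs are placeholders:

```python
# Sketch of the raw endpoint the binding should wrap; all values are placeholders.
import requests

resp = requests.get(
    "https://gitlab.example.com/api/v4/projects/1/packages/27/package_files",
    headers={"PRIVATE-TOKEN": "<your_token>"},
)
for package_file in resp.json():
    print(package_file["file_name"], package_file["size"])
```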
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `gitlab/v4/objects/packages.py`
Content:
```
1 from gitlab.base import RESTManager, RESTObject
2 from gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin
3
4
5 __all__ = [
6 "GroupPackage",
7 "GroupPackageManager",
8 "ProjectPackage",
9 "ProjectPackageManager",
10 ]
11
12
13 class GroupPackage(RESTObject):
14 pass
15
16
17 class GroupPackageManager(ListMixin, RESTManager):
18 _path = "/groups/%(group_id)s/packages"
19 _obj_cls = GroupPackage
20 _from_parent_attrs = {"group_id": "id"}
21 _list_filters = (
22 "exclude_subgroups",
23 "order_by",
24 "sort",
25 "package_type",
26 "package_name",
27 )
28
29
30 class ProjectPackage(ObjectDeleteMixin, RESTObject):
31 pass
32
33
34 class ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):
35 _path = "/projects/%(project_id)s/packages"
36 _obj_cls = ProjectPackage
37 _from_parent_attrs = {"project_id": "id"}
38 _list_filters = (
39 "order_by",
40 "sort",
41 "package_type",
42 "package_name",
43 )
44
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/gitlab/v4/objects/packages.py b/gitlab/v4/objects/packages.py
--- a/gitlab/v4/objects/packages.py
+++ b/gitlab/v4/objects/packages.py
@@ -1,12 +1,13 @@
from gitlab.base import RESTManager, RESTObject
from gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin
-
__all__ = [
"GroupPackage",
"GroupPackageManager",
"ProjectPackage",
"ProjectPackageManager",
+ "ProjectPackageFile",
+ "ProjectPackageFileManager",
]
@@ -28,7 +29,7 @@
class ProjectPackage(ObjectDeleteMixin, RESTObject):
- pass
+ _managers = (("package_files", "ProjectPackageFileManager"),)
class ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):
@@ -41,3 +42,13 @@
"package_type",
"package_name",
)
+
+
+class ProjectPackageFile(RESTObject):
+ pass
+
+
+class ProjectPackageFileManager(ListMixin, RESTManager):
+ _path = "/projects/%(project_id)s/packages/%(package_id)s/package_files"
+ _obj_cls = ProjectPackageFile
+ _from_parent_attrs = {"project_id": "project_id", "package_id": "id"}
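With this patch applied, usage through python-gitlab would look roughly like the sketch below; the server URL, token, and IDs are placeholders:

```python
# Sketch only: assumes the ProjectPackageFileManager wired up in the diff above.
import gitlab

gl = gitlab.Gitlab("https://gitlab.example.com", private_token="<your_token>")
package = gl.projects.get(1).packages.get(27)

# The new `package_files` manager exposes the list endpoint.
for package_file in package.package_files.list():
    print(package_file.file_name)
```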
| {"golden_diff": "diff --git a/gitlab/v4/objects/packages.py b/gitlab/v4/objects/packages.py\n--- a/gitlab/v4/objects/packages.py\n+++ b/gitlab/v4/objects/packages.py\n@@ -1,12 +1,13 @@\n from gitlab.base import RESTManager, RESTObject\n from gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin\n \n-\n __all__ = [\n \"GroupPackage\",\n \"GroupPackageManager\",\n \"ProjectPackage\",\n \"ProjectPackageManager\",\n+ \"ProjectPackageFile\",\n+ \"ProjectPackageFileManager\",\n ]\n \n \n@@ -28,7 +29,7 @@\n \n \n class ProjectPackage(ObjectDeleteMixin, RESTObject):\n- pass\n+ _managers = ((\"package_files\", \"ProjectPackageFileManager\"),)\n \n \n class ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):\n@@ -41,3 +42,13 @@\n \"package_type\",\n \"package_name\",\n )\n+\n+\n+class ProjectPackageFile(RESTObject):\n+ pass\n+\n+\n+class ProjectPackageFileManager(ListMixin, RESTManager):\n+ _path = \"/projects/%(project_id)s/packages/%(package_id)s/package_files\"\n+ _obj_cls = ProjectPackageFile\n+ _from_parent_attrs = {\"project_id\": \"project_id\", \"package_id\": \"id\"}\n", "issue": "Cannot list package files\n## Description of the problem, including code/CLI snippet\r\n\r\n[Listing package files](https://docs.gitlab.com/ee/api/packages.html#list-package-files) appears to be unsupported. The API endpoint was introduced in GitLab 11.8.\r\n\r\n## Expected Behavior\r\n\r\nListing package files should be possible.\r\n\r\n## Actual Behavior\r\n\r\nListing package files is not possible.\r\n\r\n## Specifications\r\n\r\n - python-gitlab version: 2.6.0\r\n - API version you are using (v3/v4): v4\r\n - Gitlab server version (or gitlab.com): gitlab.com\r\n\r\n\r\nPR incoming.\n", "before_files": [{"content": "from gitlab.base import RESTManager, RESTObject\nfrom gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin\n\n\n__all__ = [\n \"GroupPackage\",\n \"GroupPackageManager\",\n \"ProjectPackage\",\n \"ProjectPackageManager\",\n]\n\n\nclass GroupPackage(RESTObject):\n pass\n\n\nclass GroupPackageManager(ListMixin, RESTManager):\n _path = \"/groups/%(group_id)s/packages\"\n _obj_cls = GroupPackage\n _from_parent_attrs = {\"group_id\": \"id\"}\n _list_filters = (\n \"exclude_subgroups\",\n \"order_by\",\n \"sort\",\n \"package_type\",\n \"package_name\",\n )\n\n\nclass ProjectPackage(ObjectDeleteMixin, RESTObject):\n pass\n\n\nclass ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):\n _path = \"/projects/%(project_id)s/packages\"\n _obj_cls = ProjectPackage\n _from_parent_attrs = {\"project_id\": \"id\"}\n _list_filters = (\n \"order_by\",\n \"sort\",\n \"package_type\",\n \"package_name\",\n )\n", "path": "gitlab/v4/objects/packages.py"}], "after_files": [{"content": "from gitlab.base import RESTManager, RESTObject\nfrom gitlab.mixins import DeleteMixin, GetMixin, ListMixin, ObjectDeleteMixin\n\n__all__ = [\n \"GroupPackage\",\n \"GroupPackageManager\",\n \"ProjectPackage\",\n \"ProjectPackageManager\",\n \"ProjectPackageFile\",\n \"ProjectPackageFileManager\",\n]\n\n\nclass GroupPackage(RESTObject):\n pass\n\n\nclass GroupPackageManager(ListMixin, RESTManager):\n _path = \"/groups/%(group_id)s/packages\"\n _obj_cls = GroupPackage\n _from_parent_attrs = {\"group_id\": \"id\"}\n _list_filters = (\n \"exclude_subgroups\",\n \"order_by\",\n \"sort\",\n \"package_type\",\n \"package_name\",\n )\n\n\nclass ProjectPackage(ObjectDeleteMixin, RESTObject):\n _managers = ((\"package_files\", \"ProjectPackageFileManager\"),)\n\n\nclass 
ProjectPackageManager(ListMixin, GetMixin, DeleteMixin, RESTManager):\n _path = \"/projects/%(project_id)s/packages\"\n _obj_cls = ProjectPackage\n _from_parent_attrs = {\"project_id\": \"id\"}\n _list_filters = (\n \"order_by\",\n \"sort\",\n \"package_type\",\n \"package_name\",\n )\n\n\nclass ProjectPackageFile(RESTObject):\n pass\n\n\nclass ProjectPackageFileManager(ListMixin, RESTManager):\n _path = \"/projects/%(project_id)s/packages/%(package_id)s/package_files\"\n _obj_cls = ProjectPackageFile\n _from_parent_attrs = {\"project_id\": \"project_id\", \"package_id\": \"id\"}\n", "path": "gitlab/v4/objects/packages.py"}]} | 713 | 293 |
gh_patches_debug_29144 | rasdani/github-patches | git_diff | qtile__qtile-2235 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Image widget with no filename provided causes Qtile to crash.
# The issue
When no filename argument, or an invalid filename, is provided for the Image widget, Qtile seems to crash and needs to be killed before it can restart. You are obviously not supposed to provide a non-existent image, but I have doubts that crashing is the intended behavior.
What I am describing here as a "crash" is that no keyboard input is accepted, and windows from *all* other workspaces are displayed on the workspace you are currently on. If this is not actually a crash, I apologize, but regardless, Qtile becomes unusable until the process is killed and I am kicked back to my display manager.
# Steps to reproduce
Create a new ``Image`` widget somewhere inside your bar. Either provide a path to an image that does not exist, or do not provide one at all.
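A minimal configuration that triggers it might look like the sketch below; everything apart from the `Image` widget is an arbitrary placeholder:

```python
# config.py -- minimal reproduction sketch, not a full qtile config.
from libqtile import bar, widget
from libqtile.config import Screen

screens = [
    Screen(
        top=bar.Bar(
            [
                widget.GroupBox(),
                # No `filename` argument: Image._configure() raises ValueError.
                widget.Image(),
            ],
            24,
        )
    )
]
```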
# Qtile version
This is the commit hash of the version I am running.
6c4d0557124989d46ffb2bb24f4468db687fcdb2
# Stack traces
No stack traces are produced in xsession-errors or the Qtile log; however, I have traced the error (by using the logger provided in the module's file) to the ``_configure`` method of the Image widget, and it seems to be coming from the line ``base._Widget._configure(self, qtile, bar)``.
# Configuration
https://pastebin.com/qxBq6yPn
If I got any information wrong here, or there is some other bit of information I can provide that will help get this issue solved, I will try my best to supply it.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `libqtile/widget/image.py`
Content:
```
1 # Copyright (c) 2013 dequis
2 # Copyright (c) 2014 Sean Vig
3 # Copyright (c) 2014 Adi Sieker
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # of this software and associated documentation files (the "Software"), to deal
7 # in the Software without restriction, including without limitation the rights
8 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # copies of the Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
11 #
12 # The above copyright notice and this permission notice shall be included in
13 # all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 # SOFTWARE.
22 import os
23
24 from libqtile import bar
25 from libqtile.images import Img
26 from libqtile.log_utils import logger
27 from libqtile.widget import base
28
29
30 class Image(base._Widget, base.MarginMixin):
31 """Display a PNG image on the bar"""
32 orientations = base.ORIENTATION_BOTH
33 defaults = [
34 ("scale", True, "Enable/Disable image scaling"),
35 ("rotate", 0.0, "rotate the image in degrees counter-clockwise"),
36 ("filename", None, "Image filename. Can contain '~'"),
37 ]
38
39 def __init__(self, length=bar.CALCULATED, width=None, **config):
40 # 'width' was replaced by 'length' since the widget can be installed in
41 # vertical bars
42 if width is not None:
43 logger.warning('width kwarg or positional argument is '
44 'deprecated. Please use length.')
45 length = width
46
47 base._Widget.__init__(self, length, **config)
48 self.add_defaults(Image.defaults)
49 self.add_defaults(base.MarginMixin.defaults)
50
51 # make the default 0 instead
52 self._variable_defaults["margin"] = 0
53
54 def _configure(self, qtile, bar):
55 base._Widget._configure(self, qtile, bar)
56
57 if not self.filename:
58 raise ValueError("Filename not set!")
59
60 self.filename = os.path.expanduser(self.filename)
61
62 if not os.path.exists(self.filename):
63 raise ValueError("File does not exist: {}".format(self.filename))
64
65 img = Img.from_path(self.filename)
66 self.img = img
67 img.theta = self.rotate
68 if not self.scale:
69 return
70 if self.bar.horizontal:
71 new_height = self.bar.height - (self.margin_y * 2)
72 img.resize(height=new_height)
73 else:
74 new_width = self.bar.width - (self.margin_x * 2)
75 img.resize(width=new_width)
76
77 def draw(self):
78 self.drawer.clear(self.background or self.bar.background)
79 self.drawer.ctx.save()
80 self.drawer.ctx.translate(self.margin_x, self.margin_y)
81 self.drawer.ctx.set_source(self.img.pattern)
82 self.drawer.ctx.paint()
83 self.drawer.ctx.restore()
84
85 if self.bar.horizontal:
86 self.drawer.draw(offsetx=self.offset, width=self.width)
87 else:
88 self.drawer.draw(offsety=self.offset, height=self.width)
89
90 def calculate_length(self):
91 if self.bar.horizontal:
92 return self.img.width + (self.margin_x * 2)
93 else:
94 return self.img.height + (self.margin_y * 2)
95
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/libqtile/widget/image.py b/libqtile/widget/image.py
--- a/libqtile/widget/image.py
+++ b/libqtile/widget/image.py
@@ -53,14 +53,17 @@
def _configure(self, qtile, bar):
base._Widget._configure(self, qtile, bar)
+ self.img = None
if not self.filename:
- raise ValueError("Filename not set!")
+ logger.warning("Image filename not set!")
+ return
self.filename = os.path.expanduser(self.filename)
if not os.path.exists(self.filename):
- raise ValueError("File does not exist: {}".format(self.filename))
+ logger.warning("Image does not exist: {}".format(self.filename))
+ return
img = Img.from_path(self.filename)
self.img = img
@@ -75,6 +78,9 @@
img.resize(width=new_width)
def draw(self):
+ if self.img is None:
+ return
+
self.drawer.clear(self.background or self.bar.background)
self.drawer.ctx.save()
self.drawer.ctx.translate(self.margin_x, self.margin_y)
@@ -88,6 +94,9 @@
self.drawer.draw(offsety=self.offset, height=self.width)
def calculate_length(self):
+ if self.img is None:
+ return 0
+
if self.bar.horizontal:
return self.img.width + (self.margin_x * 2)
else:
| {"golden_diff": "diff --git a/libqtile/widget/image.py b/libqtile/widget/image.py\n--- a/libqtile/widget/image.py\n+++ b/libqtile/widget/image.py\n@@ -53,14 +53,17 @@\n \n def _configure(self, qtile, bar):\n base._Widget._configure(self, qtile, bar)\n+ self.img = None\n \n if not self.filename:\n- raise ValueError(\"Filename not set!\")\n+ logger.warning(\"Image filename not set!\")\n+ return\n \n self.filename = os.path.expanduser(self.filename)\n \n if not os.path.exists(self.filename):\n- raise ValueError(\"File does not exist: {}\".format(self.filename))\n+ logger.warning(\"Image does not exist: {}\".format(self.filename))\n+ return\n \n img = Img.from_path(self.filename)\n self.img = img\n@@ -75,6 +78,9 @@\n img.resize(width=new_width)\n \n def draw(self):\n+ if self.img is None:\n+ return\n+\n self.drawer.clear(self.background or self.bar.background)\n self.drawer.ctx.save()\n self.drawer.ctx.translate(self.margin_x, self.margin_y)\n@@ -88,6 +94,9 @@\n self.drawer.draw(offsety=self.offset, height=self.width)\n \n def calculate_length(self):\n+ if self.img is None:\n+ return 0\n+\n if self.bar.horizontal:\n return self.img.width + (self.margin_x * 2)\n else:\n", "issue": "No filename provided Image widget causes QTile to crash.\n# The issue\r\nWhen no filename argument, OR an invalid filename is provided for the Image widget, Qtile seems to crash, and needs to be killed to restart. You are obviously not supposed to provide a non-existant image, but I have doubts that it crashing is intended behavior. \r\n\r\nWhat I am describing here as a \"crash\" is no keyboard input being accepted, and windows from *all* other workspaces being displayed on the workspace you are currently on. If this is not actually a crash, I apologize, but regardless, Qtile becomes unusable until the process is killed and I am kicked back to my Display Manager.\r\n\r\n# Steps to reproduce\r\nIn your bar, create a new ``Image`` widget somewhere inside. 
Either provide a path to an image that does not exist, or do not provide one period.\r\n\r\n# Qtile version\r\nThis is the commit hash of the version I am running.\r\n6c4d0557124989d46ffb2bb24f4468db687fcdb2\r\n\r\n# Stack traces\r\nNo stack traces from xsession-errors, or the Qtile log are produced, however I have traced the error (through using the logger provided in the module's file) to the ``_configure`` method of the Image widget, and it seems to be coming the line: ``base._Widget._configure(self, qtile, bar)``\r\n\r\n# Configuration\r\nhttps://pastebin.com/qxBq6yPn\r\n\r\nIf there is any information I got wrong here, or some other bit of information I can provide that will help this issue get solved, I will try my best.\n", "before_files": [{"content": "# Copyright (c) 2013 dequis\n# Copyright (c) 2014 Sean Vig\n# Copyright (c) 2014 Adi Sieker\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\nimport os\n\nfrom libqtile import bar\nfrom libqtile.images import Img\nfrom libqtile.log_utils import logger\nfrom libqtile.widget import base\n\n\nclass Image(base._Widget, base.MarginMixin):\n \"\"\"Display a PNG image on the bar\"\"\"\n orientations = base.ORIENTATION_BOTH\n defaults = [\n (\"scale\", True, \"Enable/Disable image scaling\"),\n (\"rotate\", 0.0, \"rotate the image in degrees counter-clockwise\"),\n (\"filename\", None, \"Image filename. Can contain '~'\"),\n ]\n\n def __init__(self, length=bar.CALCULATED, width=None, **config):\n # 'width' was replaced by 'length' since the widget can be installed in\n # vertical bars\n if width is not None:\n logger.warning('width kwarg or positional argument is '\n 'deprecated. 
Please use length.')\n length = width\n\n base._Widget.__init__(self, length, **config)\n self.add_defaults(Image.defaults)\n self.add_defaults(base.MarginMixin.defaults)\n\n # make the default 0 instead\n self._variable_defaults[\"margin\"] = 0\n\n def _configure(self, qtile, bar):\n base._Widget._configure(self, qtile, bar)\n\n if not self.filename:\n raise ValueError(\"Filename not set!\")\n\n self.filename = os.path.expanduser(self.filename)\n\n if not os.path.exists(self.filename):\n raise ValueError(\"File does not exist: {}\".format(self.filename))\n\n img = Img.from_path(self.filename)\n self.img = img\n img.theta = self.rotate\n if not self.scale:\n return\n if self.bar.horizontal:\n new_height = self.bar.height - (self.margin_y * 2)\n img.resize(height=new_height)\n else:\n new_width = self.bar.width - (self.margin_x * 2)\n img.resize(width=new_width)\n\n def draw(self):\n self.drawer.clear(self.background or self.bar.background)\n self.drawer.ctx.save()\n self.drawer.ctx.translate(self.margin_x, self.margin_y)\n self.drawer.ctx.set_source(self.img.pattern)\n self.drawer.ctx.paint()\n self.drawer.ctx.restore()\n\n if self.bar.horizontal:\n self.drawer.draw(offsetx=self.offset, width=self.width)\n else:\n self.drawer.draw(offsety=self.offset, height=self.width)\n\n def calculate_length(self):\n if self.bar.horizontal:\n return self.img.width + (self.margin_x * 2)\n else:\n return self.img.height + (self.margin_y * 2)\n", "path": "libqtile/widget/image.py"}], "after_files": [{"content": "# Copyright (c) 2013 dequis\n# Copyright (c) 2014 Sean Vig\n# Copyright (c) 2014 Adi Sieker\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\nimport os\n\nfrom libqtile import bar\nfrom libqtile.images import Img\nfrom libqtile.log_utils import logger\nfrom libqtile.widget import base\n\n\nclass Image(base._Widget, base.MarginMixin):\n \"\"\"Display a PNG image on the bar\"\"\"\n orientations = base.ORIENTATION_BOTH\n defaults = [\n (\"scale\", True, \"Enable/Disable image scaling\"),\n (\"rotate\", 0.0, \"rotate the image in degrees counter-clockwise\"),\n (\"filename\", None, \"Image filename. Can contain '~'\"),\n ]\n\n def __init__(self, length=bar.CALCULATED, width=None, **config):\n # 'width' was replaced by 'length' since the widget can be installed in\n # vertical bars\n if width is not None:\n logger.warning('width kwarg or positional argument is '\n 'deprecated. 
Please use length.')\n length = width\n\n base._Widget.__init__(self, length, **config)\n self.add_defaults(Image.defaults)\n self.add_defaults(base.MarginMixin.defaults)\n\n # make the default 0 instead\n self._variable_defaults[\"margin\"] = 0\n\n def _configure(self, qtile, bar):\n base._Widget._configure(self, qtile, bar)\n self.img = None\n\n if not self.filename:\n logger.warning(\"Image filename not set!\")\n return\n\n self.filename = os.path.expanduser(self.filename)\n\n if not os.path.exists(self.filename):\n logger.warning(\"Image does not exist: {}\".format(self.filename))\n return\n\n img = Img.from_path(self.filename)\n self.img = img\n img.theta = self.rotate\n if not self.scale:\n return\n if self.bar.horizontal:\n new_height = self.bar.height - (self.margin_y * 2)\n img.resize(height=new_height)\n else:\n new_width = self.bar.width - (self.margin_x * 2)\n img.resize(width=new_width)\n\n def draw(self):\n if self.img is None:\n return\n\n self.drawer.clear(self.background or self.bar.background)\n self.drawer.ctx.save()\n self.drawer.ctx.translate(self.margin_x, self.margin_y)\n self.drawer.ctx.set_source(self.img.pattern)\n self.drawer.ctx.paint()\n self.drawer.ctx.restore()\n\n if self.bar.horizontal:\n self.drawer.draw(offsetx=self.offset, width=self.width)\n else:\n self.drawer.draw(offsety=self.offset, height=self.width)\n\n def calculate_length(self):\n if self.img is None:\n return 0\n\n if self.bar.horizontal:\n return self.img.width + (self.margin_x * 2)\n else:\n return self.img.height + (self.margin_y * 2)\n", "path": "libqtile/widget/image.py"}]} | 1,608 | 319 |
gh_patches_debug_2346 | rasdani/github-patches | git_diff | mesonbuild__meson-10268 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
-Dbuildtype=release fails on CompCert
**Describe the bug**
Building in release mode with CompCert fails.
The error message is as follows:
```
ccomp: error: Unknown option `-03'
```
Note that this unknown option is "dash zero three" and not "dash O three". Maybe someone accidentally typed a zero where they wanted a letter "O"?
**To Reproduce**
The bug seems to trigger with any program.
Download this Meson "Hello, world!" program: [meson-compcert.zip](https://github.com/mesonbuild/meson/files/8468156/meson-compcert.zip).
Try to build it with:
```console
$ CC=ccomp meson setup -Dbuildtype=release build && meson compile -C build
```
**Expected behavior**
A successful build.
**system parameters**
I'm running Meson 0.62 from PyPI, and my CompCert is the binary package of version 3.9 from the OpenBSD ports system.
My ninja is version 1.10.2 and my python is version 3.8.12.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mesonbuild/compilers/mixins/compcert.py`
Content:
```
1 # Copyright 2012-2019 The Meson development team
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6
7 # http://www.apache.org/licenses/LICENSE-2.0
8
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Representations specific to the CompCert C compiler family."""
16
17 import os
18 import re
19 import typing as T
20
21 if T.TYPE_CHECKING:
22 from ...environment import Environment
23 from ...compilers.compilers import Compiler
24 else:
25 # This is a bit clever, for mypy we pretend that these mixins descend from
26 # Compiler, so we get all of the methods and attributes defined for us, but
27 # for runtime we make them descend from object (which all classes normally
28 # do). This gives up DRYer type checking, with no runtime impact
29 Compiler = object
30
31 ccomp_buildtype_args = {
32 'plain': [''],
33 'debug': ['-O0', '-g'],
34 'debugoptimized': ['-O0', '-g'],
35 'release': ['-03'],
36 'minsize': ['-Os'],
37 'custom': ['-Obranchless'],
38 } # type: T.Dict[str, T.List[str]]
39
40 ccomp_optimization_args = {
41 '0': ['-O0'],
42 'g': ['-O0'],
43 '1': ['-O1'],
44 '2': ['-O2'],
45 '3': ['-O3'],
46 's': ['-Os']
47 } # type: T.Dict[str, T.List[str]]
48
49 ccomp_debug_args = {
50 False: [],
51 True: ['-g']
52 } # type: T.Dict[bool, T.List[str]]
53
54 # As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,<arg>)
55 # There are probably (many) more, but these are those used by picolibc
56 ccomp_args_to_wul = [
57 r"^-ffreestanding$",
58 r"^-r$"
59 ] # type: T.List[str]
60
61 class CompCertCompiler(Compiler):
62
63 id = 'ccomp'
64
65 def __init__(self) -> None:
66 # Assembly
67 self.can_compile_suffixes.add('s')
68 default_warn_args = [] # type: T.List[str]
69 self.warn_args = {'0': [],
70 '1': default_warn_args,
71 '2': default_warn_args + [],
72 '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
73
74 def get_always_args(self) -> T.List[str]:
75 return []
76
77 def get_pic_args(self) -> T.List[str]:
78 # As of now, CompCert does not support PIC
79 return []
80
81 def get_buildtype_args(self, buildtype: str) -> T.List[str]:
82 return ccomp_buildtype_args[buildtype]
83
84 def get_pch_suffix(self) -> str:
85 return 'pch'
86
87 def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
88 return []
89
90 def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
91 "Always returns a copy that can be independently mutated"
92 patched_args = [] # type: T.List[str]
93 for arg in args:
94 added = 0
95 for ptrn in ccomp_args_to_wul:
96 if re.match(ptrn, arg):
97 patched_args.append('-WUl,' + arg)
98 added = 1
99 if not added:
100 patched_args.append(arg)
101 return patched_args
102
103 def thread_flags(self, env: 'Environment') -> T.List[str]:
104 return []
105
106 def get_preprocess_only_args(self) -> T.List[str]:
107 return ['-E']
108
109 def get_compile_only_args(self) -> T.List[str]:
110 return ['-c']
111
112 def get_coverage_args(self) -> T.List[str]:
113 return []
114
115 def get_no_stdinc_args(self) -> T.List[str]:
116 return ['-nostdinc']
117
118 def get_no_stdlib_link_args(self) -> T.List[str]:
119 return ['-nostdlib']
120
121 def get_optimization_args(self, optimization_level: str) -> T.List[str]:
122 return ccomp_optimization_args[optimization_level]
123
124 def get_debug_args(self, is_debug: bool) -> T.List[str]:
125 return ccomp_debug_args[is_debug]
126
127 def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
128 for idx, i in enumerate(parameter_list):
129 if i[:9] == '-I':
130 parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
131
132 return parameter_list
133
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mesonbuild/compilers/mixins/compcert.py b/mesonbuild/compilers/mixins/compcert.py
--- a/mesonbuild/compilers/mixins/compcert.py
+++ b/mesonbuild/compilers/mixins/compcert.py
@@ -32,7 +32,7 @@
'plain': [''],
'debug': ['-O0', '-g'],
'debugoptimized': ['-O0', '-g'],
- 'release': ['-03'],
+ 'release': ['-O3'],
'minsize': ['-Os'],
'custom': ['-Obranchless'],
} # type: T.Dict[str, T.List[str]]
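A minimal regression check for the fixed mapping might look like the sketch below; the test function itself is hypothetical and not part of the patch:

```python
# Hypothetical regression test, not part of the actual patch.
from mesonbuild.compilers.mixins.compcert import ccomp_buildtype_args

def test_release_flag_uses_letter_o():
    # '-O3' (letter O), not '-03' (zero three) -- the typo fixed above.
    assert ccomp_buildtype_args['release'] == ['-O3']
```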
| {"golden_diff": "diff --git a/mesonbuild/compilers/mixins/compcert.py b/mesonbuild/compilers/mixins/compcert.py\n--- a/mesonbuild/compilers/mixins/compcert.py\n+++ b/mesonbuild/compilers/mixins/compcert.py\n@@ -32,7 +32,7 @@\n 'plain': [''],\n 'debug': ['-O0', '-g'],\n 'debugoptimized': ['-O0', '-g'],\n- 'release': ['-03'],\n+ 'release': ['-O3'],\n 'minsize': ['-Os'],\n 'custom': ['-Obranchless'],\n } # type: T.Dict[str, T.List[str]]\n", "issue": "-Dbuildtype=release fails on CompCert\n**Describe the bug**\r\nBuilding with CompCert on release mode fails. \r\nThe error message is as follows:\r\n```\r\nccomp: error: Unknown option `-03'\r\n```\r\nNote that this unknown option is \"dash zero three\" and not \"dash O three\". Maybe someone accidentally typed a zero where they wanted a letter \"O\"?\r\n\r\n**To Reproduce**\r\nThe bug seems to trigger with any program.\r\nDownload this Meson \"Hello, world!\" program: [meson-compcert.zip](https://github.com/mesonbuild/meson/files/8468156/meson-compcert.zip).\r\nTry to build it with:\r\n```console\r\n$ CC=ccomp meson setup -Dbuildtype=release build && meson compile -C build\r\n```\r\n\r\n**Expected behavior**\r\nA successful build.\r\n\r\n**system parameters**\r\nI'm running Meson 0.62 from PyPi and my CompCert is the binary package of version 3.9 from the OpenBSD ports system.\r\nMy ninja is version 1.10.2 and my python is version 3.8.12.\n", "before_files": [{"content": "# Copyright 2012-2019 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Representations specific to the CompCert C compiler family.\"\"\"\n\nimport os\nimport re\nimport typing as T\n\nif T.TYPE_CHECKING:\n from ...environment import Environment\n from ...compilers.compilers import Compiler\nelse:\n # This is a bit clever, for mypy we pretend that these mixins descend from\n # Compiler, so we get all of the methods and attributes defined for us, but\n # for runtime we make them descend from object (which all classes normally\n # do). 
This gives up DRYer type checking, with no runtime impact\n Compiler = object\n\nccomp_buildtype_args = {\n 'plain': [''],\n 'debug': ['-O0', '-g'],\n 'debugoptimized': ['-O0', '-g'],\n 'release': ['-03'],\n 'minsize': ['-Os'],\n 'custom': ['-Obranchless'],\n} # type: T.Dict[str, T.List[str]]\n\nccomp_optimization_args = {\n '0': ['-O0'],\n 'g': ['-O0'],\n '1': ['-O1'],\n '2': ['-O2'],\n '3': ['-O3'],\n 's': ['-Os']\n} # type: T.Dict[str, T.List[str]]\n\nccomp_debug_args = {\n False: [],\n True: ['-g']\n} # type: T.Dict[bool, T.List[str]]\n\n# As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,<arg>)\n# There are probably (many) more, but these are those used by picolibc\nccomp_args_to_wul = [\n r\"^-ffreestanding$\",\n r\"^-r$\"\n] # type: T.List[str]\n\nclass CompCertCompiler(Compiler):\n\n id = 'ccomp'\n\n def __init__(self) -> None:\n # Assembly\n self.can_compile_suffixes.add('s')\n default_warn_args = [] # type: T.List[str]\n self.warn_args = {'0': [],\n '1': default_warn_args,\n '2': default_warn_args + [],\n '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]\n\n def get_always_args(self) -> T.List[str]:\n return []\n\n def get_pic_args(self) -> T.List[str]:\n # As of now, CompCert does not support PIC\n return []\n\n def get_buildtype_args(self, buildtype: str) -> T.List[str]:\n return ccomp_buildtype_args[buildtype]\n\n def get_pch_suffix(self) -> str:\n return 'pch'\n\n def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:\n return []\n\n def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:\n \"Always returns a copy that can be independently mutated\"\n patched_args = [] # type: T.List[str]\n for arg in args:\n added = 0\n for ptrn in ccomp_args_to_wul:\n if re.match(ptrn, arg):\n patched_args.append('-WUl,' + arg)\n added = 1\n if not added:\n patched_args.append(arg)\n return patched_args\n\n def thread_flags(self, env: 'Environment') -> T.List[str]:\n return []\n\n def get_preprocess_only_args(self) -> T.List[str]:\n return ['-E']\n\n def get_compile_only_args(self) -> T.List[str]:\n return ['-c']\n\n def get_coverage_args(self) -> T.List[str]:\n return []\n\n def get_no_stdinc_args(self) -> T.List[str]:\n return ['-nostdinc']\n\n def get_no_stdlib_link_args(self) -> T.List[str]:\n return ['-nostdlib']\n\n def get_optimization_args(self, optimization_level: str) -> T.List[str]:\n return ccomp_optimization_args[optimization_level]\n\n def get_debug_args(self, is_debug: bool) -> T.List[str]:\n return ccomp_debug_args[is_debug]\n\n def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:\n for idx, i in enumerate(parameter_list):\n if i[:9] == '-I':\n parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))\n\n return parameter_list\n", "path": "mesonbuild/compilers/mixins/compcert.py"}], "after_files": [{"content": "# Copyright 2012-2019 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the 
License.\n\n\"\"\"Representations specific to the CompCert C compiler family.\"\"\"\n\nimport os\nimport re\nimport typing as T\n\nif T.TYPE_CHECKING:\n from ...environment import Environment\n from ...compilers.compilers import Compiler\nelse:\n # This is a bit clever, for mypy we pretend that these mixins descend from\n # Compiler, so we get all of the methods and attributes defined for us, but\n # for runtime we make them descend from object (which all classes normally\n # do). This gives up DRYer type checking, with no runtime impact\n Compiler = object\n\nccomp_buildtype_args = {\n 'plain': [''],\n 'debug': ['-O0', '-g'],\n 'debugoptimized': ['-O0', '-g'],\n 'release': ['-O3'],\n 'minsize': ['-Os'],\n 'custom': ['-Obranchless'],\n} # type: T.Dict[str, T.List[str]]\n\nccomp_optimization_args = {\n '0': ['-O0'],\n 'g': ['-O0'],\n '1': ['-O1'],\n '2': ['-O2'],\n '3': ['-O3'],\n 's': ['-Os']\n} # type: T.Dict[str, T.List[str]]\n\nccomp_debug_args = {\n False: [],\n True: ['-g']\n} # type: T.Dict[bool, T.List[str]]\n\n# As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,<arg>)\n# There are probably (many) more, but these are those used by picolibc\nccomp_args_to_wul = [\n r\"^-ffreestanding$\",\n r\"^-r$\"\n] # type: T.List[str]\n\nclass CompCertCompiler(Compiler):\n\n id = 'ccomp'\n\n def __init__(self) -> None:\n # Assembly\n self.can_compile_suffixes.add('s')\n default_warn_args = [] # type: T.List[str]\n self.warn_args = {'0': [],\n '1': default_warn_args,\n '2': default_warn_args + [],\n '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]\n\n def get_always_args(self) -> T.List[str]:\n return []\n\n def get_pic_args(self) -> T.List[str]:\n # As of now, CompCert does not support PIC\n return []\n\n def get_buildtype_args(self, buildtype: str) -> T.List[str]:\n return ccomp_buildtype_args[buildtype]\n\n def get_pch_suffix(self) -> str:\n return 'pch'\n\n def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:\n return []\n\n def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:\n \"Always returns a copy that can be independently mutated\"\n patched_args = [] # type: T.List[str]\n for arg in args:\n added = 0\n for ptrn in ccomp_args_to_wul:\n if re.match(ptrn, arg):\n patched_args.append('-WUl,' + arg)\n added = 1\n if not added:\n patched_args.append(arg)\n return patched_args\n\n def thread_flags(self, env: 'Environment') -> T.List[str]:\n return []\n\n def get_preprocess_only_args(self) -> T.List[str]:\n return ['-E']\n\n def get_compile_only_args(self) -> T.List[str]:\n return ['-c']\n\n def get_coverage_args(self) -> T.List[str]:\n return []\n\n def get_no_stdinc_args(self) -> T.List[str]:\n return ['-nostdinc']\n\n def get_no_stdlib_link_args(self) -> T.List[str]:\n return ['-nostdlib']\n\n def get_optimization_args(self, optimization_level: str) -> T.List[str]:\n return ccomp_optimization_args[optimization_level]\n\n def get_debug_args(self, is_debug: bool) -> T.List[str]:\n return ccomp_debug_args[is_debug]\n\n def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:\n for idx, i in enumerate(parameter_list):\n if i[:9] == '-I':\n parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))\n\n return parameter_list\n", "path": "mesonbuild/compilers/mixins/compcert.py"}]} | 1,916 | 153 |
gh_patches_debug_37602 | rasdani/github-patches | git_diff | arviz-devs__arviz-625 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove load_data and save_data functions before 0.4
`load_data` and `save_data` are currently deprecated (after the 0.3.1 release). They need to be removed after 0.4 (assuming the next release is going to be 0.3.2).
--- END ISSUE ---
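For downstream code the migration is a straight rename, as the deprecation warnings already suggest; a sketch with a placeholder file name:

```python
import arviz as az

# Before (deprecated in 0.3.2, removed by this change):
# idata = az.load_data("trace.nc")
# az.save_data(idata, "trace.nc")

# After:
idata = az.from_netcdf("trace.nc")
az.to_netcdf(idata, "trace.nc")
```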
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `arviz/data/__init__.py`
Content:
```
1 """Code for loading and manipulating data structures."""
2 from .inference_data import InferenceData, concat
3 from .io_netcdf import from_netcdf, to_netcdf, load_data, save_data
4 from .datasets import load_arviz_data, list_datasets, clear_data_home
5 from .base import numpy_to_data_array, dict_to_dataset
6 from .converters import convert_to_dataset, convert_to_inference_data
7 from .io_cmdstan import from_cmdstan
8 from .io_dict import from_dict
9 from .io_pymc3 import from_pymc3
10 from .io_pystan import from_pystan
11 from .io_emcee import from_emcee
12 from .io_pyro import from_pyro
13 from .io_tfp import from_tfp
14
15 __all__ = [
16 "InferenceData",
17 "concat",
18 "load_arviz_data",
19 "list_datasets",
20 "clear_data_home",
21 "numpy_to_data_array",
22 "dict_to_dataset",
23 "convert_to_dataset",
24 "convert_to_inference_data",
25 "from_pymc3",
26 "from_pystan",
27 "from_emcee",
28 "from_cmdstan",
29 "from_dict",
30 "from_pyro",
31 "from_tfp",
32 "from_netcdf",
33 "to_netcdf",
34 "load_data",
35 "save_data",
36 ]
37
```
Path: `arviz/data/io_netcdf.py`
Content:
```
1 """Input and output support for data."""
2 import warnings
3 from .inference_data import InferenceData
4 from .converters import convert_to_inference_data
5
6
7 def from_netcdf(filename):
8 """Load netcdf file back into an arviz.InferenceData.
9
10 Parameters
11 ----------
12 filename : str
13 name or path of the file to load trace
14 """
15 return InferenceData.from_netcdf(filename)
16
17
18 def to_netcdf(data, filename, *, group="posterior", coords=None, dims=None):
19 """Save dataset as a netcdf file.
20
21 WARNING: Only idempotent in case `data` is InferenceData
22
23 Parameters
24 ----------
25 data : InferenceData, or any object accepted by `convert_to_inference_data`
26 Object to be saved
27 filename : str
28 name or path of the file to load trace
29 group : str (optional)
30 In case `data` is not InferenceData, this is the group it will be saved to
31 coords : dict (optional)
32 See `convert_to_inference_data`
33 dims : dict (optional)
34 See `convert_to_inference_data`
35
36 Returns
37 -------
38 str
39 filename saved to
40 """
41 inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)
42 file_name = inference_data.to_netcdf(filename)
43 return file_name
44
45
46 def load_data(filename):
47 """Load netcdf file back into an arviz.InferenceData.
48
49 Parameters
50 ----------
51 filename : str
52 name or path of the file to load trace
53
54 Note
55 ----
56 This function is deprecated and will be removed in 0.4.
57 Use `from_netcdf` instead.
58 """
59 warnings.warn(
60 "The 'load_data' function is deprecated as of 0.3.2, use 'from_netcdf' instead",
61 DeprecationWarning,
62 )
63 return from_netcdf(filename=filename)
64
65
66 def save_data(data, filename, *, group="posterior", coords=None, dims=None):
67 """Save dataset as a netcdf file.
68
69 WARNING: Only idempotent in case `data` is InferenceData
70
71 Parameters
72 ----------
73 data : InferenceData, or any object accepted by `convert_to_inference_data`
74 Object to be saved
75 filename : str
76 name or path of the file to load trace
77 group : str (optional)
78 In case `data` is not InferenceData, this is the group it will be saved to
79 coords : dict (optional)
80 See `convert_to_inference_data`
81 dims : dict (optional)
82 See `convert_to_inference_data`
83
84 Returns
85 -------
86 str
87 filename saved to
88
89 Note
90 ----
91 This function is deprecated and will be removed in 0.4.
92 Use `to_netcdf` instead.
93 """
94 warnings.warn(
95 "The 'save_data' function is deprecated as of 0.3.2, use 'to_netcdf' instead",
96 DeprecationWarning,
97 )
98 return to_netcdf(data=data, filename=filename, group=group, coords=coords, dims=dims)
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/arviz/data/__init__.py b/arviz/data/__init__.py
--- a/arviz/data/__init__.py
+++ b/arviz/data/__init__.py
@@ -1,6 +1,6 @@
"""Code for loading and manipulating data structures."""
from .inference_data import InferenceData, concat
-from .io_netcdf import from_netcdf, to_netcdf, load_data, save_data
+from .io_netcdf import from_netcdf, to_netcdf
from .datasets import load_arviz_data, list_datasets, clear_data_home
from .base import numpy_to_data_array, dict_to_dataset
from .converters import convert_to_dataset, convert_to_inference_data
@@ -31,6 +31,4 @@
"from_tfp",
"from_netcdf",
"to_netcdf",
- "load_data",
- "save_data",
]
diff --git a/arviz/data/io_netcdf.py b/arviz/data/io_netcdf.py
--- a/arviz/data/io_netcdf.py
+++ b/arviz/data/io_netcdf.py
@@ -1,5 +1,5 @@
"""Input and output support for data."""
-import warnings
+
from .inference_data import InferenceData
from .converters import convert_to_inference_data
@@ -41,58 +41,3 @@
inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)
file_name = inference_data.to_netcdf(filename)
return file_name
-
-
-def load_data(filename):
- """Load netcdf file back into an arviz.InferenceData.
-
- Parameters
- ----------
- filename : str
- name or path of the file to load trace
-
- Note
- ----
- This function is deprecated and will be removed in 0.4.
- Use `from_netcdf` instead.
- """
- warnings.warn(
- "The 'load_data' function is deprecated as of 0.3.2, use 'from_netcdf' instead",
- DeprecationWarning,
- )
- return from_netcdf(filename=filename)
-
-
-def save_data(data, filename, *, group="posterior", coords=None, dims=None):
- """Save dataset as a netcdf file.
-
- WARNING: Only idempotent in case `data` is InferenceData
-
- Parameters
- ----------
- data : InferenceData, or any object accepted by `convert_to_inference_data`
- Object to be saved
- filename : str
- name or path of the file to load trace
- group : str (optional)
- In case `data` is not InferenceData, this is the group it will be saved to
- coords : dict (optional)
- See `convert_to_inference_data`
- dims : dict (optional)
- See `convert_to_inference_data`
-
- Returns
- -------
- str
- filename saved to
-
- Note
- ----
- This function is deprecated and will be removed in 0.4.
- Use `to_netcdf` instead.
- """
- warnings.warn(
- "The 'save_data' function is deprecated as of 0.3.2, use 'to_netcdf' instead",
- DeprecationWarning,
- )
- return to_netcdf(data=data, filename=filename, group=group, coords=coords, dims=dims)
| {"golden_diff": "diff --git a/arviz/data/__init__.py b/arviz/data/__init__.py\n--- a/arviz/data/__init__.py\n+++ b/arviz/data/__init__.py\n@@ -1,6 +1,6 @@\n \"\"\"Code for loading and manipulating data structures.\"\"\"\n from .inference_data import InferenceData, concat\n-from .io_netcdf import from_netcdf, to_netcdf, load_data, save_data\n+from .io_netcdf import from_netcdf, to_netcdf\n from .datasets import load_arviz_data, list_datasets, clear_data_home\n from .base import numpy_to_data_array, dict_to_dataset\n from .converters import convert_to_dataset, convert_to_inference_data\n@@ -31,6 +31,4 @@\n \"from_tfp\",\n \"from_netcdf\",\n \"to_netcdf\",\n- \"load_data\",\n- \"save_data\",\n ]\ndiff --git a/arviz/data/io_netcdf.py b/arviz/data/io_netcdf.py\n--- a/arviz/data/io_netcdf.py\n+++ b/arviz/data/io_netcdf.py\n@@ -1,5 +1,5 @@\n \"\"\"Input and output support for data.\"\"\"\n-import warnings\n+\n from .inference_data import InferenceData\n from .converters import convert_to_inference_data\n \n@@ -41,58 +41,3 @@\n inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)\n file_name = inference_data.to_netcdf(filename)\n return file_name\n-\n-\n-def load_data(filename):\n- \"\"\"Load netcdf file back into an arviz.InferenceData.\n-\n- Parameters\n- ----------\n- filename : str\n- name or path of the file to load trace\n-\n- Note\n- ----\n- This function is deprecated and will be removed in 0.4.\n- Use `from_netcdf` instead.\n- \"\"\"\n- warnings.warn(\n- \"The 'load_data' function is deprecated as of 0.3.2, use 'from_netcdf' instead\",\n- DeprecationWarning,\n- )\n- return from_netcdf(filename=filename)\n-\n-\n-def save_data(data, filename, *, group=\"posterior\", coords=None, dims=None):\n- \"\"\"Save dataset as a netcdf file.\n-\n- WARNING: Only idempotent in case `data` is InferenceData\n-\n- Parameters\n- ----------\n- data : InferenceData, or any object accepted by `convert_to_inference_data`\n- Object to be saved\n- filename : str\n- name or path of the file to load trace\n- group : str (optional)\n- In case `data` is not InferenceData, this is the group it will be saved to\n- coords : dict (optional)\n- See `convert_to_inference_data`\n- dims : dict (optional)\n- See `convert_to_inference_data`\n-\n- Returns\n- -------\n- str\n- filename saved to\n-\n- Note\n- ----\n- This function is deprecated and will be removed in 0.4.\n- Use `to_netcdf` instead.\n- \"\"\"\n- warnings.warn(\n- \"The 'save_data' function is deprecated as of 0.3.2, use 'to_netcdf' instead\",\n- DeprecationWarning,\n- )\n- return to_netcdf(data=data, filename=filename, group=group, coords=coords, dims=dims)\n", "issue": "Remove load_data and save_data functions before 0.4\n`load_data` and `save_data` are currently deprecated (after 0.3.1 release). 
They need to be removed after 0.4 (assuming next release is going to be 0.3.2).\n", "before_files": [{"content": "\"\"\"Code for loading and manipulating data structures.\"\"\"\nfrom .inference_data import InferenceData, concat\nfrom .io_netcdf import from_netcdf, to_netcdf, load_data, save_data\nfrom .datasets import load_arviz_data, list_datasets, clear_data_home\nfrom .base import numpy_to_data_array, dict_to_dataset\nfrom .converters import convert_to_dataset, convert_to_inference_data\nfrom .io_cmdstan import from_cmdstan\nfrom .io_dict import from_dict\nfrom .io_pymc3 import from_pymc3\nfrom .io_pystan import from_pystan\nfrom .io_emcee import from_emcee\nfrom .io_pyro import from_pyro\nfrom .io_tfp import from_tfp\n\n__all__ = [\n \"InferenceData\",\n \"concat\",\n \"load_arviz_data\",\n \"list_datasets\",\n \"clear_data_home\",\n \"numpy_to_data_array\",\n \"dict_to_dataset\",\n \"convert_to_dataset\",\n \"convert_to_inference_data\",\n \"from_pymc3\",\n \"from_pystan\",\n \"from_emcee\",\n \"from_cmdstan\",\n \"from_dict\",\n \"from_pyro\",\n \"from_tfp\",\n \"from_netcdf\",\n \"to_netcdf\",\n \"load_data\",\n \"save_data\",\n]\n", "path": "arviz/data/__init__.py"}, {"content": "\"\"\"Input and output support for data.\"\"\"\nimport warnings\nfrom .inference_data import InferenceData\nfrom .converters import convert_to_inference_data\n\n\ndef from_netcdf(filename):\n \"\"\"Load netcdf file back into an arviz.InferenceData.\n\n Parameters\n ----------\n filename : str\n name or path of the file to load trace\n \"\"\"\n return InferenceData.from_netcdf(filename)\n\n\ndef to_netcdf(data, filename, *, group=\"posterior\", coords=None, dims=None):\n \"\"\"Save dataset as a netcdf file.\n\n WARNING: Only idempotent in case `data` is InferenceData\n\n Parameters\n ----------\n data : InferenceData, or any object accepted by `convert_to_inference_data`\n Object to be saved\n filename : str\n name or path of the file to load trace\n group : str (optional)\n In case `data` is not InferenceData, this is the group it will be saved to\n coords : dict (optional)\n See `convert_to_inference_data`\n dims : dict (optional)\n See `convert_to_inference_data`\n\n Returns\n -------\n str\n filename saved to\n \"\"\"\n inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)\n file_name = inference_data.to_netcdf(filename)\n return file_name\n\n\ndef load_data(filename):\n \"\"\"Load netcdf file back into an arviz.InferenceData.\n\n Parameters\n ----------\n filename : str\n name or path of the file to load trace\n\n Note\n ----\n This function is deprecated and will be removed in 0.4.\n Use `from_netcdf` instead.\n \"\"\"\n warnings.warn(\n \"The 'load_data' function is deprecated as of 0.3.2, use 'from_netcdf' instead\",\n DeprecationWarning,\n )\n return from_netcdf(filename=filename)\n\n\ndef save_data(data, filename, *, group=\"posterior\", coords=None, dims=None):\n \"\"\"Save dataset as a netcdf file.\n\n WARNING: Only idempotent in case `data` is InferenceData\n\n Parameters\n ----------\n data : InferenceData, or any object accepted by `convert_to_inference_data`\n Object to be saved\n filename : str\n name or path of the file to load trace\n group : str (optional)\n In case `data` is not InferenceData, this is the group it will be saved to\n coords : dict (optional)\n See `convert_to_inference_data`\n dims : dict (optional)\n See `convert_to_inference_data`\n\n Returns\n -------\n str\n filename saved to\n\n Note\n ----\n This function is deprecated and will be 
removed in 0.4.\n Use `to_netcdf` instead.\n \"\"\"\n warnings.warn(\n \"The 'save_data' function is deprecated as of 0.3.2, use 'to_netcdf' instead\",\n DeprecationWarning,\n )\n return to_netcdf(data=data, filename=filename, group=group, coords=coords, dims=dims)\n", "path": "arviz/data/io_netcdf.py"}], "after_files": [{"content": "\"\"\"Code for loading and manipulating data structures.\"\"\"\nfrom .inference_data import InferenceData, concat\nfrom .io_netcdf import from_netcdf, to_netcdf\nfrom .datasets import load_arviz_data, list_datasets, clear_data_home\nfrom .base import numpy_to_data_array, dict_to_dataset\nfrom .converters import convert_to_dataset, convert_to_inference_data\nfrom .io_cmdstan import from_cmdstan\nfrom .io_dict import from_dict\nfrom .io_pymc3 import from_pymc3\nfrom .io_pystan import from_pystan\nfrom .io_emcee import from_emcee\nfrom .io_pyro import from_pyro\nfrom .io_tfp import from_tfp\n\n__all__ = [\n \"InferenceData\",\n \"concat\",\n \"load_arviz_data\",\n \"list_datasets\",\n \"clear_data_home\",\n \"numpy_to_data_array\",\n \"dict_to_dataset\",\n \"convert_to_dataset\",\n \"convert_to_inference_data\",\n \"from_pymc3\",\n \"from_pystan\",\n \"from_emcee\",\n \"from_cmdstan\",\n \"from_dict\",\n \"from_pyro\",\n \"from_tfp\",\n \"from_netcdf\",\n \"to_netcdf\",\n]\n", "path": "arviz/data/__init__.py"}, {"content": "\"\"\"Input and output support for data.\"\"\"\n\nfrom .inference_data import InferenceData\nfrom .converters import convert_to_inference_data\n\n\ndef from_netcdf(filename):\n \"\"\"Load netcdf file back into an arviz.InferenceData.\n\n Parameters\n ----------\n filename : str\n name or path of the file to load trace\n \"\"\"\n return InferenceData.from_netcdf(filename)\n\n\ndef to_netcdf(data, filename, *, group=\"posterior\", coords=None, dims=None):\n \"\"\"Save dataset as a netcdf file.\n\n WARNING: Only idempotent in case `data` is InferenceData\n\n Parameters\n ----------\n data : InferenceData, or any object accepted by `convert_to_inference_data`\n Object to be saved\n filename : str\n name or path of the file to load trace\n group : str (optional)\n In case `data` is not InferenceData, this is the group it will be saved to\n coords : dict (optional)\n See `convert_to_inference_data`\n dims : dict (optional)\n See `convert_to_inference_data`\n\n Returns\n -------\n str\n filename saved to\n \"\"\"\n inference_data = convert_to_inference_data(data, group=group, coords=coords, dims=dims)\n file_name = inference_data.to_netcdf(filename)\n return file_name\n", "path": "arviz/data/io_netcdf.py"}]} | 1,590 | 782 |
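The removed `load_data`/`save_data` wrappers above follow the usual deprecation-shim pattern; a minimal sketch of how a caller or test observes it (the file name is a placeholder, and this assumes the pre-removal API is still importable):

```python
import pytest
from arviz.data import load_data  # pre-removal API, exported in arviz/data/__init__.py

# The shim emits a DeprecationWarning and forwards to from_netcdf().
with pytest.warns(DeprecationWarning):
    idata = load_data("trace.nc")  # placeholder file name
```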
gh_patches_debug_21489 | rasdani/github-patches | git_diff | pyinstaller__pyinstaller-3106 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
OS X ImportError: _sysconfigdata_m_darwin_ not found
# Description
When running a standalone executable that was built with PyInstaller on OS X, I receive the following error:
```
[7943] LOADER: Running pyiboot01_bootstrap.py
[7943] LOADER: Running pyi_rth_multiprocessing.py
[7943] LOADER: Running pyi_rth_pkgres.py
Traceback (most recent call last):
File "site-packages/PyInstaller/loader/rthooks/pyi_rth_pkgres.py", line 11, in <module>
File "/Users/addisonelliott/anaconda3/lib/python3.5/site-packages/PyInstaller/loader/pyimod03_importers.py", line 631, in exec_module
exec(bytecode, module.__dict__)
File "site-packages/pkg_resources/__init__.py", line 995, in <module>
File "site-packages/pkg_resources/__init__.py", line 998, in Environment
File "site-packages/pkg_resources/__init__.py", line 284, in get_supported_platform
File "site-packages/pkg_resources/__init__.py", line 480, in get_build_platform
File "sysconfig.py", line 688, in get_platform
File "sysconfig.py", line 549, in get_config_vars
File "sysconfig.py", line 420, in _init_posix
ImportError: No module named '_sysconfigdata_m_darwin_'
```
Issue is specific to OS X. I tested on Windows as well as Ubuntu (using WSL) and the issue was not present on either OS. Note that I can build the executable, but the error occurs at **run-time**.
# Build Setup
* OS: Mac OS X 10.11 El Capitan
* Platform: Darwin-15.6.0-x86_64-i386-64bit
* Python: 3.5.4 using Anaconda
* PyInstaller: Tested with develop(3.4), 3.2.1, and 3.3 and issue occurs on all
# Example Setup
I have found that importing numpy in my case will trigger the error. If you comment out the import, it works fine. Create a Python script, paste the following code and then create an executable from it.
```
import numpy
print('Hello world')
```
And here is the command I am using to compile the code:
```
sudo pyinstaller -y --debug --clean --onedir main.py
```
# What I have discovered
I have discovered that using the hidden-import argument fixes the issue. The goal, however, is to create/edit a hook and apply this fix in a more permanent way.
The failed import module occurs in CPython: https://github.com/python/cpython/blob/master/Lib/sysconfig.py#L339
On Linux, the imported module is _sysconfigdata, which is odd because that doesn't match the GitHub sources. I looked at sysconfig.py on Linux and it had some outdated code that just imported the _sysconfigdata module and did not look at ABI, OS, etc.
Some type of alteration will need to be done to the sysconfig hook most likely: https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/hooks/hook-sysconfig.py
I am going to attempt to create a PR for this. Currently, my questions are:
* What does the sysconfig hook load now?
* Did something change in Python that caused the _sysconfigdata module to be different?
* If so, what version did this occur?
**Edit:**
Here is some of the information I have discovered. For reasons I don't know, the CPython GitHub sources do not match up with the Python implementation from Anaconda. For example, Python 3.5.4 in Anaconda backports the sysconfig changes from 3.6. This does not match up with CPython on GitHub because the sysconfig changes are not present until 3.6.
This is ultimately what is causing the issue. The sysconfig hook for PyInstaller assumes that only Python versions 3.6 and up will contain the _get_sysconfigdata_name() function. Since this also occurs in 3.5.4 for Anaconda, there is an issue.
Below is the sysconfig.py file for my Anaconda 3.5.4 environment:
```
def _init_posix(vars):
"""Initialize the module as appropriate for POSIX systems."""
# _sysconfigdata is generated at build time, see _generate_posix_vars()
# _PYTHON_SYSCONFIGDATA_NAME support backported from Python 3.6
name = _get_sysconfigdata_name()
_temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)
build_time_vars = _temp.build_time_vars
vars.update(build_time_vars)
```
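A version-agnostic guard for the hook, then, is to feature-detect the private helper rather than gate on the interpreter version. A minimal sketch, keeping the existing Windows exclusion (whether `hasattr` is the right probe for every distribution is an assumption):

```python
import sysconfig
from PyInstaller.compat import is_win

# Probe for the helper itself instead of checking is_py36, so Anaconda
# builds that backport it to 3.5.x take the same code path.
if not is_win and hasattr(sysconfig, "_get_sysconfigdata_name"):
    hiddenimports = [sysconfig._get_sysconfigdata_name()]
```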
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `PyInstaller/hooks/hook-sysconfig.py`
Content:
```
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2005-2017, PyInstaller Development Team.
3 #
4 # Distributed under the terms of the GNU General Public License with exception
5 # for distributing bootloader.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #-----------------------------------------------------------------------------
9
10
11 # The 'sysconfig' module requires Makefile and pyconfig.h files from
12 # Python installation. 'sysconfig' parses these files to get some
13 # information from them.
14 # TODO Verify that bundling Makefile and pyconfig.h is still required for Python 3.
15
16 import sysconfig
17 import os
18
19 from PyInstaller.utils.hooks import relpath_to_config_or_make
20 from PyInstaller.compat import is_py36, is_win
21
22 _CONFIG_H = sysconfig.get_config_h_filename()
23 if hasattr(sysconfig, 'get_makefile_filename'):
24 # sysconfig.get_makefile_filename is missing in Python < 2.7.9
25 _MAKEFILE = sysconfig.get_makefile_filename()
26 else:
27 _MAKEFILE = sysconfig._get_makefile_filename()
28
29
30 datas = [(_CONFIG_H, relpath_to_config_or_make(_CONFIG_H))]
31
32 # The Makefile does not exist on all platforms, eg. on Windows
33 if os.path.exists(_MAKEFILE):
34 datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))
35
36 if is_py36 and not is_win:
37 # Python 3.6 uses additional modules like
38 # `_sysconfigdata_m_linux_x86_64-linux-gnu`, see
39 # https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417
40 hiddenimports = [sysconfig._get_sysconfigdata_name()]
41
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/PyInstaller/hooks/hook-sysconfig.py b/PyInstaller/hooks/hook-sysconfig.py
--- a/PyInstaller/hooks/hook-sysconfig.py
+++ b/PyInstaller/hooks/hook-sysconfig.py
@@ -17,7 +17,7 @@
import os
from PyInstaller.utils.hooks import relpath_to_config_or_make
-from PyInstaller.compat import is_py36, is_win
+from PyInstaller.compat import is_win
_CONFIG_H = sysconfig.get_config_h_filename()
if hasattr(sysconfig, 'get_makefile_filename'):
@@ -33,8 +33,10 @@
if os.path.exists(_MAKEFILE):
datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))
-if is_py36 and not is_win:
+if not is_win and hasattr(sysconfig, '_get_sysconfigdata_name'):
# Python 3.6 uses additional modules like
# `_sysconfigdata_m_linux_x86_64-linux-gnu`, see
# https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417
+ # Note: Some versions of Anaconda backport this feature to before 3.6.
+ # See issue #3105
hiddenimports = [sysconfig._get_sysconfigdata_name()]
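As a quick local check of which sysconfig data module a given interpreter expects, one can run a short probe (hedged: the helper is private, and it is absent on stock builds older than 3.6):

```python
import sysconfig

# Prints e.g. "_sysconfigdata_m_darwin_" on the Anaconda 3.5.4 build from the
# report, or falls back to the pre-3.6 module name when the helper is missing.
name = getattr(sysconfig, "_get_sysconfigdata_name", None)
print(name() if callable(name) else "_sysconfigdata")
```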
| {"golden_diff": "diff --git a/PyInstaller/hooks/hook-sysconfig.py b/PyInstaller/hooks/hook-sysconfig.py\n--- a/PyInstaller/hooks/hook-sysconfig.py\n+++ b/PyInstaller/hooks/hook-sysconfig.py\n@@ -17,7 +17,7 @@\n import os\n \n from PyInstaller.utils.hooks import relpath_to_config_or_make\n-from PyInstaller.compat import is_py36, is_win\n+from PyInstaller.compat import is_win\n \n _CONFIG_H = sysconfig.get_config_h_filename()\n if hasattr(sysconfig, 'get_makefile_filename'):\n@@ -33,8 +33,10 @@\n if os.path.exists(_MAKEFILE):\n datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))\n \n-if is_py36 and not is_win:\n+if not is_win and hasattr(sysconfig, '_get_sysconfigdata_name'):\n # Python 3.6 uses additional modules like\n # `_sysconfigdata_m_linux_x86_64-linux-gnu`, see\n # https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417\n+ # Note: Some versions of Anaconda backport this feature to before 3.6.\n+ # See issue #3105\n hiddenimports = [sysconfig._get_sysconfigdata_name()]\n", "issue": "OS X ImportError: _sysconfigdata_m_darwin_ not found\n# Description\r\nWhen running a standalone executable that was built with PyInstaller on OS X, I receive the following error:\r\n```\r\n[7943] LOADER: Running pyiboot01_bootstrap.py\r\n[7943] LOADER: Running pyi_rth_multiprocessing.py\r\n[7943] LOADER: Running pyi_rth_pkgres.py\r\nTraceback (most recent call last):\r\n File \"site-packages/PyInstaller/loader/rthooks/pyi_rth_pkgres.py\", line 11, in <module>\r\n File \"/Users/addisonelliott/anaconda3/lib/python3.5/site-packages/PyInstaller/loader/pyimod03_importers.py\", line 631, in exec_module\r\n exec(bytecode, module.__dict__)\r\n File \"site-packages/pkg_resources/__init__.py\", line 995, in <module>\r\n File \"site-packages/pkg_resources/__init__.py\", line 998, in Environment\r\n File \"site-packages/pkg_resources/__init__.py\", line 284, in get_supported_platform\r\n File \"site-packages/pkg_resources/__init__.py\", line 480, in get_build_platform\r\n File \"sysconfig.py\", line 688, in get_platform\r\n File \"sysconfig.py\", line 549, in get_config_vars\r\n File \"sysconfig.py\", line 420, in _init_posix\r\nImportError: No module named '_sysconfigdata_m_darwin_'\r\n```\r\nIssue is specific to OS X. I tested on Windows as well as Ubuntu (using WSL) and the issue was not present on either OS. Note that I can build the executable but the error occurs on **run-time**.\r\n\r\n# Build Setup\r\n* OS: Mac OS X 10.11 El Capitan\r\n* Platform: Darwin-15.6.0-x86_64-i386-64bit\r\n* Python: 3.5.4 using Anaconda\r\n* PyInstaller: Tested with develop(3.4), 3.2.1, and 3.3 and issue occurs on all\r\n\r\n# Example Setup\r\nI have found that importing numpy in my case will trigger the error. If you comment out the import, it works fine. Create a Python script, paste the following code and then create an executable from it.\r\n```\r\nimport numpy\r\n\r\nprint('Hello world')\r\n```\r\n\r\nAnd here is the command I am using to compile the code: \r\n```\r\nsudo pyinstaller -y --debug --clean --onedir main.py\r\n```\r\n\r\n# What I have discovered\r\nI have discovered that using the hidden-import argument, this fixes the issue. The goal is to create/edit a hook and apply this fix in a more permanent method however.\r\n\r\nThe failed import module occurs in CPython: https://github.com/python/cpython/blob/master/Lib/sysconfig.py#L339\r\n\r\nIn Linux, the imported module is _sysconfigdata which is odd because that doesn't match the Github. 
I looked at sysconfig.py in Linux and it had some outdated code that just imported _sysconfigdata module and did not look at ABI, OS, etc.\r\n\r\nSome type of alteration will need to be done to the sysconfig hook most likely: https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/hooks/hook-sysconfig.py\r\n\r\nI am going to attempt to create a PR for this. Currently, my questions are:\r\n* What does the sysconfig hook load now?\r\n* Did something change in Python that caused the _sysconfigdata module to be different?\r\n* If so, what version did this occur?\r\n\r\n**Edit:**\r\nHere is some of the information I have discovered. For reasons I don't know, the CPython GitHub does not match up with the Python implementation from Anaconda. For example, Python 3.5.4 in Anaconda backports the sysconfig changes in 3.6. This does not match up with CPython on Github because the sysconfig changes are not present until 3.6.\r\n\r\nThis is ultimately what is causing the issue. The sysconfig hook for PyInstaller assumes that only Python versions 3.6 and up will contain the _get_sysconfigdata_name() function. Since this also occurs in 3.5.4 for Anaconda, there is an issue.\r\n\r\nBelow is the sysconfig.py file for my Anaconda 3.5.4 environment:\r\n```\r\ndef _init_posix(vars):\r\n \"\"\"Initialize the module as appropriate for POSIX systems.\"\"\"\r\n # _sysconfigdata is generated at build time, see _generate_posix_vars()\r\n # _PYTHON_SYSCONFIGDATA_NAME support backported from Python 3.6\r\n name = _get_sysconfigdata_name()\r\n _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)\r\n build_time_vars = _temp.build_time_vars\r\n vars.update(build_time_vars)\r\n```\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2017, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\n\n# The 'sysconfig' module requires Makefile and pyconfig.h files from\n# Python installation. 'sysconfig' parses these files to get some\n# information from them.\n# TODO Verify that bundling Makefile and pyconfig.h is still required for Python 3.\n\nimport sysconfig\nimport os\n\nfrom PyInstaller.utils.hooks import relpath_to_config_or_make\nfrom PyInstaller.compat import is_py36, is_win\n\n_CONFIG_H = sysconfig.get_config_h_filename()\nif hasattr(sysconfig, 'get_makefile_filename'):\n # sysconfig.get_makefile_filename is missing in Python < 2.7.9\n _MAKEFILE = sysconfig.get_makefile_filename()\nelse:\n _MAKEFILE = sysconfig._get_makefile_filename()\n\n\ndatas = [(_CONFIG_H, relpath_to_config_or_make(_CONFIG_H))]\n\n# The Makefile does not exist on all platforms, eg. 
on Windows\nif os.path.exists(_MAKEFILE):\n datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))\n\nif is_py36 and not is_win:\n # Python 3.6 uses additional modules like\n # `_sysconfigdata_m_linux_x86_64-linux-gnu`, see\n # https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417\n hiddenimports = [sysconfig._get_sysconfigdata_name()]\n", "path": "PyInstaller/hooks/hook-sysconfig.py"}], "after_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2017, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\n\n# The 'sysconfig' module requires Makefile and pyconfig.h files from\n# Python installation. 'sysconfig' parses these files to get some\n# information from them.\n# TODO Verify that bundling Makefile and pyconfig.h is still required for Python 3.\n\nimport sysconfig\nimport os\n\nfrom PyInstaller.utils.hooks import relpath_to_config_or_make\nfrom PyInstaller.compat import is_win\n\n_CONFIG_H = sysconfig.get_config_h_filename()\nif hasattr(sysconfig, 'get_makefile_filename'):\n # sysconfig.get_makefile_filename is missing in Python < 2.7.9\n _MAKEFILE = sysconfig.get_makefile_filename()\nelse:\n _MAKEFILE = sysconfig._get_makefile_filename()\n\n\ndatas = [(_CONFIG_H, relpath_to_config_or_make(_CONFIG_H))]\n\n# The Makefile does not exist on all platforms, eg. on Windows\nif os.path.exists(_MAKEFILE):\n datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))\n\nif not is_win and hasattr(sysconfig, '_get_sysconfigdata_name'):\n # Python 3.6 uses additional modules like\n # `_sysconfigdata_m_linux_x86_64-linux-gnu`, see\n # https://github.com/python/cpython/blob/3.6/Lib/sysconfig.py#L417\n # Note: Some versions of Anaconda backport this feature to before 3.6.\n # See issue #3105\n hiddenimports = [sysconfig._get_sysconfigdata_name()]\n", "path": "PyInstaller/hooks/hook-sysconfig.py"}]} | 1,775 | 293 |
gh_patches_debug_15681 | rasdani/github-patches | git_diff | TheAlgorithms__Python-1461 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DIRECTORY.md not being updated by TravisCI
- [x] .travis.yml isn't updating DIRECTORY.md automatically as it should
- [x] scripts/build_directory_md.py can have some minor changes too. #1461
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/build_directory_md.py`
Content:
```
1 #!/usr/bin/env python3
2
3 import os
4 from typing import Iterator
5
6 URL_BASE = "https://github.com/TheAlgorithms/Python/blob/master"
7
8
9 def good_filepaths(top_dir: str = ".") -> Iterator[str]:
10 for dirpath, dirnames, filenames in os.walk(top_dir):
11 dirnames[:] = [d for d in dirnames if d != "scripts" and d[0] not in "._"]
12 for filename in filenames:
13 if filename == "__init__.py":
14 continue
15 if os.path.splitext(filename)[1] in (".py", ".ipynb"):
16 yield os.path.join(dirpath, filename).lstrip("./")
17
18
19 def md_prefix(i):
20 return f"{i * ' '}*" if i else "##"
21
22
23 def print_path(old_path: str, new_path: str) -> str:
24 old_parts = old_path.split(os.sep)
25 for i, new_part in enumerate(new_path.split(os.sep)):
26 if i + 1 > len(old_parts) or old_parts[i] != new_part:
27 if new_part:
28 print(f"{md_prefix(i)} {new_part.replace('_', ' ').title()}")
29 return new_path
30
31
32 def print_directory_md(top_dir: str = ".") -> None:
33 old_path = ""
34 for filepath in sorted(good_filepaths()):
35 filepath, filename = os.path.split(filepath)
36 if filepath != old_path:
37 old_path = print_path(old_path, filepath)
38 indent = (filepath.count(os.sep) + 1) if filepath else 0
39 url = "/".join((URL_BASE, filepath, filename)).replace(" ", "%20")
40 filename = os.path.splitext(filename.replace("_", " "))[0]
41 print(f"{md_prefix(indent)} [{filename}]({url})")
42
43
44 if __name__ == "__main__":
45 print_directory_md(".")
46
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scripts/build_directory_md.py b/scripts/build_directory_md.py
--- a/scripts/build_directory_md.py
+++ b/scripts/build_directory_md.py
@@ -17,7 +17,7 @@
def md_prefix(i):
- return f"{i * ' '}*" if i else "##"
+ return f"{i * ' '}*" if i else "\n##"
def print_path(old_path: str, new_path: str) -> str:
@@ -37,7 +37,7 @@
old_path = print_path(old_path, filepath)
indent = (filepath.count(os.sep) + 1) if filepath else 0
url = "/".join((URL_BASE, filepath, filename)).replace(" ", "%20")
- filename = os.path.splitext(filename.replace("_", " "))[0]
+ filename = os.path.splitext(filename.replace("_", " ").title())[0]
print(f"{md_prefix(indent)} [{filename}]({url})")
| {"golden_diff": "diff --git a/scripts/build_directory_md.py b/scripts/build_directory_md.py\n--- a/scripts/build_directory_md.py\n+++ b/scripts/build_directory_md.py\n@@ -17,7 +17,7 @@\n \n \n def md_prefix(i):\n- return f\"{i * ' '}*\" if i else \"##\"\n+ return f\"{i * ' '}*\" if i else \"\\n##\"\n \n \n def print_path(old_path: str, new_path: str) -> str:\n@@ -37,7 +37,7 @@\n old_path = print_path(old_path, filepath)\n indent = (filepath.count(os.sep) + 1) if filepath else 0\n url = \"/\".join((URL_BASE, filepath, filename)).replace(\" \", \"%20\")\n- filename = os.path.splitext(filename.replace(\"_\", \" \"))[0]\n+ filename = os.path.splitext(filename.replace(\"_\", \" \").title())[0]\n print(f\"{md_prefix(indent)} [{filename}]({url})\")\n", "issue": "DIRECTORY.md not being updated by TravisCI\n- [x] .travis.yml isn't updating DIRECTORY.md automatically as it should\r\n- [x] scripts/build_directory_md.py needs can have some minor changes too. #1461\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nimport os\nfrom typing import Iterator\n\nURL_BASE = \"https://github.com/TheAlgorithms/Python/blob/master\"\n\n\ndef good_filepaths(top_dir: str = \".\") -> Iterator[str]:\n for dirpath, dirnames, filenames in os.walk(top_dir):\n dirnames[:] = [d for d in dirnames if d != \"scripts\" and d[0] not in \"._\"]\n for filename in filenames:\n if filename == \"__init__.py\":\n continue\n if os.path.splitext(filename)[1] in (\".py\", \".ipynb\"):\n yield os.path.join(dirpath, filename).lstrip(\"./\")\n\n\ndef md_prefix(i):\n return f\"{i * ' '}*\" if i else \"##\"\n\n\ndef print_path(old_path: str, new_path: str) -> str:\n old_parts = old_path.split(os.sep)\n for i, new_part in enumerate(new_path.split(os.sep)):\n if i + 1 > len(old_parts) or old_parts[i] != new_part:\n if new_part:\n print(f\"{md_prefix(i)} {new_part.replace('_', ' ').title()}\")\n return new_path\n\n\ndef print_directory_md(top_dir: str = \".\") -> None:\n old_path = \"\"\n for filepath in sorted(good_filepaths()):\n filepath, filename = os.path.split(filepath)\n if filepath != old_path:\n old_path = print_path(old_path, filepath)\n indent = (filepath.count(os.sep) + 1) if filepath else 0\n url = \"/\".join((URL_BASE, filepath, filename)).replace(\" \", \"%20\")\n filename = os.path.splitext(filename.replace(\"_\", \" \"))[0]\n print(f\"{md_prefix(indent)} [{filename}]({url})\")\n\n\nif __name__ == \"__main__\":\n print_directory_md(\".\")\n", "path": "scripts/build_directory_md.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\nimport os\nfrom typing import Iterator\n\nURL_BASE = \"https://github.com/TheAlgorithms/Python/blob/master\"\n\n\ndef good_filepaths(top_dir: str = \".\") -> Iterator[str]:\n for dirpath, dirnames, filenames in os.walk(top_dir):\n dirnames[:] = [d for d in dirnames if d != \"scripts\" and d[0] not in \"._\"]\n for filename in filenames:\n if filename == \"__init__.py\":\n continue\n if os.path.splitext(filename)[1] in (\".py\", \".ipynb\"):\n yield os.path.join(dirpath, filename).lstrip(\"./\")\n\n\ndef md_prefix(i):\n return f\"{i * ' '}*\" if i else \"\\n##\"\n\n\ndef print_path(old_path: str, new_path: str) -> str:\n old_parts = old_path.split(os.sep)\n for i, new_part in enumerate(new_path.split(os.sep)):\n if i + 1 > len(old_parts) or old_parts[i] != new_part:\n if new_part:\n print(f\"{md_prefix(i)} {new_part.replace('_', ' ').title()}\")\n return new_path\n\n\ndef print_directory_md(top_dir: str = \".\") -> None:\n old_path = \"\"\n for filepath in sorted(good_filepaths()):\n 
filepath, filename = os.path.split(filepath)\n if filepath != old_path:\n old_path = print_path(old_path, filepath)\n indent = (filepath.count(os.sep) + 1) if filepath else 0\n url = \"/\".join((URL_BASE, filepath, filename)).replace(\" \", \"%20\")\n filename = os.path.splitext(filename.replace(\"_\", \" \").title())[0]\n print(f\"{md_prefix(indent)} [{filename}]({url})\")\n\n\nif __name__ == \"__main__\":\n print_directory_md(\".\")\n", "path": "scripts/build_directory_md.py"}]} | 796 | 213 |
gh_patches_debug_28663 | rasdani/github-patches | git_diff | ray-project__ray-8177 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Ray async api is not working with uvloop.
<!--Please include [tune], [rllib], [autoscaler] etc. in the issue title if relevant-->
### What is the problem?
The current Ray async API uses an asyncio event loop internal attribute to identify whether the loop is running in the current thread.
```python3
loop = asyncio.get_event_loop()
if loop.is_running():
if loop._thread_id != threading.get_ident():
# If the loop is runing outside current thread, we actually need
# to do this to make sure the context is initialized.
asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)
```
This causes a problem when we use Ray APIs inside FastAPI, because FastAPI uses uvloop as its main event loop, and uvloop doesn't have a `_thread_id` attribute.
@simon-mo Any good idea to fix this? It doesn't seem to be trivial. What about doing async_init() whenever an asyncio loop is created in a different thread, instead of checking the event loop's thread id? I assume the only use case where an asyncio loop is defined in a different thread is inside an async actor?
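For reference, a loop-implementation-agnostic way to ask the same question is the public asyncio API, which works for uvloop as well. A minimal sketch (behavior on Python versions before 3.7, where `get_running_loop` does not exist, is left aside as an assumption):

```python
import asyncio

def running_in_this_threads_loop() -> bool:
    # asyncio.get_running_loop() raises RuntimeError when no loop is running
    # in the *current* thread, so no private _thread_id attribute is needed.
    try:
        asyncio.get_running_loop()
        return True
    except RuntimeError:
        return False
```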
### Reproduction (REQUIRED)
Please provide a script that can be run to reproduce the issue. The script should have **no external library dependencies** (i.e., use fake or mock data / environments):
```python3
import time
import asyncio
import ray
import psutil
from fastapi import FastAPI, APIRouter
app = FastAPI(
title="API template",
description="Template to build upon for API serving and distributed computation",
version="0.1.0",
openapi_url="/openapi.json",
docs_url="/docs",
)
@app.on_event("startup")
def startup_event():
ray.init(num_cpus=2)
@app.on_event("shutdown")
def shutdown_event():
ray.shutdown()
@app.get('/async')
async def non_seq_async_process():
"""
async distributed execution
"""
@ray.remote
def slow_function(i):
time.sleep(i)
return i
start_time = time.time()
# result_ids = []
# for i in range(10, 60, 10):
# result_ids.append(slow_function.remote(i))
# results = ray.get(result_ids)
results = await asyncio.wait([slow_function.remote(i) for i in range(10, 60, 10)])
duration = time.time() - start_time
out = "Executing the for loop took {:.3f} seconds.\n".format(duration)
out += f"The results are: {results}\n"
```
If we cannot run your script, we cannot fix your issue.
- [x] I have verified my script runs in a clean environment and reproduces the issue.
- [x] I have verified the issue also occurs with the [latest wheels](https://docs.ray.io/en/latest/installation.html).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `python/ray/experimental/async_api.py`
Content:
```
1 # Note: asyncio is only compatible with Python 3
2
3 import asyncio
4 import threading
5
6 import ray
7 from ray.experimental.async_plasma import PlasmaEventHandler
8 from ray.services import logger
9
10 handler = None
11
12
13 async def _async_init():
14 global handler
15 if handler is None:
16 worker = ray.worker.global_worker
17 loop = asyncio.get_event_loop()
18 handler = PlasmaEventHandler(loop, worker)
19 worker.core_worker.set_plasma_added_callback(handler)
20 logger.debug("AsyncPlasma Connection Created!")
21
22
23 def init():
24 """
25 Initialize synchronously.
26 """
27 assert ray.is_initialized(), "Please call ray.init before async_api.init"
28
29 # Noop when handler is set.
30 if handler is not None:
31 return
32
33 loop = asyncio.get_event_loop()
34 if loop.is_running():
35 if loop._thread_id != threading.get_ident():
36 # If the loop is runing outside current thread, we actually need
37 # to do this to make sure the context is initialized.
38 asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)
39 else:
40 async_init_done = asyncio.get_event_loop().create_task(
41 _async_init())
42 # Block until the async init finishes.
43 async_init_done.done()
44 else:
45 asyncio.get_event_loop().run_until_complete(_async_init())
46
47
48 def as_future(object_id):
49 """Turn an object_id into a Future object.
50
51 Args:
52 object_id: A Ray object_id.
53
54 Returns:
55 PlasmaObjectFuture: A future object that waits the object_id.
56 """
57 if handler is None:
58 init()
59 return handler.as_future(object_id)
60
61
62 def shutdown():
63 """Manually shutdown the async API.
64
65 Cancels all related tasks and all the socket transportation.
66 """
67 global handler
68 if handler is not None:
69 handler.close()
70 handler = None
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/python/ray/experimental/async_api.py b/python/ray/experimental/async_api.py
--- a/python/ray/experimental/async_api.py
+++ b/python/ray/experimental/async_api.py
@@ -1,7 +1,4 @@
-# Note: asyncio is only compatible with Python 3
-
import asyncio
-import threading
import ray
from ray.experimental.async_plasma import PlasmaEventHandler
@@ -10,7 +7,10 @@
handler = None
-async def _async_init():
+def init():
+ """Initialize plasma event handlers for asyncio support."""
+ assert ray.is_initialized(), "Please call ray.init before async_api.init"
+
global handler
if handler is None:
worker = ray.worker.global_worker
@@ -20,31 +20,6 @@
logger.debug("AsyncPlasma Connection Created!")
-def init():
- """
- Initialize synchronously.
- """
- assert ray.is_initialized(), "Please call ray.init before async_api.init"
-
- # Noop when handler is set.
- if handler is not None:
- return
-
- loop = asyncio.get_event_loop()
- if loop.is_running():
- if loop._thread_id != threading.get_ident():
- # If the loop is runing outside current thread, we actually need
- # to do this to make sure the context is initialized.
- asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)
- else:
- async_init_done = asyncio.get_event_loop().create_task(
- _async_init())
- # Block until the async init finishes.
- async_init_done.done()
- else:
- asyncio.get_event_loop().run_until_complete(_async_init())
-
-
def as_future(object_id):
"""Turn an object_id into a Future object.
| {"golden_diff": "diff --git a/python/ray/experimental/async_api.py b/python/ray/experimental/async_api.py\n--- a/python/ray/experimental/async_api.py\n+++ b/python/ray/experimental/async_api.py\n@@ -1,7 +1,4 @@\n-# Note: asyncio is only compatible with Python 3\n-\n import asyncio\n-import threading\n \n import ray\n from ray.experimental.async_plasma import PlasmaEventHandler\n@@ -10,7 +7,10 @@\n handler = None\n \n \n-async def _async_init():\n+def init():\n+ \"\"\"Initialize plasma event handlers for asyncio support.\"\"\"\n+ assert ray.is_initialized(), \"Please call ray.init before async_api.init\"\n+\n global handler\n if handler is None:\n worker = ray.worker.global_worker\n@@ -20,31 +20,6 @@\n logger.debug(\"AsyncPlasma Connection Created!\")\n \n \n-def init():\n- \"\"\"\n- Initialize synchronously.\n- \"\"\"\n- assert ray.is_initialized(), \"Please call ray.init before async_api.init\"\n-\n- # Noop when handler is set.\n- if handler is not None:\n- return\n-\n- loop = asyncio.get_event_loop()\n- if loop.is_running():\n- if loop._thread_id != threading.get_ident():\n- # If the loop is runing outside current thread, we actually need\n- # to do this to make sure the context is initialized.\n- asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)\n- else:\n- async_init_done = asyncio.get_event_loop().create_task(\n- _async_init())\n- # Block until the async init finishes.\n- async_init_done.done()\n- else:\n- asyncio.get_event_loop().run_until_complete(_async_init())\n-\n-\n def as_future(object_id):\n \"\"\"Turn an object_id into a Future object.\n", "issue": "Ray async api is not working with uvloop.\n<!--Please include [tune], [rllib], [autoscaler] etc. in the issue title if relevant-->\r\n\r\n### What is the problem?\r\n\r\nCurrent Ray async api uses asyncio event loop's internal attribute to identify if the loop is running in the current current thread.\r\n\r\n```python3\r\n loop = asyncio.get_event_loop()\r\n if loop.is_running():\r\n if loop._thread_id != threading.get_ident():\r\n # If the loop is runing outside current thread, we actually need\r\n # to do this to make sure the context is initialized.\r\n asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)\r\n```\r\nThis causes a problem when we uses Ray APIs inside Fast API because Fast API uses uvloop as its main event loop, and uvloop doesn't have `_thread_id` attribute.\r\n\r\n@simon-mo Any good idea to fix this? It doesn't seem to be trivial. What about we do async_init() whenever asyncio loop is created in a different thread instead of checking if the event loop's thread id? I assume the only use case where asyncio loop is defined in a different thread is only inside async actor? \r\n\r\n### Reproduction (REQUIRED)\r\nPlease provide a script that can be run to reproduce the issue. 
The script should have **no external library dependencies** (i.e., use fake or mock data / environments):\r\n\r\n```python3\r\nimport time\r\nimport asyncio \r\n\u200b\r\nimport ray\r\nimport psutil\r\nfrom fastapi import FastAPI, APIRouter\r\n\u200b\r\n\u200b\r\napp = FastAPI(\r\n title=\"API template\",\r\n description=\"Template to build upon for API serving and distributed computation\",\r\n version=\"0.1.0\",\r\n openapi_url=\"/openapi.json\",\r\n docs_url=\"/docs\",\r\n)\r\n\u200b\r\[email protected]_event(\"startup\")\r\ndef startup_event():\r\n ray.init(num_cpus=2)\r\n\u200b\r\n\u200b\r\n\u200b\r\[email protected]_event(\"shutdown\")\r\ndef shutdown_event():\r\n ray.shutdown()\r\n\u200b\r\n\u200b\r\[email protected]('/async')\r\nasync def non_seq_async_process():\r\n \"\"\"\r\n async distributed execution\r\n \"\"\"\r\n @ray.remote\r\n def slow_function(i):\r\n time.sleep(i)\r\n return i\r\n\u200b\r\n start_time = time.time()\r\n\u200b\r\n # result_ids = []\r\n # for i in range(10, 60, 10):\r\n # result_ids.append(slow_function.remote(i))\r\n \r\n # results = ray.get(result_ids)\r\n\u200b\r\n results = await asyncio.wait([slow_function.remote(i) for i in range(10, 60, 10)])\r\n\u200b\r\n \r\n duration = time.time() - start_time\r\n out = \"Executing the for loop took {:.3f} seconds.\\n\".format(duration)\r\n out += f\"The results are: {results}\\n\"\r\n\r\n```\r\n\r\nIf we cannot run your script, we cannot fix your issue.\r\n\r\n- [x] I have verified my script runs in a clean environment and reproduces the issue.\r\n- [x] I have verified the issue also occurs with the [latest wheels](https://docs.ray.io/en/latest/installation.html).\r\n\n", "before_files": [{"content": "# Note: asyncio is only compatible with Python 3\n\nimport asyncio\nimport threading\n\nimport ray\nfrom ray.experimental.async_plasma import PlasmaEventHandler\nfrom ray.services import logger\n\nhandler = None\n\n\nasync def _async_init():\n global handler\n if handler is None:\n worker = ray.worker.global_worker\n loop = asyncio.get_event_loop()\n handler = PlasmaEventHandler(loop, worker)\n worker.core_worker.set_plasma_added_callback(handler)\n logger.debug(\"AsyncPlasma Connection Created!\")\n\n\ndef init():\n \"\"\"\n Initialize synchronously.\n \"\"\"\n assert ray.is_initialized(), \"Please call ray.init before async_api.init\"\n\n # Noop when handler is set.\n if handler is not None:\n return\n\n loop = asyncio.get_event_loop()\n if loop.is_running():\n if loop._thread_id != threading.get_ident():\n # If the loop is runing outside current thread, we actually need\n # to do this to make sure the context is initialized.\n asyncio.run_coroutine_threadsafe(_async_init(), loop=loop)\n else:\n async_init_done = asyncio.get_event_loop().create_task(\n _async_init())\n # Block until the async init finishes.\n async_init_done.done()\n else:\n asyncio.get_event_loop().run_until_complete(_async_init())\n\n\ndef as_future(object_id):\n \"\"\"Turn an object_id into a Future object.\n\n Args:\n object_id: A Ray object_id.\n\n Returns:\n PlasmaObjectFuture: A future object that waits the object_id.\n \"\"\"\n if handler is None:\n init()\n return handler.as_future(object_id)\n\n\ndef shutdown():\n \"\"\"Manually shutdown the async API.\n\n Cancels all related tasks and all the socket transportation.\n \"\"\"\n global handler\n if handler is not None:\n handler.close()\n handler = None\n", "path": "python/ray/experimental/async_api.py"}], "after_files": [{"content": "import asyncio\n\nimport ray\nfrom 
ray.experimental.async_plasma import PlasmaEventHandler\nfrom ray.services import logger\n\nhandler = None\n\n\ndef init():\n \"\"\"Initialize plasma event handlers for asyncio support.\"\"\"\n assert ray.is_initialized(), \"Please call ray.init before async_api.init\"\n\n global handler\n if handler is None:\n worker = ray.worker.global_worker\n loop = asyncio.get_event_loop()\n handler = PlasmaEventHandler(loop, worker)\n worker.core_worker.set_plasma_added_callback(handler)\n logger.debug(\"AsyncPlasma Connection Created!\")\n\n\ndef as_future(object_id):\n \"\"\"Turn an object_id into a Future object.\n\n Args:\n object_id: A Ray object_id.\n\n Returns:\n PlasmaObjectFuture: A future object that waits the object_id.\n \"\"\"\n if handler is None:\n init()\n return handler.as_future(object_id)\n\n\ndef shutdown():\n \"\"\"Manually shutdown the async API.\n\n Cancels all related tasks and all the socket transportation.\n \"\"\"\n global handler\n if handler is not None:\n handler.close()\n handler = None\n", "path": "python/ray/experimental/async_api.py"}]} | 1,449 | 399 |
gh_patches_debug_27254 | rasdani/github-patches | git_diff | nextcloud__appstore-272 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Create documentation section for explaining certificates, signing and how it all works together
App devs need a very quick tutorial/walkthrough in the docs on how to generate a new certificate pair, request the public cert to be signed, register an app id, revoke certs (like registering certs: post it on our issue tracker) and sign apps.
Also some background needs to be provided on how the whole certificate mechanism works.
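A hedged sketch of the shell workflow such a section would walk through, pieced together from the help texts in the form code below (the key/CSR generation command and the file layout are assumptions; APP_ID and paths are placeholders):

```sh
# 1. generate a private key plus a certificate signing request
openssl req -nodes -newkey rsa:4096 \
    -keyout ~/.nextcloud/certificates/APP_ID.key \
    -out ~/.nextcloud/certificates/APP_ID.csr -subj "/CN=APP_ID"

# 2. sign your app id when registering the app
echo -n "APP_ID" | openssl dgst -sha512 \
    -sign ~/.nextcloud/certificates/APP_ID.key | openssl base64

# 3. sign the release tarball when uploading a release
openssl dgst -sha512 -sign ~/.nextcloud/certificates/APP_ID.key \
    /path/to/app.tar.gz | openssl base64
```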
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nextcloudappstore/core/forms.py`
Content:
```
1 from django.forms import Form, CharField, Textarea, ChoiceField, RadioSelect, \
2 BooleanField
3 from django.utils.translation import ugettext_lazy as _ # type: ignore
4
5 from nextcloudappstore.core.models import App, AppRating
6
7 RATING_CHOICES = (
8 (0.0, _('Bad')),
9 (0.5, _('Ok')),
10 (1.0, _('Good'))
11 )
12
13
14 class AppReleaseUploadForm(Form):
15 download = CharField(label=_('Download link (tar.gz)'), max_length=256)
16 signature = CharField(widget=Textarea, label=_('SHA512 signature'),
17 help_text=_(
18 'Hint: can be calculated by executing the '
19 'following command: openssl dgst -sha512 -sign '
20 '/path/to/private-cert.key /path/to/app.tar.gz '
21 '| openssl base64'))
22 nightly = BooleanField(label=_('Nightly'))
23
24
25 class AppRatingForm(Form):
26 def __init__(self, *args, **kwargs):
27 self._id = kwargs.pop('id', None)
28 self._user = kwargs.pop('user', None)
29 self._language_code = kwargs.pop('language_code', None)
30 super().__init__(*args, **kwargs)
31
32 rating = ChoiceField(initial=0.5, choices=RATING_CHOICES,
33 widget=RadioSelect)
34 comment = CharField(widget=Textarea, required=False,
35 label=_('Review'))
36
37 class Meta:
38 fields = ('rating', 'comment')
39
40 def save(self):
41 app = App.objects.get(id=self._id)
42 app_rating, created = AppRating.objects.get_or_create(user=self._user,
43 app=app)
44 app_rating.rating = self.cleaned_data['rating']
45 app_rating.set_current_language(self._language_code)
46 app_rating.comment = self.cleaned_data['comment']
47 app_rating.save()
48
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nextcloudappstore/core/forms.py b/nextcloudappstore/core/forms.py
--- a/nextcloudappstore/core/forms.py
+++ b/nextcloudappstore/core/forms.py
@@ -13,15 +13,32 @@
class AppReleaseUploadForm(Form):
download = CharField(label=_('Download link (tar.gz)'), max_length=256)
- signature = CharField(widget=Textarea, label=_('SHA512 signature'),
- help_text=_(
- 'Hint: can be calculated by executing the '
- 'following command: openssl dgst -sha512 -sign '
- '/path/to/private-cert.key /path/to/app.tar.gz '
- '| openssl base64'))
+ signature = CharField(
+ widget=Textarea,
+ label=_('SHA512 signature'),
+ help_text=_(
+ 'Hint: can be calculated by executing the '
+ 'following command: openssl dgst -sha512 -sign '
+ '~/.nextcloud/certificates/APP_ID.key '
+ '/path/to/app.tar.gz | openssl base64'))
nightly = BooleanField(label=_('Nightly'))
+class AppRegisterForm(Form):
+ certificate = CharField(
+ widget=Textarea(attrs={'pattern': '-----BEGIN CERTIFICATE-----.*'}),
+ label=_('Public certificate'),
+ help_text=_(
+ 'Usually stored in ~/.nextcloud/certificates/APP_ID.crt'))
+ signature = CharField(
+ widget=Textarea,
+ label=_('SHA512 signature'),
+ help_text=_(
+ 'Hint: can be calculated by executing the '
+ 'following command: echo -n "APP_ID" | openssl dgst -sha512 -sign '
+ '~/.nextcloud/certificates/APP_ID.key | openssl base64'))
+
+
class AppRatingForm(Form):
def __init__(self, *args, **kwargs):
self._id = kwargs.pop('id', None)
| {"golden_diff": "diff --git a/nextcloudappstore/core/forms.py b/nextcloudappstore/core/forms.py\n--- a/nextcloudappstore/core/forms.py\n+++ b/nextcloudappstore/core/forms.py\n@@ -13,15 +13,32 @@\n \n class AppReleaseUploadForm(Form):\n download = CharField(label=_('Download link (tar.gz)'), max_length=256)\n- signature = CharField(widget=Textarea, label=_('SHA512 signature'),\n- help_text=_(\n- 'Hint: can be calculated by executing the '\n- 'following command: openssl dgst -sha512 -sign '\n- '/path/to/private-cert.key /path/to/app.tar.gz '\n- '| openssl base64'))\n+ signature = CharField(\n+ widget=Textarea,\n+ label=_('SHA512 signature'),\n+ help_text=_(\n+ 'Hint: can be calculated by executing the '\n+ 'following command: openssl dgst -sha512 -sign '\n+ '~/.nextcloud/certificates/APP_ID.key '\n+ '/path/to/app.tar.gz | openssl base64'))\n nightly = BooleanField(label=_('Nightly'))\n \n \n+class AppRegisterForm(Form):\n+ certificate = CharField(\n+ widget=Textarea(attrs={'pattern': '-----BEGIN CERTIFICATE-----.*'}),\n+ label=_('Public certificate'),\n+ help_text=_(\n+ 'Usually stored in ~/.nextcloud/certificates/APP_ID.crt'))\n+ signature = CharField(\n+ widget=Textarea,\n+ label=_('SHA512 signature'),\n+ help_text=_(\n+ 'Hint: can be calculated by executing the '\n+ 'following command: echo -n \"APP_ID\" | openssl dgst -sha512 -sign '\n+ '~/.nextcloud/certificates/APP_ID.key | openssl base64'))\n+\n+\n class AppRatingForm(Form):\n def __init__(self, *args, **kwargs):\n self._id = kwargs.pop('id', None)\n", "issue": "Create documentation section for explaining certificates, signing and how it all works together\nApp devs need a very quick tutorial/walkthrough in the docs on how to generate a new certificate pair, request the public cert to be signed, registering an app id, revoking certs (like registering certs: post it on our issue tracker) and signing apps.\n\nAlso some background needs to be provided on how the whole certificate mechanism works.\n\n", "before_files": [{"content": "from django.forms import Form, CharField, Textarea, ChoiceField, RadioSelect, \\\n BooleanField\nfrom django.utils.translation import ugettext_lazy as _ # type: ignore\n\nfrom nextcloudappstore.core.models import App, AppRating\n\nRATING_CHOICES = (\n (0.0, _('Bad')),\n (0.5, _('Ok')),\n (1.0, _('Good'))\n)\n\n\nclass AppReleaseUploadForm(Form):\n download = CharField(label=_('Download link (tar.gz)'), max_length=256)\n signature = CharField(widget=Textarea, label=_('SHA512 signature'),\n help_text=_(\n 'Hint: can be calculated by executing the '\n 'following command: openssl dgst -sha512 -sign '\n '/path/to/private-cert.key /path/to/app.tar.gz '\n '| openssl base64'))\n nightly = BooleanField(label=_('Nightly'))\n\n\nclass AppRatingForm(Form):\n def __init__(self, *args, **kwargs):\n self._id = kwargs.pop('id', None)\n self._user = kwargs.pop('user', None)\n self._language_code = kwargs.pop('language_code', None)\n super().__init__(*args, **kwargs)\n\n rating = ChoiceField(initial=0.5, choices=RATING_CHOICES,\n widget=RadioSelect)\n comment = CharField(widget=Textarea, required=False,\n label=_('Review'))\n\n class Meta:\n fields = ('rating', 'comment')\n\n def save(self):\n app = App.objects.get(id=self._id)\n app_rating, created = AppRating.objects.get_or_create(user=self._user,\n app=app)\n app_rating.rating = self.cleaned_data['rating']\n app_rating.set_current_language(self._language_code)\n app_rating.comment = self.cleaned_data['comment']\n app_rating.save()\n", "path": 
"nextcloudappstore/core/forms.py"}], "after_files": [{"content": "from django.forms import Form, CharField, Textarea, ChoiceField, RadioSelect, \\\n BooleanField\nfrom django.utils.translation import ugettext_lazy as _ # type: ignore\n\nfrom nextcloudappstore.core.models import App, AppRating\n\nRATING_CHOICES = (\n (0.0, _('Bad')),\n (0.5, _('Ok')),\n (1.0, _('Good'))\n)\n\n\nclass AppReleaseUploadForm(Form):\n download = CharField(label=_('Download link (tar.gz)'), max_length=256)\n signature = CharField(\n widget=Textarea,\n label=_('SHA512 signature'),\n help_text=_(\n 'Hint: can be calculated by executing the '\n 'following command: openssl dgst -sha512 -sign '\n '~/.nextcloud/certificates/APP_ID.key '\n '/path/to/app.tar.gz | openssl base64'))\n nightly = BooleanField(label=_('Nightly'))\n\n\nclass AppRegisterForm(Form):\n certificate = CharField(\n widget=Textarea(attrs={'pattern': '-----BEGIN CERTIFICATE-----.*'}),\n label=_('Public certificate'),\n help_text=_(\n 'Usually stored in ~/.nextcloud/certificates/APP_ID.crt'))\n signature = CharField(\n widget=Textarea,\n label=_('SHA512 signature'),\n help_text=_(\n 'Hint: can be calculated by executing the '\n 'following command: echo -n \"APP_ID\" | openssl dgst -sha512 -sign '\n '~/.nextcloud/certificates/APP_ID.key | openssl base64'))\n\n\nclass AppRatingForm(Form):\n def __init__(self, *args, **kwargs):\n self._id = kwargs.pop('id', None)\n self._user = kwargs.pop('user', None)\n self._language_code = kwargs.pop('language_code', None)\n super().__init__(*args, **kwargs)\n\n rating = ChoiceField(initial=0.5, choices=RATING_CHOICES,\n widget=RadioSelect)\n comment = CharField(widget=Textarea, required=False,\n label=_('Review'))\n\n class Meta:\n fields = ('rating', 'comment')\n\n def save(self):\n app = App.objects.get(id=self._id)\n app_rating, created = AppRating.objects.get_or_create(user=self._user,\n app=app)\n app_rating.rating = self.cleaned_data['rating']\n app_rating.set_current_language(self._language_code)\n app_rating.comment = self.cleaned_data['comment']\n app_rating.save()\n", "path": "nextcloudappstore/core/forms.py"}]} | 828 | 438 |
gh_patches_debug_26500 | rasdani/github-patches | git_diff | pypa__setuptools-555 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
upload command doesn't prompt for password; raises TypeError
# Problem statement
If the `~/.pypirc` file does not contain a password like so:
``` ini
[distutils]
index-servers =
pypitest
[pypitest]
repository = https://testpypi.python.org/pypi
username = my_username
; Note the lack of a password
```
Then uploading the package
```
python setup.py sdist upload -r pypitest
```
Fails to prompt the user for his password and instead raises a TypeError (output truncated)
```
running upload
Traceback (most recent call last):
File "setup.py", line 16, in <module>
keywords=["test", "hello"]
File "/usr/lib/python2.7/distutils/core.py", line 151, in setup
dist.run_commands()
File "/usr/lib/python2.7/distutils/dist.py", line 953, in run_commands
self.run_command(cmd)
File "/usr/lib/python2.7/distutils/dist.py", line 972, in run_command
cmd_obj.run()
File "/usr/lib/python2.7/distutils/command/upload.py", line 60, in run
self.upload_file(command, pyversion, filename)
File "/usr/lib/python2.7/distutils/command/upload.py", line 135, in upload_file
self.password)
TypeError: cannot concatenate 'str' and 'NoneType' objects
```
**This is different** than the behavior of the `register` command, which prompts the user for a password before continuing.
```
python setup.py sdist register -r pypitest
```
(output truncated)
```
Creating tar archive
removing 'HelloPyPi-0.0.1.dev0' (and everything under it)
running register
Password:
```
> Note that the `register` and the `upload` command exhibit the proper behavior **if you store your password in `~/.pypirc`**, but not if the password is omitted.
# Okay, so...?
I am aware that you can run
```
python setup.py sdist register -r pypitest upload -r pypitest
```
As a workaround, but it stands to reason that **if you can register a package without uploading it, then you should also be able to upload a package without registering it**, regardless of whether a password has been specified in your `~/.pypirc` file.
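The missing piece is a tty prompt fallback when neither `.pypirc` nor the keyring yields a password; a minimal standalone sketch of that fallback, mirroring what the fix below adds:

```python
import getpass

def _prompt_for_password():
    # Keep asking on the tty until a non-empty password is entered;
    # treat Ctrl-C or any error as "no password available".
    password = None
    try:
        while not password:
            password = getpass.getpass()
    except (Exception, KeyboardInterrupt):
        password = None
    return password
```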
# Steps to reproduce
1. Remove your pypi password from `~/.pypirc`
2. Find a project that you wish to upload to a pypi server (I used [my example repository](https://github.com/brookskindle/hellopypi) for this)
3. Run `python setup.py sdist upload -r target_pypi_server`
# Setuptools version
setuptools (20.9.0) -- from `pip list` in my virtualenv
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setuptools/command/upload.py`
Content:
```
1 from distutils.command import upload as orig
2
3
4 class upload(orig.upload):
5 """
6 Override default upload behavior to look up password
7 in the keyring if available.
8 """
9
10 def finalize_options(self):
11 orig.upload.finalize_options(self)
12 self.password or self._load_password_from_keyring()
13
14 def _load_password_from_keyring(self):
15 """
16 Attempt to load password from keyring. Suppress Exceptions.
17 """
18 try:
19 keyring = __import__('keyring')
20 self.password = keyring.get_password(self.repository,
21 self.username)
22 except Exception:
23 pass
24
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
--- a/setuptools/command/upload.py
+++ b/setuptools/command/upload.py
@@ -3,13 +3,18 @@
class upload(orig.upload):
"""
- Override default upload behavior to look up password
- in the keyring if available.
+ Override default upload behavior to obtain password
+ in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
- self.password or self._load_password_from_keyring()
+ # Attempt to obtain password. Short circuit evaluation at the first
+ # sign of success.
+ self.password = (
+ self.password or self._load_password_from_keyring() or
+ self._prompt_for_password()
+ )
def _load_password_from_keyring(self):
"""
@@ -17,7 +22,22 @@
"""
try:
keyring = __import__('keyring')
- self.password = keyring.get_password(self.repository,
- self.username)
+ password = keyring.get_password(self.repository, self.username)
except Exception:
- pass
+ password = None
+ finally:
+ return password
+
+ def _prompt_for_password(self):
+ """
+ Prompt for a password on the tty. Suppress Exceptions.
+ """
+ password = None
+ try:
+ import getpass
+ while not password:
+ password = getpass.getpass()
+ except (Exception, KeyboardInterrupt):
+ password = None
+ finally:
+ return password
| {"golden_diff": "diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py\n--- a/setuptools/command/upload.py\n+++ b/setuptools/command/upload.py\n@@ -3,13 +3,18 @@\n \n class upload(orig.upload):\n \"\"\"\n- Override default upload behavior to look up password\n- in the keyring if available.\n+ Override default upload behavior to obtain password\n+ in a variety of different ways.\n \"\"\"\n \n def finalize_options(self):\n orig.upload.finalize_options(self)\n- self.password or self._load_password_from_keyring()\n+ # Attempt to obtain password. Short circuit evaluation at the first\n+ # sign of success.\n+ self.password = (\n+ self.password or self._load_password_from_keyring() or\n+ self._prompt_for_password()\n+ )\n \n def _load_password_from_keyring(self):\n \"\"\"\n@@ -17,7 +22,22 @@\n \"\"\"\n try:\n keyring = __import__('keyring')\n- self.password = keyring.get_password(self.repository,\n- self.username)\n+ password = keyring.get_password(self.repository, self.username)\n except Exception:\n- pass\n+ password = None\n+ finally:\n+ return password\n+\n+ def _prompt_for_password(self):\n+ \"\"\"\n+ Prompt for a password on the tty. Suppress Exceptions.\n+ \"\"\"\n+ password = None\n+ try:\n+ import getpass\n+ while not password:\n+ password = getpass.getpass()\n+ except (Exception, KeyboardInterrupt):\n+ password = None\n+ finally:\n+ return password\n", "issue": "upload command doesn't prompt for password; raises TypeError\n# Problem statement\n\nIf the `~/.pypirc` file does not contain a password like so:\n\n``` ini\n[distutils]\nindex-servers = \n pypitest\n\n[pypitest]\nrepository = https://testpypi.python.org/pypi\nusername = my_username\n; Note the lack of a password\n```\n\nThen uploading the package\n\n```\npython setup.py sdist upload -r pypitest\n```\n\nFails to prompt the user for his password and instead raises a TypeError (output truncated)\n\n```\nrunning upload\nTraceback (most recent call last):\n File \"setup.py\", line 16, in <module>\n keywords=[\"test\", \"hello\"]\n File \"/usr/lib/python2.7/distutils/core.py\", line 151, in setup\n dist.run_commands()\n File \"/usr/lib/python2.7/distutils/dist.py\", line 953, in run_commands\n self.run_command(cmd)\n File \"/usr/lib/python2.7/distutils/dist.py\", line 972, in run_command\n cmd_obj.run()\n File \"/usr/lib/python2.7/distutils/command/upload.py\", line 60, in run\n self.upload_file(command, pyversion, filename)\n File \"/usr/lib/python2.7/distutils/command/upload.py\", line 135, in upload_file\n self.password)\nTypeError: cannot concatenate 'str' and 'NoneType' objects\n```\n\n**This is different** than the behavior of the `register` command, which prompts the user for a password before continuing.\n\n```\npython setup.py sdist register -r pypitest\n```\n\n(output truncated)\n\n```\nCreating tar archive\nremoving 'HelloPyPi-0.0.1.dev0' (and everything under it)\nrunning register\nPassword: \n```\n\n> Note that the `register` and the `upload` command exhibit the proper behavior **if you store your password in `~/.pypirc`**, but not if the password is omitted.\n# Okay, so...?\n\nI am aware that you can run\n\n```\npython setup.py sdist register -r pypitest upload -r pypitest\n```\n\nAs a workaround, but it stands to reason that **if you can register a package without uploading it, then you should also be able to upload a package without registering it**, regardless of if a password has been specified in your `~/.pypirc` file.\n# Steps to reproduce\n1. Remove your pypi password from `~/.pypirc`\n2. 
Find a project that you wish to upload to a pypi server (I used [my example repository](https://github.com/brookskindle/hellopypi) for this)\n3. Run `python setup.py sdist upload -r target_pypi_server`\n# Setuptools version\n\nsetuptools (20.9.0) -- from `pip list` in my virtualenv\n\n", "before_files": [{"content": "from distutils.command import upload as orig\n\n\nclass upload(orig.upload):\n \"\"\"\n Override default upload behavior to look up password\n in the keyring if available.\n \"\"\"\n\n def finalize_options(self):\n orig.upload.finalize_options(self)\n self.password or self._load_password_from_keyring()\n\n def _load_password_from_keyring(self):\n \"\"\"\n Attempt to load password from keyring. Suppress Exceptions.\n \"\"\"\n try:\n keyring = __import__('keyring')\n self.password = keyring.get_password(self.repository,\n self.username)\n except Exception:\n pass\n", "path": "setuptools/command/upload.py"}], "after_files": [{"content": "from distutils.command import upload as orig\n\n\nclass upload(orig.upload):\n \"\"\"\n Override default upload behavior to obtain password\n in a variety of different ways.\n \"\"\"\n\n def finalize_options(self):\n orig.upload.finalize_options(self)\n # Attempt to obtain password. Short circuit evaluation at the first\n # sign of success.\n self.password = (\n self.password or self._load_password_from_keyring() or\n self._prompt_for_password()\n )\n\n def _load_password_from_keyring(self):\n \"\"\"\n Attempt to load password from keyring. Suppress Exceptions.\n \"\"\"\n try:\n keyring = __import__('keyring')\n password = keyring.get_password(self.repository, self.username)\n except Exception:\n password = None\n finally:\n return password\n\n def _prompt_for_password(self):\n \"\"\"\n Prompt for a password on the tty. Suppress Exceptions.\n \"\"\"\n password = None\n try:\n import getpass\n while not password:\n password = getpass.getpass()\n except (Exception, KeyboardInterrupt):\n password = None\n finally:\n return password\n", "path": "setuptools/command/upload.py"}]} | 1,059 | 353 |
gh_patches_debug_15619 | rasdani/github-patches | git_diff | readthedocs__readthedocs.org-7002 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Better pattern to use PYTEST_OPTIONS
In #4095 we incorporated the usage of `PYTEST_OPTIONS` to define a set of options that are environment-dependent. This way, we can extend/override the options used only in tests from outside (for example, from the corporate repo).
Although I like it, I had to write a hack to detect whether we are running in `readthedocs` or `readthedocsinc`, so as to know which of these options `pytest` has to respect.
The ugly code is at https://github.com/rtfd/readthedocs.org/pull/4095#discussion_r198927773 and we need to find a better pattern for this.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `readthedocs/conftest.py`
Content:
```
1 import pytest
2 from rest_framework.test import APIClient
3
4
5 try:
6 # TODO: this file is read/executed even when called from ``readthedocsinc``,
7 # so it's overriding the options that we are defining in the ``conftest.py``
8 # from the corporate site. We need to find a better way to avoid this.
9 import readthedocsinc
10 PYTEST_OPTIONS = ()
11 except ImportError:
12 PYTEST_OPTIONS = (
13 # Options to set test environment
14 ('community', True),
15 ('corporate', False),
16 ('environment', 'readthedocs'),
17 )
18
19
20 def pytest_configure(config):
21 for option, value in PYTEST_OPTIONS:
22 setattr(config.option, option, value)
23
24
25 @pytest.fixture(autouse=True)
26 def settings_modification(settings):
27 settings.CELERY_ALWAYS_EAGER = True
28
29
30 @pytest.fixture
31 def api_client():
32 return APIClient()
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/readthedocs/conftest.py b/readthedocs/conftest.py
--- a/readthedocs/conftest.py
+++ b/readthedocs/conftest.py
@@ -1,32 +1,6 @@
import pytest
from rest_framework.test import APIClient
-
-try:
- # TODO: this file is read/executed even when called from ``readthedocsinc``,
- # so it's overriding the options that we are defining in the ``conftest.py``
- # from the corporate site. We need to find a better way to avoid this.
- import readthedocsinc
- PYTEST_OPTIONS = ()
-except ImportError:
- PYTEST_OPTIONS = (
- # Options to set test environment
- ('community', True),
- ('corporate', False),
- ('environment', 'readthedocs'),
- )
-
-
-def pytest_configure(config):
- for option, value in PYTEST_OPTIONS:
- setattr(config.option, option, value)
-
-
[email protected](autouse=True)
-def settings_modification(settings):
- settings.CELERY_ALWAYS_EAGER = True
-
-
@pytest.fixture
def api_client():
return APIClient()
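The golden diff simply deletes the cross-repo hack, so for context, here is a sketch of the "better pattern" the issue asks for: each site keeps its own options in its own `conftest.py`, and nothing probes for the other package at import time. The option names below come from the original file, but the structure itself is an illustrative assumption, not actual readthedocs code.
```python
# Hypothetical community-side conftest.py (sketch). The corporate repository
# would ship its own copy of this hook with its own PYTEST_OPTIONS, so
# neither conftest ever has to detect or override the other.
PYTEST_OPTIONS = (
    ('community', True),
    ('corporate', False),
    ('environment', 'readthedocs'),
)


def pytest_configure(config):
    # Apply only this site's options.
    for option, value in PYTEST_OPTIONS:
        setattr(config.option, option, value)
```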
| {"golden_diff": "diff --git a/readthedocs/conftest.py b/readthedocs/conftest.py\n--- a/readthedocs/conftest.py\n+++ b/readthedocs/conftest.py\n@@ -1,32 +1,6 @@\n import pytest\n from rest_framework.test import APIClient\n \n-\n-try:\n- # TODO: this file is read/executed even when called from ``readthedocsinc``,\n- # so it's overriding the options that we are defining in the ``conftest.py``\n- # from the corporate site. We need to find a better way to avoid this.\n- import readthedocsinc\n- PYTEST_OPTIONS = ()\n-except ImportError:\n- PYTEST_OPTIONS = (\n- # Options to set test environment\n- ('community', True),\n- ('corporate', False),\n- ('environment', 'readthedocs'),\n- )\n-\n-\n-def pytest_configure(config):\n- for option, value in PYTEST_OPTIONS:\n- setattr(config.option, option, value)\n-\n-\[email protected](autouse=True)\n-def settings_modification(settings):\n- settings.CELERY_ALWAYS_EAGER = True\n-\n-\n @pytest.fixture\n def api_client():\n return APIClient()\n", "issue": "Better pattern to use PYTEST_OPTIONS\nIn #4095 we incorporate the usage of `PYTEST_OPTIONS` to define a set of options to be environment-dependent. This way, we can extend/override these options used only in tests from outside (for example, corporate repo).\r\n\r\nAlthough I like it, I had to write a hack to know if we are running in `readthedocs` or `readthedocsinc` to know which of these options has to be respected by `pytest`.\r\n\r\nThe ugly code is at https://github.com/rtfd/readthedocs.org/pull/4095#discussion_r198927773 and we need to find a better pattern for this.\n", "before_files": [{"content": "import pytest\nfrom rest_framework.test import APIClient\n\n\ntry:\n # TODO: this file is read/executed even when called from ``readthedocsinc``,\n # so it's overriding the options that we are defining in the ``conftest.py``\n # from the corporate site. We need to find a better way to avoid this.\n import readthedocsinc\n PYTEST_OPTIONS = ()\nexcept ImportError:\n PYTEST_OPTIONS = (\n # Options to set test environment\n ('community', True),\n ('corporate', False),\n ('environment', 'readthedocs'),\n )\n\n\ndef pytest_configure(config):\n for option, value in PYTEST_OPTIONS:\n setattr(config.option, option, value)\n\n\[email protected](autouse=True)\ndef settings_modification(settings):\n settings.CELERY_ALWAYS_EAGER = True\n\n\[email protected]\ndef api_client():\n return APIClient()\n", "path": "readthedocs/conftest.py"}], "after_files": [{"content": "import pytest\nfrom rest_framework.test import APIClient\n\[email protected]\ndef api_client():\n return APIClient()\n", "path": "readthedocs/conftest.py"}]} | 664 | 263 |
gh_patches_debug_5965 | rasdani/github-patches | git_diff | wagtail__wagtail-940 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Dropping Python 3.2 support
Python 3.2 is quite old and many projects are dropping support for it (`libsass` and `treebeard` have both done so already). Should we consider dropping support as well?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 import sys, os
4
5 from wagtail.wagtailcore import __version__
6
7
8 try:
9 from setuptools import setup, find_packages
10 except ImportError:
11 from distutils.core import setup
12
13
14 # Hack to prevent "TypeError: 'NoneType' object is not callable" error
15 # in multiprocessing/util.py _exit_function when setup.py exits
16 # (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
17 try:
18 import multiprocessing
19 except ImportError:
20 pass
21
22
23 # Disable parallel builds, because Pillow 2.5.3 does some crazy monkeypatching of
24 # the build process on multicore systems, which breaks installation of libsass
25 os.environ['MAX_CONCURRENCY'] = '1'
26
27 PY3 = sys.version_info[0] == 3
28
29
30 install_requires = [
31 "Django>=1.7.0,<1.8",
32 "django-compressor>=1.4",
33 "django-libsass>=0.2",
34 "django-modelcluster>=0.4",
35 "django-taggit==0.12.2",
36 "django-treebeard==2.0",
37 "Pillow>=2.6.1",
38 "beautifulsoup4>=4.3.2",
39 "html5lib==0.999",
40 "Unidecode>=0.04.14",
41 "six>=1.7.0",
42 'requests>=2.0.0',
43 "Willow==0.1",
44 ]
45
46
47 if not PY3:
48 install_requires += [
49 "unicodecsv>=0.9.4"
50 ]
51
52
53 setup(
54 name='wagtail',
55 version=__version__,
56 description='A Django content management system focused on flexibility and user experience',
57 author='Matthew Westcott',
58 author_email='[email protected]',
59 url='http://wagtail.io/',
60 packages=find_packages(),
61 include_package_data=True,
62 license='BSD',
63 long_description=open('README.rst').read(),
64 classifiers=[
65 'Development Status :: 5 - Production/Stable',
66 'Environment :: Web Environment',
67 'Intended Audience :: Developers',
68 'License :: OSI Approved :: BSD License',
69 'Operating System :: OS Independent',
70 'Programming Language :: Python',
71 'Programming Language :: Python :: 2',
72 'Programming Language :: Python :: 2.7',
73 'Programming Language :: Python :: 3',
74 'Programming Language :: Python :: 3.2',
75 'Programming Language :: Python :: 3.3',
76 'Programming Language :: Python :: 3.4',
77 'Framework :: Django',
78 'Topic :: Internet :: WWW/HTTP :: Site Management',
79 ],
80 install_requires=install_requires,
81 entry_points="""
82 [console_scripts]
83 wagtail=wagtail.bin.wagtail:main
84 """,
85 zip_safe=False,
86 )
87
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -71,7 +71,6 @@
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
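The patch itself only removes the trove classifier. A companion change that projects often pair with dropping an interpreter version is to enforce the supported range at install time; the snippet below is purely an illustrative assumption, since no such change is part of this diff:
```python
from setuptools import setup

# Hypothetical sketch, not part of the actual patch. Rejects Python 3.0-3.2
# at install time (requires a setuptools recent enough to honor the field).
setup(
    name='wagtail',
    # ... other arguments exactly as in the setup.py shown above ...
    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*',
)
```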
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -71,7 +71,6 @@\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n- 'Programming Language :: Python :: 3.2',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Framework :: Django',\n", "issue": "Dropping Python 3.2 support\nPython 3.2 is quite old and many projects are dropping support for it (`libsass` and `treebeard` both have already). Should we consider dropping support as well?\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\nimport sys, os\n\nfrom wagtail.wagtailcore import __version__\n\n\ntry:\n from setuptools import setup, find_packages\nexcept ImportError:\n from distutils.core import setup\n\n\n# Hack to prevent \"TypeError: 'NoneType' object is not callable\" error\n# in multiprocessing/util.py _exit_function when setup.py exits\n# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)\ntry:\n import multiprocessing\nexcept ImportError:\n pass\n\n\n# Disable parallel builds, because Pillow 2.5.3 does some crazy monkeypatching of\n# the build process on multicore systems, which breaks installation of libsass\nos.environ['MAX_CONCURRENCY'] = '1'\n\nPY3 = sys.version_info[0] == 3\n\n\ninstall_requires = [\n \"Django>=1.7.0,<1.8\",\n \"django-compressor>=1.4\",\n \"django-libsass>=0.2\",\n \"django-modelcluster>=0.4\",\n \"django-taggit==0.12.2\",\n \"django-treebeard==2.0\",\n \"Pillow>=2.6.1\",\n \"beautifulsoup4>=4.3.2\",\n \"html5lib==0.999\",\n \"Unidecode>=0.04.14\",\n \"six>=1.7.0\",\n 'requests>=2.0.0',\n \"Willow==0.1\",\n]\n\n\nif not PY3:\n install_requires += [\n \"unicodecsv>=0.9.4\"\n ]\n\n\nsetup(\n name='wagtail',\n version=__version__,\n description='A Django content management system focused on flexibility and user experience',\n author='Matthew Westcott',\n author_email='[email protected]',\n url='http://wagtail.io/',\n packages=find_packages(),\n include_package_data=True,\n license='BSD',\n long_description=open('README.rst').read(),\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.2',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Framework :: Django',\n 'Topic :: Internet :: WWW/HTTP :: Site Management',\n ],\n install_requires=install_requires,\n entry_points=\"\"\"\n [console_scripts]\n wagtail=wagtail.bin.wagtail:main\n \"\"\",\n zip_safe=False,\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\nimport sys, os\n\nfrom wagtail.wagtailcore import __version__\n\n\ntry:\n from setuptools import setup, find_packages\nexcept ImportError:\n from distutils.core import setup\n\n\n# Hack to prevent \"TypeError: 'NoneType' object is not callable\" error\n# in multiprocessing/util.py _exit_function when setup.py exits\n# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)\ntry:\n import multiprocessing\nexcept ImportError:\n pass\n\n\n# Disable parallel builds, because Pillow 2.5.3 does some crazy monkeypatching of\n# the build process on multicore systems, which breaks installation of 
libsass\nos.environ['MAX_CONCURRENCY'] = '1'\n\nPY3 = sys.version_info[0] == 3\n\n\ninstall_requires = [\n \"Django>=1.7.0,<1.8\",\n \"django-compressor>=1.4\",\n \"django-libsass>=0.2\",\n \"django-modelcluster>=0.4\",\n \"django-taggit==0.12.2\",\n \"django-treebeard==2.0\",\n \"Pillow>=2.6.1\",\n \"beautifulsoup4>=4.3.2\",\n \"html5lib==0.999\",\n \"Unidecode>=0.04.14\",\n \"six>=1.7.0\",\n 'requests>=2.0.0',\n \"Willow==0.1\",\n]\n\n\nif not PY3:\n install_requires += [\n \"unicodecsv>=0.9.4\"\n ]\n\n\nsetup(\n name='wagtail',\n version=__version__,\n description='A Django content management system focused on flexibility and user experience',\n author='Matthew Westcott',\n author_email='[email protected]',\n url='http://wagtail.io/',\n packages=find_packages(),\n include_package_data=True,\n license='BSD',\n long_description=open('README.rst').read(),\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Framework :: Django',\n 'Topic :: Internet :: WWW/HTTP :: Site Management',\n ],\n install_requires=install_requires,\n entry_points=\"\"\"\n [console_scripts]\n wagtail=wagtail.bin.wagtail:main\n \"\"\",\n zip_safe=False,\n)\n", "path": "setup.py"}]} | 1,110 | 107 |
gh_patches_debug_8392 | rasdani/github-patches | git_diff | PokemonGoF__PokemonGo-Bot-5122 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Incense being used when false
### Expected Behavior
The bot should not use incense when `use_incense` is set to false in the config.
### Actual Behavior
The bot uses incense even when it is set to false in the config.
### Your FULL config.json (remove your username, password, gmapkey and any other private info)
http://pastebin.com/YEHMRMiE
### Output when issue occurred
[2016-09-02 15:43:55] [UseIncense] [INFO] [use_incense] Using Ordinary incense. 8 incense remaining
### Steps to Reproduce
Run the bot with incense set to false in the config.
### Other Information
OS: Linux
Branch: Dev
Git Commit: 1cc9da7a79c421f11a4b13359f6a6c1abfcd061a
Python Version: 2.7.12
Any other relevant files/configs (e.g. path files)
config.json
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pokemongo_bot/cell_workers/use_incense.py`
Content:
```
1 import time
2 from pokemongo_bot.base_task import BaseTask
3 from pokemongo_bot.worker_result import WorkerResult
4 from pokemongo_bot.item_list import Item
5 from pokemongo_bot import inventory
6
7 class UseIncense(BaseTask):
8 SUPPORTED_TASK_API_VERSION = 1
9
10 def initialize(self):
11 self.start_time = 0
12 self.use_incense = self.config.get('use_incense', False)
13 self.use_order = self.config.get('use_order', {})
14 self._update_inventory()
15
16 self.types = {
17 401: "Ordinary",
18 402: "Spicy",
19 403: "Cool",
20 404: "Floral"
21 }
22
23 def _get_type(self):
24 for order in self.use_order:
25 if order == "ordinary" and self.incense_ordinary_count > 0:
26 return Item.ITEM_INCENSE_ORDINARY.value
27 if order == "spicy" and self.incense_spicy_count > 0:
28 return Item.ITEM_INCENSE_SPICY.value
29 if order == "cool" and self.incense_cool_count > 0:
30 return Item.ITEM_INCENSE_COOL.value
31 if order == "floral" and self.incense_floral_count > 0:
32 return Item.ITEM_INCENSE_FLORAL.value
33
34 return Item.ITEM_INCENSE_ORDINARY.value
35
36 def _update_inventory(self):
37 self.incense_ordinary_count = inventory.items().get(Item.ITEM_INCENSE_ORDINARY.value).count
38 self.incense_spicy_count = inventory.items().get(Item.ITEM_INCENSE_SPICY.value).count
39 self.incense_cool_count = inventory.items().get(Item.ITEM_INCENSE_COOL.value).count
40 self.incense_floral_count = inventory.items().get(Item.ITEM_INCENSE_FLORAL.value).count
41
42 def _has_count(self):
43 return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0
44
45 def _should_run(self):
46 if self._has_count() > 0 and self.start_time == 0:
47 return True
48
49 using_incense = time.time() - self.start_time < 1800
50 if not using_incense:
51 self._update_inventory()
52 if self._has_count() and self.use_incense:
53 return True
54
55 def work(self):
56 if self._should_run():
57 self.start_time = time.time()
58 type = self._get_type()
59 response_dict = self.bot.api.use_incense(incense_type=type)
60 result = response_dict.get('responses', {}).get('USE_INCENSE', {}).get('result', 0)
61 if result is 1:
62 self.emit_event(
63 'use_incense',
64 formatted="Using {type} incense. {incense_count} incense remaining",
65 data={
66 'type': self.types.get(type, 'Unknown'),
67 'incense_count': inventory.items().get(type).count
68 }
69 )
70 else:
71 self.emit_event(
72 'use_incense',
73 formatted="Unable to use incense {type}. {incense_count} incense remaining",
74 data={
75 'type': self.types.get(type, 'Unknown'),
76 'incense_count': inventory.items().get(type).count
77 }
78 )
79
80 return WorkerResult.SUCCESS
81
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pokemongo_bot/cell_workers/use_incense.py b/pokemongo_bot/cell_workers/use_incense.py
--- a/pokemongo_bot/cell_workers/use_incense.py
+++ b/pokemongo_bot/cell_workers/use_incense.py
@@ -42,7 +42,10 @@
def _has_count(self):
return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0
- def _should_run(self):
+ def _should_run(self):
+ if not self.use_incense:
+ return False
+
if self._has_count() > 0 and self.start_time == 0:
return True
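To see why the early return fixes the report, here is a self-contained sketch of the patched guard logic; the class below is a hypothetical stand-in for a configured `UseIncense` instance, not code from the repository.
```python
# Minimal model of the patched _should_run guard (names mirror the diff).
class FakeIncenseTask:
    use_incense = False   # as set by "use_incense": false in config.json
    start_time = 0

    def _has_count(self):
        return True       # pretend incense is available in the inventory

    def _should_run(self):
        if not self.use_incense:   # the new early return from the patch
            return False
        if self._has_count() and self.start_time == 0:
            return True

assert FakeIncenseTask()._should_run() is False  # incense is never consumed
```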
| {"golden_diff": "diff --git a/pokemongo_bot/cell_workers/use_incense.py b/pokemongo_bot/cell_workers/use_incense.py\n--- a/pokemongo_bot/cell_workers/use_incense.py\n+++ b/pokemongo_bot/cell_workers/use_incense.py\n@@ -42,7 +42,10 @@\n def _has_count(self):\n return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0\n \n- def _should_run(self): \n+ def _should_run(self):\n+ if not self.use_incense:\n+ return False\n+\n if self._has_count() > 0 and self.start_time == 0:\n return True\n", "issue": "Incense being used when false\n### Expected Behavior\n\nDon't use Incense when set to false in config\n### Actual Behavior\n\nBot using incense when set to false in config\n### Your FULL config.json (remove your username, password, gmapkey and any other private info)\n\nhttp://pastebin.com/YEHMRMiE\n### Output when issue occurred\n\n[2016-09-02 15:43:55] [UseIncense] [INFO] [use_incense] Using Ordinary incense. 8 incense remaining\n### Steps to Reproduce\n\nRun bot with Incense false in config\n### Other Information\n\nOS: Linux\nBranch: Dev\nGit Commit: 1cc9da7a79c421f11a4b13359f6a6c1abfcd061a\nPython Version: 2.7.12\nAny other relevant files/configs (eg: path files) \nconfig.json\n\n", "before_files": [{"content": "import time\nfrom pokemongo_bot.base_task import BaseTask\nfrom pokemongo_bot.worker_result import WorkerResult\nfrom pokemongo_bot.item_list import Item\nfrom pokemongo_bot import inventory\n\nclass UseIncense(BaseTask):\n SUPPORTED_TASK_API_VERSION = 1\n\n def initialize(self):\n self.start_time = 0\n self.use_incense = self.config.get('use_incense', False)\n self.use_order = self.config.get('use_order', {})\n self._update_inventory()\n \n self.types = {\n 401: \"Ordinary\",\n 402: \"Spicy\",\n 403: \"Cool\",\n 404: \"Floral\"\n }\n \n def _get_type(self):\n for order in self.use_order:\n if order == \"ordinary\" and self.incense_ordinary_count > 0:\n return Item.ITEM_INCENSE_ORDINARY.value\n if order == \"spicy\" and self.incense_spicy_count > 0:\n return Item.ITEM_INCENSE_SPICY.value\n if order == \"cool\" and self.incense_cool_count > 0:\n return Item.ITEM_INCENSE_COOL.value\n if order == \"floral\" and self.incense_floral_count > 0:\n return Item.ITEM_INCENSE_FLORAL.value\n \n return Item.ITEM_INCENSE_ORDINARY.value \n \n def _update_inventory(self):\n self.incense_ordinary_count = inventory.items().get(Item.ITEM_INCENSE_ORDINARY.value).count \n self.incense_spicy_count = inventory.items().get(Item.ITEM_INCENSE_SPICY.value).count\n self.incense_cool_count = inventory.items().get(Item.ITEM_INCENSE_COOL.value).count \n self.incense_floral_count = inventory.items().get(Item.ITEM_INCENSE_FLORAL.value).count \n \n def _has_count(self):\n return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0\n \n def _should_run(self): \n if self._has_count() > 0 and self.start_time == 0:\n return True \n \n using_incense = time.time() - self.start_time < 1800\n if not using_incense: \n self._update_inventory()\n if self._has_count() and self.use_incense:\n return True\n\n def work(self):\n if self._should_run():\n self.start_time = time.time()\n type = self._get_type() \n response_dict = self.bot.api.use_incense(incense_type=type)\n result = response_dict.get('responses', {}).get('USE_INCENSE', {}).get('result', 0)\n if result is 1:\n self.emit_event(\n 'use_incense',\n formatted=\"Using {type} incense. 
{incense_count} incense remaining\",\n data={\n 'type': self.types.get(type, 'Unknown'),\n 'incense_count': inventory.items().get(type).count\n }\n )\n else:\n self.emit_event(\n 'use_incense',\n formatted=\"Unable to use incense {type}. {incense_count} incense remaining\",\n data={\n 'type': self.types.get(type, 'Unknown'),\n 'incense_count': inventory.items().get(type).count\n }\n )\n \n return WorkerResult.SUCCESS\n", "path": "pokemongo_bot/cell_workers/use_incense.py"}], "after_files": [{"content": "import time\nfrom pokemongo_bot.base_task import BaseTask\nfrom pokemongo_bot.worker_result import WorkerResult\nfrom pokemongo_bot.item_list import Item\nfrom pokemongo_bot import inventory\n\nclass UseIncense(BaseTask):\n SUPPORTED_TASK_API_VERSION = 1\n\n def initialize(self):\n self.start_time = 0\n self.use_incense = self.config.get('use_incense', False)\n self.use_order = self.config.get('use_order', {})\n self._update_inventory()\n \n self.types = {\n 401: \"Ordinary\",\n 402: \"Spicy\",\n 403: \"Cool\",\n 404: \"Floral\"\n }\n \n def _get_type(self):\n for order in self.use_order:\n if order == \"ordinary\" and self.incense_ordinary_count > 0:\n return Item.ITEM_INCENSE_ORDINARY.value\n if order == \"spicy\" and self.incense_spicy_count > 0:\n return Item.ITEM_INCENSE_SPICY.value\n if order == \"cool\" and self.incense_cool_count > 0:\n return Item.ITEM_INCENSE_COOL.value\n if order == \"floral\" and self.incense_floral_count > 0:\n return Item.ITEM_INCENSE_FLORAL.value\n \n return Item.ITEM_INCENSE_ORDINARY.value \n \n def _update_inventory(self):\n self.incense_ordinary_count = inventory.items().get(Item.ITEM_INCENSE_ORDINARY.value).count \n self.incense_spicy_count = inventory.items().get(Item.ITEM_INCENSE_SPICY.value).count\n self.incense_cool_count = inventory.items().get(Item.ITEM_INCENSE_COOL.value).count \n self.incense_floral_count = inventory.items().get(Item.ITEM_INCENSE_FLORAL.value).count \n \n def _has_count(self):\n return self.incense_ordinary_count > 0 or self.incense_spicy_count > 0 or self.incense_cool_count > 0 or self.incense_floral_count > 0\n \n def _should_run(self):\n if not self.use_incense:\n return False\n\n if self._has_count() > 0 and self.start_time == 0:\n return True \n \n using_incense = time.time() - self.start_time < 1800\n if not using_incense: \n self._update_inventory()\n if self._has_count() and self.use_incense:\n return True\n\n def work(self):\n if self._should_run():\n self.start_time = time.time()\n type = self._get_type() \n response_dict = self.bot.api.use_incense(incense_type=type)\n result = response_dict.get('responses', {}).get('USE_INCENSE', {}).get('result', 0)\n if result is 1:\n self.emit_event(\n 'use_incense',\n formatted=\"Using {type} incense. {incense_count} incense remaining\",\n data={\n 'type': self.types.get(type, 'Unknown'),\n 'incense_count': inventory.items().get(type).count\n }\n )\n else:\n self.emit_event(\n 'use_incense',\n formatted=\"Unable to use incense {type}. {incense_count} incense remaining\",\n data={\n 'type': self.types.get(type, 'Unknown'),\n 'incense_count': inventory.items().get(type).count\n }\n )\n \n return WorkerResult.SUCCESS\n", "path": "pokemongo_bot/cell_workers/use_incense.py"}]} | 1,378 | 177 |
gh_patches_debug_32415 | rasdani/github-patches | git_diff | vllm-project__vllm-4368 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Feature]: Cannot use FlashAttention backend for Volta and Turing GPUs. (but FlashAttention v1.0.9 supports Turing GPU.)
### 🚀 The feature, motivation and pitch
Turing GPUs can use FlashAttention v1.0.9, which reduces VRAM usage significantly.
FlashAttention actually has no plan to support Turing GPUs in FlashAttention v2,
so please support FlashAttention v1.0.9. Thanks a lot!
Many users with 8x 2080 Ti machines need this.
### Alternatives
_No response_
### Additional context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `vllm/attention/selector.py`
Content:
```
1 import enum
2 import os
3 from functools import lru_cache
4 from typing import Type
5
6 import torch
7
8 from vllm.attention.backends.abstract import AttentionBackend
9 from vllm.logger import init_logger
10 from vllm.utils import is_cpu, is_hip
11
12 logger = init_logger(__name__)
13
14 VLLM_ATTENTION_BACKEND = "VLLM_ATTENTION_BACKEND"
15
16
17 class _Backend(enum.Enum):
18 FLASH_ATTN = enum.auto()
19 XFORMERS = enum.auto()
20 ROCM_FLASH = enum.auto()
21 TORCH_SDPA = enum.auto()
22
23
24 @lru_cache(maxsize=None)
25 def get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:
26 backend = _which_attn_to_use(dtype)
27 if backend == _Backend.FLASH_ATTN:
28 logger.info("Using FlashAttention backend.")
29 from vllm.attention.backends.flash_attn import ( # noqa: F401
30 FlashAttentionBackend)
31 return FlashAttentionBackend
32 elif backend == _Backend.XFORMERS:
33 logger.info("Using XFormers backend.")
34 from vllm.attention.backends.xformers import ( # noqa: F401
35 XFormersBackend)
36 return XFormersBackend
37 elif backend == _Backend.ROCM_FLASH:
38 logger.info("Using ROCmFlashAttention backend.")
39 from vllm.attention.backends.rocm_flash_attn import ( # noqa: F401
40 ROCmFlashAttentionBackend)
41 return ROCmFlashAttentionBackend
42 elif backend == _Backend.TORCH_SDPA:
43 logger.info("Using Torch SDPA backend.")
44 from vllm.attention.backends.torch_sdpa import TorchSDPABackend
45 return TorchSDPABackend
46 else:
47 raise ValueError("Invalid attention backend.")
48
49
50 def _which_attn_to_use(dtype: torch.dtype) -> _Backend:
51 """Returns which flash attention backend to use."""
52 if is_cpu():
53 return _Backend.TORCH_SDPA
54
55 if is_hip():
56 # AMD GPUs.
57 if torch.cuda.get_device_capability()[0] != 9:
58 # not Instinct series GPUs.
59 logger.info("flash_atten is not supported on NAVI GPUs.")
60 return _Backend.ROCM_FLASH
61
62 # NVIDIA GPUs.
63 if torch.cuda.get_device_capability()[0] < 8:
64 # Volta and Turing NVIDIA GPUs.
65 logger.info("Cannot use FlashAttention backend for Volta and Turing "
66 "GPUs.")
67 return _Backend.XFORMERS
68
69 if dtype not in (torch.float16, torch.bfloat16):
70 logger.info("Cannot use FlashAttention backend for dtype other than "
71 "torch.float16 or torch.bfloat16.")
72 return _Backend.XFORMERS
73
74 try:
75 import flash_attn # noqa: F401
76 except ImportError:
77 logger.info(
78 "Cannot use FlashAttention backend because the flash_attn package "
79 "is not found. Please install it for better performance.")
80 return _Backend.XFORMERS
81
82 backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)
83 if backend_by_env_var is not None:
84 return _Backend[backend_by_env_var]
85
86 # Default case.
87 return _Backend.FLASH_ATTN
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/vllm/attention/selector.py b/vllm/attention/selector.py
--- a/vllm/attention/selector.py
+++ b/vllm/attention/selector.py
@@ -25,7 +25,7 @@
def get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:
backend = _which_attn_to_use(dtype)
if backend == _Backend.FLASH_ATTN:
- logger.info("Using FlashAttention backend.")
+ logger.info("Using FlashAttention-2 backend.")
from vllm.attention.backends.flash_attn import ( # noqa: F401
FlashAttentionBackend)
return FlashAttentionBackend
@@ -62,12 +62,12 @@
# NVIDIA GPUs.
if torch.cuda.get_device_capability()[0] < 8:
# Volta and Turing NVIDIA GPUs.
- logger.info("Cannot use FlashAttention backend for Volta and Turing "
+ logger.info("Cannot use FlashAttention-2 backend for Volta and Turing "
"GPUs.")
return _Backend.XFORMERS
if dtype not in (torch.float16, torch.bfloat16):
- logger.info("Cannot use FlashAttention backend for dtype other than "
+ logger.info("Cannot use FlashAttention-2 backend for dtype other than "
"torch.float16 or torch.bfloat16.")
return _Backend.XFORMERS
@@ -75,8 +75,8 @@
import flash_attn # noqa: F401
except ImportError:
logger.info(
- "Cannot use FlashAttention backend because the flash_attn package "
- "is not found. Please install it for better performance.")
+ "Cannot use FlashAttention-2 backend because the flash_attn "
+ "package is not found. Please install it for better performance.")
return _Backend.XFORMERS
backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)
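Since the patch only rewords the log messages, backend choice is unchanged: Volta and Turing still fall back to xFormers automatically. For completeness, the selector also honors an explicit override (consulted after the hardware and dtype checks). A usage sketch follows; the variable name is taken from the file above, and the value must name a `_Backend` member:
```python
import os

# Explicitly pick a backend; _which_attn_to_use resolves this string via
# _Backend["XFORMERS"] once the hardware/dtype checks have passed.
os.environ["VLLM_ATTENTION_BACKEND"] = "XFORMERS"
```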
| {"golden_diff": "diff --git a/vllm/attention/selector.py b/vllm/attention/selector.py\n--- a/vllm/attention/selector.py\n+++ b/vllm/attention/selector.py\n@@ -25,7 +25,7 @@\n def get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:\n backend = _which_attn_to_use(dtype)\n if backend == _Backend.FLASH_ATTN:\n- logger.info(\"Using FlashAttention backend.\")\n+ logger.info(\"Using FlashAttention-2 backend.\")\n from vllm.attention.backends.flash_attn import ( # noqa: F401\n FlashAttentionBackend)\n return FlashAttentionBackend\n@@ -62,12 +62,12 @@\n # NVIDIA GPUs.\n if torch.cuda.get_device_capability()[0] < 8:\n # Volta and Turing NVIDIA GPUs.\n- logger.info(\"Cannot use FlashAttention backend for Volta and Turing \"\n+ logger.info(\"Cannot use FlashAttention-2 backend for Volta and Turing \"\n \"GPUs.\")\n return _Backend.XFORMERS\n \n if dtype not in (torch.float16, torch.bfloat16):\n- logger.info(\"Cannot use FlashAttention backend for dtype other than \"\n+ logger.info(\"Cannot use FlashAttention-2 backend for dtype other than \"\n \"torch.float16 or torch.bfloat16.\")\n return _Backend.XFORMERS\n \n@@ -75,8 +75,8 @@\n import flash_attn # noqa: F401\n except ImportError:\n logger.info(\n- \"Cannot use FlashAttention backend because the flash_attn package \"\n- \"is not found. Please install it for better performance.\")\n+ \"Cannot use FlashAttention-2 backend because the flash_attn \"\n+ \"package is not found. Please install it for better performance.\")\n return _Backend.XFORMERS\n \n backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)\n", "issue": "[Feature]: Cannot use FlashAttention backend for Volta and Turing GPUs. (but FlashAttention v1.0.9 supports Turing GPU.)\n### \ud83d\ude80 The feature, motivation and pitch\r\n\r\nTuring GPU can use FlashAttention v1.0.9 which can reduce use of vram significantly.\r\n\r\nFlashAttention has no plan to support Turing GPU in FlashAttention v2 actually.\r\nso please support FlashAttention v1.0.9. 
thanks a lot!\r\n\r\nmany friends having 8*2080ti need this help.\r\n\r\n### Alternatives\r\n\r\n_No response_\r\n\r\n### Additional context\r\n\r\n_No response_\n", "before_files": [{"content": "import enum\nimport os\nfrom functools import lru_cache\nfrom typing import Type\n\nimport torch\n\nfrom vllm.attention.backends.abstract import AttentionBackend\nfrom vllm.logger import init_logger\nfrom vllm.utils import is_cpu, is_hip\n\nlogger = init_logger(__name__)\n\nVLLM_ATTENTION_BACKEND = \"VLLM_ATTENTION_BACKEND\"\n\n\nclass _Backend(enum.Enum):\n FLASH_ATTN = enum.auto()\n XFORMERS = enum.auto()\n ROCM_FLASH = enum.auto()\n TORCH_SDPA = enum.auto()\n\n\n@lru_cache(maxsize=None)\ndef get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:\n backend = _which_attn_to_use(dtype)\n if backend == _Backend.FLASH_ATTN:\n logger.info(\"Using FlashAttention backend.\")\n from vllm.attention.backends.flash_attn import ( # noqa: F401\n FlashAttentionBackend)\n return FlashAttentionBackend\n elif backend == _Backend.XFORMERS:\n logger.info(\"Using XFormers backend.\")\n from vllm.attention.backends.xformers import ( # noqa: F401\n XFormersBackend)\n return XFormersBackend\n elif backend == _Backend.ROCM_FLASH:\n logger.info(\"Using ROCmFlashAttention backend.\")\n from vllm.attention.backends.rocm_flash_attn import ( # noqa: F401\n ROCmFlashAttentionBackend)\n return ROCmFlashAttentionBackend\n elif backend == _Backend.TORCH_SDPA:\n logger.info(\"Using Torch SDPA backend.\")\n from vllm.attention.backends.torch_sdpa import TorchSDPABackend\n return TorchSDPABackend\n else:\n raise ValueError(\"Invalid attention backend.\")\n\n\ndef _which_attn_to_use(dtype: torch.dtype) -> _Backend:\n \"\"\"Returns which flash attention backend to use.\"\"\"\n if is_cpu():\n return _Backend.TORCH_SDPA\n\n if is_hip():\n # AMD GPUs.\n if torch.cuda.get_device_capability()[0] != 9:\n # not Instinct series GPUs.\n logger.info(\"flash_atten is not supported on NAVI GPUs.\")\n return _Backend.ROCM_FLASH\n\n # NVIDIA GPUs.\n if torch.cuda.get_device_capability()[0] < 8:\n # Volta and Turing NVIDIA GPUs.\n logger.info(\"Cannot use FlashAttention backend for Volta and Turing \"\n \"GPUs.\")\n return _Backend.XFORMERS\n\n if dtype not in (torch.float16, torch.bfloat16):\n logger.info(\"Cannot use FlashAttention backend for dtype other than \"\n \"torch.float16 or torch.bfloat16.\")\n return _Backend.XFORMERS\n\n try:\n import flash_attn # noqa: F401\n except ImportError:\n logger.info(\n \"Cannot use FlashAttention backend because the flash_attn package \"\n \"is not found. 
Please install it for better performance.\")\n return _Backend.XFORMERS\n\n backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)\n if backend_by_env_var is not None:\n return _Backend[backend_by_env_var]\n\n # Default case.\n return _Backend.FLASH_ATTN\n", "path": "vllm/attention/selector.py"}], "after_files": [{"content": "import enum\nimport os\nfrom functools import lru_cache\nfrom typing import Type\n\nimport torch\n\nfrom vllm.attention.backends.abstract import AttentionBackend\nfrom vllm.logger import init_logger\nfrom vllm.utils import is_cpu, is_hip\n\nlogger = init_logger(__name__)\n\nVLLM_ATTENTION_BACKEND = \"VLLM_ATTENTION_BACKEND\"\n\n\nclass _Backend(enum.Enum):\n FLASH_ATTN = enum.auto()\n XFORMERS = enum.auto()\n ROCM_FLASH = enum.auto()\n TORCH_SDPA = enum.auto()\n\n\n@lru_cache(maxsize=None)\ndef get_attn_backend(dtype: torch.dtype) -> Type[AttentionBackend]:\n backend = _which_attn_to_use(dtype)\n if backend == _Backend.FLASH_ATTN:\n logger.info(\"Using FlashAttention-2 backend.\")\n from vllm.attention.backends.flash_attn import ( # noqa: F401\n FlashAttentionBackend)\n return FlashAttentionBackend\n elif backend == _Backend.XFORMERS:\n logger.info(\"Using XFormers backend.\")\n from vllm.attention.backends.xformers import ( # noqa: F401\n XFormersBackend)\n return XFormersBackend\n elif backend == _Backend.ROCM_FLASH:\n logger.info(\"Using ROCmFlashAttention backend.\")\n from vllm.attention.backends.rocm_flash_attn import ( # noqa: F401\n ROCmFlashAttentionBackend)\n return ROCmFlashAttentionBackend\n elif backend == _Backend.TORCH_SDPA:\n logger.info(\"Using Torch SDPA backend.\")\n from vllm.attention.backends.torch_sdpa import TorchSDPABackend\n return TorchSDPABackend\n else:\n raise ValueError(\"Invalid attention backend.\")\n\n\ndef _which_attn_to_use(dtype: torch.dtype) -> _Backend:\n \"\"\"Returns which flash attention backend to use.\"\"\"\n if is_cpu():\n return _Backend.TORCH_SDPA\n\n if is_hip():\n # AMD GPUs.\n if torch.cuda.get_device_capability()[0] != 9:\n # not Instinct series GPUs.\n logger.info(\"flash_atten is not supported on NAVI GPUs.\")\n return _Backend.ROCM_FLASH\n\n # NVIDIA GPUs.\n if torch.cuda.get_device_capability()[0] < 8:\n # Volta and Turing NVIDIA GPUs.\n logger.info(\"Cannot use FlashAttention-2 backend for Volta and Turing \"\n \"GPUs.\")\n return _Backend.XFORMERS\n\n if dtype not in (torch.float16, torch.bfloat16):\n logger.info(\"Cannot use FlashAttention-2 backend for dtype other than \"\n \"torch.float16 or torch.bfloat16.\")\n return _Backend.XFORMERS\n\n try:\n import flash_attn # noqa: F401\n except ImportError:\n logger.info(\n \"Cannot use FlashAttention-2 backend because the flash_attn \"\n \"package is not found. Please install it for better performance.\")\n return _Backend.XFORMERS\n\n backend_by_env_var = os.getenv(VLLM_ATTENTION_BACKEND)\n if backend_by_env_var is not None:\n return _Backend[backend_by_env_var]\n\n # Default case.\n return _Backend.FLASH_ATTN\n", "path": "vllm/attention/selector.py"}]} | 1,268 | 426 |
gh_patches_debug_22037 | rasdani/github-patches | git_diff | netbox-community__netbox-9826 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add Contacts field to Virtual Machines table view
### NetBox version
v3.2.7
### Feature type
Change to existing functionality
### Proposed functionality
I would suggest adding a Contacts field to the Virtual Machines table view/export, similar to what we have in Devices.
Currently, in Devices, it is possible to select "Contacts" as a column in the "Configure Table" dialog, but that column is not available in Virtual Machines.
### Use case
When browsing through or exporting Virtual Machines, it would be nice to be able to see who the owner/contact is.
### Database changes
_No response_
### External dependencies
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `netbox/virtualization/tables/virtualmachines.py`
Content:
```
1 import django_tables2 as tables
2
3 from dcim.tables.devices import BaseInterfaceTable
4 from netbox.tables import NetBoxTable, columns
5 from tenancy.tables import TenancyColumnsMixin
6 from virtualization.models import VirtualMachine, VMInterface
7
8 __all__ = (
9 'VirtualMachineTable',
10 'VirtualMachineVMInterfaceTable',
11 'VMInterfaceTable',
12 )
13
14 VMINTERFACE_BUTTONS = """
15 {% if perms.ipam.add_ipaddress %}
16 <a href="{% url 'ipam:ipaddress_add' %}?vminterface={{ record.pk }}&return_url={% url 'virtualization:virtualmachine_interfaces' pk=object.pk %}" class="btn btn-sm btn-success" title="Add IP Address">
17 <i class="mdi mdi-plus-thick" aria-hidden="true"></i>
18 </a>
19 {% endif %}
20 """
21
22
23 #
24 # Virtual machines
25 #
26
27 class VirtualMachineTable(TenancyColumnsMixin, NetBoxTable):
28 name = tables.Column(
29 order_by=('_name',),
30 linkify=True
31 )
32 status = columns.ChoiceFieldColumn()
33 cluster = tables.Column(
34 linkify=True
35 )
36 role = columns.ColoredLabelColumn()
37 comments = columns.MarkdownColumn()
38 primary_ip4 = tables.Column(
39 linkify=True,
40 verbose_name='IPv4 Address'
41 )
42 primary_ip6 = tables.Column(
43 linkify=True,
44 verbose_name='IPv6 Address'
45 )
46 primary_ip = tables.Column(
47 linkify=True,
48 order_by=('primary_ip4', 'primary_ip6'),
49 verbose_name='IP Address'
50 )
51 tags = columns.TagColumn(
52 url_name='virtualization:virtualmachine_list'
53 )
54
55 class Meta(NetBoxTable.Meta):
56 model = VirtualMachine
57 fields = (
58 'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',
59 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'tags', 'created', 'last_updated',
60 )
61 default_columns = (
62 'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',
63 )
64
65
66 #
67 # VM components
68 #
69
70 class VMInterfaceTable(BaseInterfaceTable):
71 virtual_machine = tables.Column(
72 linkify=True
73 )
74 name = tables.Column(
75 linkify=True
76 )
77 vrf = tables.Column(
78 linkify=True
79 )
80 contacts = columns.ManyToManyColumn(
81 linkify_item=True
82 )
83 tags = columns.TagColumn(
84 url_name='virtualization:vminterface_list'
85 )
86
87 class Meta(NetBoxTable.Meta):
88 model = VMInterface
89 fields = (
90 'pk', 'id', 'name', 'virtual_machine', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'tags',
91 'vrf', 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'contacts', 'created',
92 'last_updated',
93 )
94 default_columns = ('pk', 'name', 'virtual_machine', 'enabled', 'description')
95
96
97 class VirtualMachineVMInterfaceTable(VMInterfaceTable):
98 parent = tables.Column(
99 linkify=True
100 )
101 bridge = tables.Column(
102 linkify=True
103 )
104 actions = columns.ActionsColumn(
105 actions=('edit', 'delete'),
106 extra_buttons=VMINTERFACE_BUTTONS
107 )
108
109 class Meta(NetBoxTable.Meta):
110 model = VMInterface
111 fields = (
112 'pk', 'id', 'name', 'enabled', 'parent', 'bridge', 'mac_address', 'mtu', 'mode', 'description', 'tags',
113 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'actions',
114 )
115 default_columns = ('pk', 'name', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'ip_addresses')
116 row_attrs = {
117 'data-name': lambda record: record.name,
118 }
119
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/netbox/virtualization/tables/virtualmachines.py b/netbox/virtualization/tables/virtualmachines.py
--- a/netbox/virtualization/tables/virtualmachines.py
+++ b/netbox/virtualization/tables/virtualmachines.py
@@ -48,6 +48,9 @@
order_by=('primary_ip4', 'primary_ip6'),
verbose_name='IP Address'
)
+ contacts = columns.ManyToManyColumn(
+ linkify_item=True
+ )
tags = columns.TagColumn(
url_name='virtualization:virtualmachine_list'
)
@@ -56,7 +59,7 @@
model = VirtualMachine
fields = (
'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',
- 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'tags', 'created', 'last_updated',
+ 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'contacts', 'tags', 'created', 'last_updated',
)
default_columns = (
'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',
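The fix reuses the column definition that `VMInterfaceTable` in the same file already declares; isolated as a sketch, the pattern is:
```python
from netbox.tables import NetBoxTable, columns

class ExampleVMTable(NetBoxTable):  # hypothetical minimal table
    # Expose a many-to-many relation as a column; linkify_item renders
    # each related contact as a hyperlink.
    contacts = columns.ManyToManyColumn(
        linkify_item=True
    )
```
Note that the diff appends `'contacts'` to `fields` but leaves `default_columns` untouched, so the column stays opt-in through "Configure Table", which is exactly the behavior the issue requests.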
| {"golden_diff": "diff --git a/netbox/virtualization/tables/virtualmachines.py b/netbox/virtualization/tables/virtualmachines.py\n--- a/netbox/virtualization/tables/virtualmachines.py\n+++ b/netbox/virtualization/tables/virtualmachines.py\n@@ -48,6 +48,9 @@\n order_by=('primary_ip4', 'primary_ip6'),\n verbose_name='IP Address'\n )\n+ contacts = columns.ManyToManyColumn(\n+ linkify_item=True\n+ )\n tags = columns.TagColumn(\n url_name='virtualization:virtualmachine_list'\n )\n@@ -56,7 +59,7 @@\n model = VirtualMachine\n fields = (\n 'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',\n- 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'tags', 'created', 'last_updated',\n+ 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'contacts', 'tags', 'created', 'last_updated',\n )\n default_columns = (\n 'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',\n", "issue": "Add Contacts field to Virtual Machines table view\n### NetBox version\n\nv3.2.7\n\n### Feature type\n\nChange to existing functionality\n\n### Proposed functionality\n\nI would suggest to add contacts field to Virtual Machines table view/export, similarly to what we have in Devices. \r\nCurrently in Devices in the \"Configure Table\" it's possible to select \"Contacts\" as a column, but it's not available in Virtual Machines. \n\n### Use case\n\nWhen browsing through or exporting Virtual Machines it would be nice to be able to see who the owner/contact is. \n\n### Database changes\n\n_No response_\n\n### External dependencies\n\n_No response_\n", "before_files": [{"content": "import django_tables2 as tables\n\nfrom dcim.tables.devices import BaseInterfaceTable\nfrom netbox.tables import NetBoxTable, columns\nfrom tenancy.tables import TenancyColumnsMixin\nfrom virtualization.models import VirtualMachine, VMInterface\n\n__all__ = (\n 'VirtualMachineTable',\n 'VirtualMachineVMInterfaceTable',\n 'VMInterfaceTable',\n)\n\nVMINTERFACE_BUTTONS = \"\"\"\n{% if perms.ipam.add_ipaddress %}\n <a href=\"{% url 'ipam:ipaddress_add' %}?vminterface={{ record.pk }}&return_url={% url 'virtualization:virtualmachine_interfaces' pk=object.pk %}\" class=\"btn btn-sm btn-success\" title=\"Add IP Address\">\n <i class=\"mdi mdi-plus-thick\" aria-hidden=\"true\"></i>\n </a>\n{% endif %}\n\"\"\"\n\n\n#\n# Virtual machines\n#\n\nclass VirtualMachineTable(TenancyColumnsMixin, NetBoxTable):\n name = tables.Column(\n order_by=('_name',),\n linkify=True\n )\n status = columns.ChoiceFieldColumn()\n cluster = tables.Column(\n linkify=True\n )\n role = columns.ColoredLabelColumn()\n comments = columns.MarkdownColumn()\n primary_ip4 = tables.Column(\n linkify=True,\n verbose_name='IPv4 Address'\n )\n primary_ip6 = tables.Column(\n linkify=True,\n verbose_name='IPv6 Address'\n )\n primary_ip = tables.Column(\n linkify=True,\n order_by=('primary_ip4', 'primary_ip6'),\n verbose_name='IP Address'\n )\n tags = columns.TagColumn(\n url_name='virtualization:virtualmachine_list'\n )\n\n class Meta(NetBoxTable.Meta):\n model = VirtualMachine\n fields = (\n 'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',\n 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'tags', 'created', 'last_updated',\n )\n default_columns = (\n 'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',\n )\n\n\n#\n# VM components\n#\n\nclass VMInterfaceTable(BaseInterfaceTable):\n 
virtual_machine = tables.Column(\n linkify=True\n )\n name = tables.Column(\n linkify=True\n )\n vrf = tables.Column(\n linkify=True\n )\n contacts = columns.ManyToManyColumn(\n linkify_item=True\n )\n tags = columns.TagColumn(\n url_name='virtualization:vminterface_list'\n )\n\n class Meta(NetBoxTable.Meta):\n model = VMInterface\n fields = (\n 'pk', 'id', 'name', 'virtual_machine', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'tags',\n 'vrf', 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'contacts', 'created',\n 'last_updated',\n )\n default_columns = ('pk', 'name', 'virtual_machine', 'enabled', 'description')\n\n\nclass VirtualMachineVMInterfaceTable(VMInterfaceTable):\n parent = tables.Column(\n linkify=True\n )\n bridge = tables.Column(\n linkify=True\n )\n actions = columns.ActionsColumn(\n actions=('edit', 'delete'),\n extra_buttons=VMINTERFACE_BUTTONS\n )\n\n class Meta(NetBoxTable.Meta):\n model = VMInterface\n fields = (\n 'pk', 'id', 'name', 'enabled', 'parent', 'bridge', 'mac_address', 'mtu', 'mode', 'description', 'tags',\n 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'actions',\n )\n default_columns = ('pk', 'name', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'ip_addresses')\n row_attrs = {\n 'data-name': lambda record: record.name,\n }\n", "path": "netbox/virtualization/tables/virtualmachines.py"}], "after_files": [{"content": "import django_tables2 as tables\n\nfrom dcim.tables.devices import BaseInterfaceTable\nfrom netbox.tables import NetBoxTable, columns\nfrom tenancy.tables import TenancyColumnsMixin\nfrom virtualization.models import VirtualMachine, VMInterface\n\n__all__ = (\n 'VirtualMachineTable',\n 'VirtualMachineVMInterfaceTable',\n 'VMInterfaceTable',\n)\n\nVMINTERFACE_BUTTONS = \"\"\"\n{% if perms.ipam.add_ipaddress %}\n <a href=\"{% url 'ipam:ipaddress_add' %}?vminterface={{ record.pk }}&return_url={% url 'virtualization:virtualmachine_interfaces' pk=object.pk %}\" class=\"btn btn-sm btn-success\" title=\"Add IP Address\">\n <i class=\"mdi mdi-plus-thick\" aria-hidden=\"true\"></i>\n </a>\n{% endif %}\n\"\"\"\n\n\n#\n# Virtual machines\n#\n\nclass VirtualMachineTable(TenancyColumnsMixin, NetBoxTable):\n name = tables.Column(\n order_by=('_name',),\n linkify=True\n )\n status = columns.ChoiceFieldColumn()\n cluster = tables.Column(\n linkify=True\n )\n role = columns.ColoredLabelColumn()\n comments = columns.MarkdownColumn()\n primary_ip4 = tables.Column(\n linkify=True,\n verbose_name='IPv4 Address'\n )\n primary_ip6 = tables.Column(\n linkify=True,\n verbose_name='IPv6 Address'\n )\n primary_ip = tables.Column(\n linkify=True,\n order_by=('primary_ip4', 'primary_ip6'),\n verbose_name='IP Address'\n )\n contacts = columns.ManyToManyColumn(\n linkify_item=True\n )\n tags = columns.TagColumn(\n url_name='virtualization:virtualmachine_list'\n )\n\n class Meta(NetBoxTable.Meta):\n model = VirtualMachine\n fields = (\n 'pk', 'id', 'name', 'status', 'cluster', 'role', 'tenant', 'tenant_group', 'platform', 'vcpus', 'memory', 'disk',\n 'primary_ip4', 'primary_ip6', 'primary_ip', 'comments', 'contacts', 'tags', 'created', 'last_updated',\n )\n default_columns = (\n 'pk', 'name', 'status', 'cluster', 'role', 'tenant', 'vcpus', 'memory', 'disk', 'primary_ip',\n )\n\n\n#\n# VM components\n#\n\nclass VMInterfaceTable(BaseInterfaceTable):\n virtual_machine = tables.Column(\n linkify=True\n )\n name = tables.Column(\n linkify=True\n )\n vrf = tables.Column(\n linkify=True\n )\n contacts = columns.ManyToManyColumn(\n 
linkify_item=True\n )\n tags = columns.TagColumn(\n url_name='virtualization:vminterface_list'\n )\n\n class Meta(NetBoxTable.Meta):\n model = VMInterface\n fields = (\n 'pk', 'id', 'name', 'virtual_machine', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'tags',\n 'vrf', 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'contacts', 'created',\n 'last_updated',\n )\n default_columns = ('pk', 'name', 'virtual_machine', 'enabled', 'description')\n\n\nclass VirtualMachineVMInterfaceTable(VMInterfaceTable):\n parent = tables.Column(\n linkify=True\n )\n bridge = tables.Column(\n linkify=True\n )\n actions = columns.ActionsColumn(\n actions=('edit', 'delete'),\n extra_buttons=VMINTERFACE_BUTTONS\n )\n\n class Meta(NetBoxTable.Meta):\n model = VMInterface\n fields = (\n 'pk', 'id', 'name', 'enabled', 'parent', 'bridge', 'mac_address', 'mtu', 'mode', 'description', 'tags',\n 'ip_addresses', 'fhrp_groups', 'untagged_vlan', 'tagged_vlans', 'actions',\n )\n default_columns = ('pk', 'name', 'enabled', 'mac_address', 'mtu', 'mode', 'description', 'ip_addresses')\n row_attrs = {\n 'data-name': lambda record: record.name,\n }\n", "path": "netbox/virtualization/tables/virtualmachines.py"}]} | 1,539 | 296 |
gh_patches_debug_26762 | rasdani/github-patches | git_diff | pytorch__ignite-1312 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Improve Canberra metric
## 🚀 Feature
The current implementation of the Canberra metric does not take the absolute value of the terms in the denominator. Moreover, `sklearn` can be used in the tests.
See https://arxiv.org/pdf/1411.7474.pdf
See https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html
--- END ISSUE ---
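To see why the absolute values in the denominator matter, here is a minimal check with made-up tensors (the names `y`/`y_pred` follow the metric's `_update` signature; `scipy` is used only as an independent reference implementation):

```python
import torch
from scipy.spatial.distance import canberra

y = torch.tensor([-1.0, 2.0])      # ground truth with one negative entry
y_pred = torch.tensor([1.0, 2.0])  # prediction

# Unpatched denominator (P + A) cancels to zero for the first entry.
buggy = torch.sum(torch.abs(y - y_pred) / (y_pred + y))
# The Canberra distance uses |A| + |P| in the denominator.
fixed = torch.sum(torch.abs(y - y_pred) / (torch.abs(y) + torch.abs(y_pred)))

print(buggy.item())                       # inf
print(fixed.item())                       # 1.0
print(canberra([-1.0, 2.0], [1.0, 2.0]))  # 1.0 -- agrees with the fixed formula
```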
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ignite/contrib/metrics/regression/canberra_metric.py`
Content:
```
1 import torch
2
3 from ignite.contrib.metrics.regression._base import _BaseRegression
4
5
6 class CanberraMetric(_BaseRegression):
7 r"""
8 Calculates the Canberra Metric.
9
10 :math:`\text{CM} = \sum_{j=1}^n\frac{|A_j - P_j|}{A_j + P_j}`
11
12 where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.
13
14 More details can be found in `Botchkarev 2018`__.
15
16 - ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.
17 - `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.
18
19 __ https://arxiv.org/abs/1809.03006
20 """
21
22 def reset(self):
23 self._sum_of_errors = 0.0
24
25 def _update(self, output):
26 y_pred, y = output
27 errors = torch.abs(y.view_as(y_pred) - y_pred) / (y_pred + y.view_as(y_pred))
28 self._sum_of_errors += torch.sum(errors).item()
29
30 def compute(self):
31 return self._sum_of_errors
32
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ignite/contrib/metrics/regression/canberra_metric.py b/ignite/contrib/metrics/regression/canberra_metric.py
--- a/ignite/contrib/metrics/regression/canberra_metric.py
+++ b/ignite/contrib/metrics/regression/canberra_metric.py
@@ -7,16 +7,19 @@
r"""
Calculates the Canberra Metric.
- :math:`\text{CM} = \sum_{j=1}^n\frac{|A_j - P_j|}{A_j + P_j}`
+ :math:`\text{CM} = \sum_{j=1}^n\frac{|A_j - P_j|}{|A_j| + |P_j|}`
where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.
- More details can be found in `Botchkarev 2018`__.
+ More details can be found in `Botchkarev 2018`_ or `scikit-learn distance metrics`_
- ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.
- `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.
- __ https://arxiv.org/abs/1809.03006
+ .. _Botchkarev 2018: https://arxiv.org/abs/1809.03006
+ .. _scikit-learn distance metrics:
+ https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html
+
"""
def reset(self):
@@ -24,7 +27,7 @@
def _update(self, output):
y_pred, y = output
- errors = torch.abs(y.view_as(y_pred) - y_pred) / (y_pred + y.view_as(y_pred))
+ errors = torch.abs(y.view_as(y_pred) - y_pred) / (torch.abs(y_pred) + torch.abs(y.view_as(y_pred)))
self._sum_of_errors += torch.sum(errors).item()
def compute(self):
| {"golden_diff": "diff --git a/ignite/contrib/metrics/regression/canberra_metric.py b/ignite/contrib/metrics/regression/canberra_metric.py\n--- a/ignite/contrib/metrics/regression/canberra_metric.py\n+++ b/ignite/contrib/metrics/regression/canberra_metric.py\n@@ -7,16 +7,19 @@\n r\"\"\"\n Calculates the Canberra Metric.\n \n- :math:`\\text{CM} = \\sum_{j=1}^n\\frac{|A_j - P_j|}{A_j + P_j}`\n+ :math:`\\text{CM} = \\sum_{j=1}^n\\frac{|A_j - P_j|}{|A_j| + |P_j|}`\n \n where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.\n \n- More details can be found in `Botchkarev 2018`__.\n+ More details can be found in `Botchkarev 2018`_ or `scikit-learn distance metrics`_\n \n - ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.\n - `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.\n \n- __ https://arxiv.org/abs/1809.03006\n+ .. _Botchkarev 2018: https://arxiv.org/abs/1809.03006\n+ .. _scikit-learn distance metrics:\n+ https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html\n+\n \"\"\"\n \n def reset(self):\n@@ -24,7 +27,7 @@\n \n def _update(self, output):\n y_pred, y = output\n- errors = torch.abs(y.view_as(y_pred) - y_pred) / (y_pred + y.view_as(y_pred))\n+ errors = torch.abs(y.view_as(y_pred) - y_pred) / (torch.abs(y_pred) + torch.abs(y.view_as(y_pred)))\n self._sum_of_errors += torch.sum(errors).item()\n \n def compute(self):\n", "issue": "Improve Canberra metric\n## \ud83d\ude80 Feature\r\n\r\nActual implementation of Canberra metric does not use absolute value on terms in denominator. Moreover, `sklearn` can be used in test.\r\n\r\nSee https://arxiv.org/pdf/1411.7474.pdf \r\n\r\nSee https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html\r\n\n", "before_files": [{"content": "import torch\n\nfrom ignite.contrib.metrics.regression._base import _BaseRegression\n\n\nclass CanberraMetric(_BaseRegression):\n r\"\"\"\n Calculates the Canberra Metric.\n\n :math:`\\text{CM} = \\sum_{j=1}^n\\frac{|A_j - P_j|}{A_j + P_j}`\n\n where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.\n\n More details can be found in `Botchkarev 2018`__.\n\n - ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.\n - `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.\n\n __ https://arxiv.org/abs/1809.03006\n \"\"\"\n\n def reset(self):\n self._sum_of_errors = 0.0\n\n def _update(self, output):\n y_pred, y = output\n errors = torch.abs(y.view_as(y_pred) - y_pred) / (y_pred + y.view_as(y_pred))\n self._sum_of_errors += torch.sum(errors).item()\n\n def compute(self):\n return self._sum_of_errors\n", "path": "ignite/contrib/metrics/regression/canberra_metric.py"}], "after_files": [{"content": "import torch\n\nfrom ignite.contrib.metrics.regression._base import _BaseRegression\n\n\nclass CanberraMetric(_BaseRegression):\n r\"\"\"\n Calculates the Canberra Metric.\n\n :math:`\\text{CM} = \\sum_{j=1}^n\\frac{|A_j - P_j|}{|A_j| + |P_j|}`\n\n where, :math:`A_j` is the ground truth and :math:`P_j` is the predicted value.\n\n More details can be found in `Botchkarev 2018`_ or `scikit-learn distance metrics`_\n\n - ``update`` must receive output of the form ``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y}``.\n - `y` and `y_pred` must be of same shape `(N, )` or `(N, 1)`.\n\n .. _Botchkarev 2018: https://arxiv.org/abs/1809.03006\n .. 
_scikit-learn distance metrics:\n https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html\n\n \"\"\"\n\n def reset(self):\n self._sum_of_errors = 0.0\n\n def _update(self, output):\n y_pred, y = output\n errors = torch.abs(y.view_as(y_pred) - y_pred) / (torch.abs(y_pred) + torch.abs(y.view_as(y_pred)))\n self._sum_of_errors += torch.sum(errors).item()\n\n def compute(self):\n return self._sum_of_errors\n", "path": "ignite/contrib/metrics/regression/canberra_metric.py"}]} | 691 | 506 |
gh_patches_debug_27545 | rasdani/github-patches | git_diff | encode__uvicorn-227 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error integrating with Channels if 'lifespan' is not specified in router
I'm not entirely sure if I should be posting this here or on `channels`.
I'm using v0.3.12, which I believe has already introduced the new `lifespan` protocol defined in asgiref. But this causes an error with `channels`' router:
```bash
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/uvicorn/lifespan.py", line 29, in run
await self.asgi(self.receive, self.send)
File "/usr/local/lib/python3.6/site-packages/uvicorn/middleware/message_logger.py", line 51, in __call__
inner = self.app(self.scope)
File "/usr/local/lib/python3.6/site-packages/channels/routing.py", line 58, in __call__
raise ValueError("No application configured for scope type %r" % scope["type"])
ValueError: No application configured for scope type 'lifespan'
```
My `routing.py` file looks like this:
```python
application = ProtocolTypeRouter({
# Empty for now (http->django views is added by default)
'websocket': JWTWebsocketMiddleware(
URLRouter(urlpatterns)
)
})
```
**EDIT**: Sorry, my workaround wasn't actually working, as you'll need at least one `path` in the `URLRouter`, so I've removed it.
To temporarily get around this, I had to downgrade to `v0.3.9`.
--- END ISSUE ---
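The `ValueError` in the traceback comes from channels' `ProtocolTypeRouter`, which rejects any scope type it has no handler for, while the unpatched middleware only instantiates the wrapped app inside the already-running coroutine. A simplified sketch of the router side (a stand-in, not the actual channels code) shows the failure mode:

```python
# Simplified stand-in for channels.routing.ProtocolTypeRouter.
class ProtocolTypeRouter:
    def __init__(self, application_mapping):
        self.application_mapping = application_mapping

    def __call__(self, scope):
        if scope["type"] in self.application_mapping:
            return self.application_mapping[scope["type"]](scope)
        raise ValueError(
            "No application configured for scope type %r" % scope["type"])

router = ProtocolTypeRouter({"websocket": lambda scope: None})
router({"type": "lifespan"})  # ValueError, as in the traceback above
```

The patch below therefore moves the `app(scope)` call into the responder's `__init__`, so the exception is raised (and logged) while the middleware is being constructed rather than from inside the running lifespan task.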
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `uvicorn/middleware/message_logger.py`
Content:
```
1 import logging
2
3 PLACEHOLDER_FORMAT = {
4 'body': '<{length} bytes>',
5 'bytes': '<{length} bytes>',
6 'text': '<{length} chars>',
7 'headers': '<...>',
8 }
9
10
11 def message_with_placeholders(message):
12 """
13 Return an ASGI message, with any body-type content omitted and replaced
14 with a placeholder.
15 """
16 new_message = message.copy()
17 for attr in PLACEHOLDER_FORMAT.keys():
18 if message.get(attr) is not None:
19 content = message[attr]
20 placeholder = PLACEHOLDER_FORMAT[attr].format(length=len(content))
21 new_message[attr] = placeholder
22 return new_message
23
24
25 class MessageLoggerMiddleware:
26 def __init__(self, app):
27 self.task_counter = 0
28 self.app = app
29 self.logger = logging.getLogger("uvicorn")
30
31 def __call__(self, scope):
32 self.task_counter += 1
33 return MessageLoggerResponder(scope, self.app, self.logger, self.task_counter)
34
35
36 class MessageLoggerResponder:
37 def __init__(self, scope, app, logger, task_counter):
38 self.scope = scope
39 self.app = app
40 self.logger = logger
41 self.task_counter = task_counter
42 self.client_addr = scope.get('client')
43
44 async def __call__(self, receive, send):
45 self._receive = receive
46 self._send = send
47 logged_scope = message_with_placeholders(self.scope)
48 log_text = '%s - ASGI [%d] Started %s'
49 self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)
50 try:
51 inner = self.app(self.scope)
52 await inner(self.receive, self.send)
53 except:
54 log_text = '%s - ASGI [%d] Raised exception'
55 self.logger.debug(log_text, self.client_addr, self.task_counter)
56 raise
57 else:
58 log_text = '%s - ASGI [%d] Completed'
59 self.logger.debug(log_text, self.client_addr, self.task_counter)
60
61 async def receive(self):
62 message = await self._receive()
63 logged_message = message_with_placeholders(message)
64 log_text = '%s - ASGI [%d] Sent %s'
65 self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)
66 return message
67
68 async def send(self, message):
69 logged_message = message_with_placeholders(message)
70 log_text = '%s - ASGI [%d] Received %s'
71 self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)
72 await self._send(message)
73
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/uvicorn/middleware/message_logger.py b/uvicorn/middleware/message_logger.py
--- a/uvicorn/middleware/message_logger.py
+++ b/uvicorn/middleware/message_logger.py
@@ -36,20 +36,27 @@
class MessageLoggerResponder:
def __init__(self, scope, app, logger, task_counter):
self.scope = scope
- self.app = app
self.logger = logger
self.task_counter = task_counter
self.client_addr = scope.get('client')
+ logged_scope = message_with_placeholders(scope)
+ log_text = '%s - ASGI [%d] Initialized %s'
+ self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)
+ try:
+ self.inner = app(scope)
+ except:
+ log_text = '%s - ASGI [%d] Raised exception'
+ self.logger.debug(log_text, self.client_addr, self.task_counter)
+ raise
+
async def __call__(self, receive, send):
self._receive = receive
self._send = send
- logged_scope = message_with_placeholders(self.scope)
- log_text = '%s - ASGI [%d] Started %s'
- self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)
+ log_text = '%s - ASGI [%d] Started task'
+ self.logger.debug(log_text, self.client_addr, self.task_counter)
try:
- inner = self.app(self.scope)
- await inner(self.receive, self.send)
+ await self.inner(self.receive, self.send)
except:
log_text = '%s - ASGI [%d] Raised exception'
self.logger.debug(log_text, self.client_addr, self.task_counter)
| {"golden_diff": "diff --git a/uvicorn/middleware/message_logger.py b/uvicorn/middleware/message_logger.py\n--- a/uvicorn/middleware/message_logger.py\n+++ b/uvicorn/middleware/message_logger.py\n@@ -36,20 +36,27 @@\n class MessageLoggerResponder:\n def __init__(self, scope, app, logger, task_counter):\n self.scope = scope\n- self.app = app\n self.logger = logger\n self.task_counter = task_counter\n self.client_addr = scope.get('client')\n \n+ logged_scope = message_with_placeholders(scope)\n+ log_text = '%s - ASGI [%d] Initialized %s'\n+ self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)\n+ try:\n+ self.inner = app(scope)\n+ except:\n+ log_text = '%s - ASGI [%d] Raised exception'\n+ self.logger.debug(log_text, self.client_addr, self.task_counter)\n+ raise\n+\n async def __call__(self, receive, send):\n self._receive = receive\n self._send = send\n- logged_scope = message_with_placeholders(self.scope)\n- log_text = '%s - ASGI [%d] Started %s'\n- self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)\n+ log_text = '%s - ASGI [%d] Started task'\n+ self.logger.debug(log_text, self.client_addr, self.task_counter)\n try:\n- inner = self.app(self.scope)\n- await inner(self.receive, self.send)\n+ await self.inner(self.receive, self.send)\n except:\n log_text = '%s - ASGI [%d] Raised exception'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n", "issue": "Error integrating with Channels if 'lifespan' is not specified in router\nI'm not entirely sure if I should be posting this here or on `channels`.\r\n\r\nI'm using v0.3.12 which I believe has already introduced the new `lifespan` protocol defined in asgiref. But this causes an error with `channels`' router\r\n\r\n```bash\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.6/site-packages/uvicorn/lifespan.py\", line 29, in run\r\n await self.asgi(self.receive, self.send)\r\n File \"/usr/local/lib/python3.6/site-packages/uvicorn/middleware/message_logger.py\", line 51, in __call__\r\n inner = self.app(self.scope)\r\n File \"/usr/local/lib/python3.6/site-packages/channels/routing.py\", line 58, in __call__\r\n raise ValueError(\"No application configured for scope type %r\" % scope[\"type\"])\r\nValueError: No application configured for scope type 'lifespan'\r\n```\r\n\r\nMy `routing.py` file looks like this:\r\n\r\n```python\r\napplication = ProtocolTypeRouter({\r\n # Empty for now (http->django views is added by default)\r\n 'websocket': JWTWebsocketMiddleware(\r\n URLRouter(urlpatterns)\r\n )\r\n})\r\n```\r\n\r\n**EDIT**: Sorry my workaround wasn't actually working as you'll need at least one `path` in the `URLRouter`, so I've removed it.\r\n\r\nTo temporarily get around this, I had to downgrade to `v0.3.9`.\n", "before_files": [{"content": "import logging\n\nPLACEHOLDER_FORMAT = {\n 'body': '<{length} bytes>',\n 'bytes': '<{length} bytes>',\n 'text': '<{length} chars>',\n 'headers': '<...>',\n}\n\n\ndef message_with_placeholders(message):\n \"\"\"\n Return an ASGI message, with any body-type content omitted and replaced\n with a placeholder.\n \"\"\"\n new_message = message.copy()\n for attr in PLACEHOLDER_FORMAT.keys():\n if message.get(attr) is not None:\n content = message[attr]\n placeholder = PLACEHOLDER_FORMAT[attr].format(length=len(content))\n new_message[attr] = placeholder\n return new_message\n\n\nclass MessageLoggerMiddleware:\n def __init__(self, app):\n self.task_counter = 0\n self.app = app\n self.logger = logging.getLogger(\"uvicorn\")\n\n 
def __call__(self, scope):\n self.task_counter += 1\n return MessageLoggerResponder(scope, self.app, self.logger, self.task_counter)\n\n\nclass MessageLoggerResponder:\n def __init__(self, scope, app, logger, task_counter):\n self.scope = scope\n self.app = app\n self.logger = logger\n self.task_counter = task_counter\n self.client_addr = scope.get('client')\n\n async def __call__(self, receive, send):\n self._receive = receive\n self._send = send\n logged_scope = message_with_placeholders(self.scope)\n log_text = '%s - ASGI [%d] Started %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)\n try:\n inner = self.app(self.scope)\n await inner(self.receive, self.send)\n except:\n log_text = '%s - ASGI [%d] Raised exception'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n raise\n else:\n log_text = '%s - ASGI [%d] Completed'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n\n async def receive(self):\n message = await self._receive()\n logged_message = message_with_placeholders(message)\n log_text = '%s - ASGI [%d] Sent %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)\n return message\n\n async def send(self, message):\n logged_message = message_with_placeholders(message)\n log_text = '%s - ASGI [%d] Received %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)\n await self._send(message)\n", "path": "uvicorn/middleware/message_logger.py"}], "after_files": [{"content": "import logging\n\nPLACEHOLDER_FORMAT = {\n 'body': '<{length} bytes>',\n 'bytes': '<{length} bytes>',\n 'text': '<{length} chars>',\n 'headers': '<...>',\n}\n\n\ndef message_with_placeholders(message):\n \"\"\"\n Return an ASGI message, with any body-type content omitted and replaced\n with a placeholder.\n \"\"\"\n new_message = message.copy()\n for attr in PLACEHOLDER_FORMAT.keys():\n if message.get(attr) is not None:\n content = message[attr]\n placeholder = PLACEHOLDER_FORMAT[attr].format(length=len(content))\n new_message[attr] = placeholder\n return new_message\n\n\nclass MessageLoggerMiddleware:\n def __init__(self, app):\n self.task_counter = 0\n self.app = app\n self.logger = logging.getLogger(\"uvicorn\")\n\n def __call__(self, scope):\n self.task_counter += 1\n return MessageLoggerResponder(scope, self.app, self.logger, self.task_counter)\n\n\nclass MessageLoggerResponder:\n def __init__(self, scope, app, logger, task_counter):\n self.scope = scope\n self.logger = logger\n self.task_counter = task_counter\n self.client_addr = scope.get('client')\n\n logged_scope = message_with_placeholders(scope)\n log_text = '%s - ASGI [%d] Initialized %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_scope)\n try:\n self.inner = app(scope)\n except:\n log_text = '%s - ASGI [%d] Raised exception'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n raise\n\n async def __call__(self, receive, send):\n self._receive = receive\n self._send = send\n log_text = '%s - ASGI [%d] Started task'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n try:\n await self.inner(self.receive, self.send)\n except:\n log_text = '%s - ASGI [%d] Raised exception'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n raise\n else:\n log_text = '%s - ASGI [%d] Completed'\n self.logger.debug(log_text, self.client_addr, self.task_counter)\n\n async def receive(self):\n message = await self._receive()\n logged_message = 
message_with_placeholders(message)\n log_text = '%s - ASGI [%d] Sent %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)\n return message\n\n async def send(self, message):\n logged_message = message_with_placeholders(message)\n log_text = '%s - ASGI [%d] Received %s'\n self.logger.debug(log_text, self.client_addr, self.task_counter, logged_message)\n await self._send(message)\n", "path": "uvicorn/middleware/message_logger.py"}]} | 1,285 | 389 |
gh_patches_debug_2522 | rasdani/github-patches | git_diff | googleapis__python-bigquery-189 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Packaging: prep for 1.0.0 release of `google-resumable-media-python`.
See: https://github.com/googleapis/google-resumable-media-python/issues/138
--- END ISSUE ---
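The change itself is only a dependency pin. A quick, illustrative way to check whether an installed `google-resumable-media` satisfies the widened range (this snippet is not part of the repo):

```python
import pkg_resources

# Widened pin: keep the 0.5.0 floor but allow 1.x releases as well.
req = pkg_resources.Requirement.parse("google-resumable-media >= 0.5.0, < 2.0dev")
dist = pkg_resources.get_distribution("google-resumable-media")
print(dist.version, dist in req)  # e.g. "1.0.0 True" once 1.0.0 ships
```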
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2018 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import io
16 import os
17
18 import setuptools
19
20
21 # Package metadata.
22
23 name = "google-cloud-bigquery"
24 description = "Google BigQuery API client library"
25 version = "1.26.0"
26 # Should be one of:
27 # 'Development Status :: 3 - Alpha'
28 # 'Development Status :: 4 - Beta'
29 # 'Development Status :: 5 - Production/Stable'
30 release_status = "Development Status :: 5 - Production/Stable"
31 dependencies = [
32 'enum34; python_version < "3.4"',
33 "google-api-core >= 1.21.0, < 2.0dev",
34 "google-cloud-core >= 1.1.0, < 2.0dev",
35 "google-resumable-media >= 0.5.0, < 0.6dev",
36 "six >=1.13.0,< 2.0.0dev",
37 ]
38 extras = {
39 "bqstorage": [
40 "google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev",
41 # Due to an issue in pip's dependency resolver, the `grpc` extra is not
42 # installed, even though `google-cloud-bigquery-storage` specifies it
43 # as `google-api-core[grpc]`. We thus need to explicitly specify it here.
44 # See: https://github.com/googleapis/python-bigquery/issues/83
45 "grpcio >= 1.8.2, < 2.0dev",
46 "pyarrow>=0.16.0, < 2.0dev",
47 ],
48 "pandas": ["pandas>=0.17.1"],
49 # Exclude PyArrow dependency from Windows Python 2.7.
50 'pyarrow: platform_system != "Windows" or python_version >= "3.4"': [
51 # Bad Linux release for 0.14.0.
52 # https://issues.apache.org/jira/browse/ARROW-5868
53 "pyarrow>=0.4.1, != 0.14.0"
54 ],
55 "tqdm": ["tqdm >= 4.0.0, <5.0.0dev"],
56 "fastparquet": [
57 "fastparquet",
58 "python-snappy",
59 # llvmlite >= 0.32.0 cannot be installed on Python 3.5 and below
60 # (building the wheel fails), thus needs to be restricted.
61 # See: https://github.com/googleapis/python-bigquery/issues/78
62 "llvmlite <= 0.31.0",
63 ],
64 }
65
66 all_extras = []
67
68 for extra in extras:
69 if extra == "fastparquet":
70 # Skip fastparquet from "all" because it is redundant with pyarrow and
71 # creates a dependency on pre-release versions of numpy. See:
72 # https://github.com/googleapis/google-cloud-python/issues/8549
73 continue
74 all_extras.extend(extras[extra])
75
76 extras["all"] = all_extras
77
78 # Setup boilerplate below this line.
79
80 package_root = os.path.abspath(os.path.dirname(__file__))
81
82 readme_filename = os.path.join(package_root, "README.rst")
83 with io.open(readme_filename, encoding="utf-8") as readme_file:
84 readme = readme_file.read()
85
86 # Only include packages under the 'google' namespace. Do not include tests,
87 # benchmarks, etc.
88 packages = [
89 package for package in setuptools.find_packages() if package.startswith("google")
90 ]
91
92 # Determine which namespaces are needed.
93 namespaces = ["google"]
94 if "google.cloud" in packages:
95 namespaces.append("google.cloud")
96
97
98 setuptools.setup(
99 name=name,
100 version=version,
101 description=description,
102 long_description=readme,
103 author="Google LLC",
104 author_email="[email protected]",
105 license="Apache 2.0",
106 url="https://github.com/googleapis/python-bigquery",
107 classifiers=[
108 release_status,
109 "Intended Audience :: Developers",
110 "License :: OSI Approved :: Apache Software License",
111 "Programming Language :: Python",
112 "Programming Language :: Python :: 2",
113 "Programming Language :: Python :: 2.7",
114 "Programming Language :: Python :: 3",
115 "Programming Language :: Python :: 3.5",
116 "Programming Language :: Python :: 3.6",
117 "Programming Language :: Python :: 3.7",
118 "Programming Language :: Python :: 3.8",
119 "Operating System :: OS Independent",
120 "Topic :: Internet",
121 ],
122 platforms="Posix; MacOS X; Windows",
123 packages=packages,
124 namespace_packages=namespaces,
125 install_requires=dependencies,
126 extras_require=extras,
127 python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
128 include_package_data=True,
129 zip_safe=False,
130 )
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -32,7 +32,7 @@
'enum34; python_version < "3.4"',
"google-api-core >= 1.21.0, < 2.0dev",
"google-cloud-core >= 1.1.0, < 2.0dev",
- "google-resumable-media >= 0.5.0, < 0.6dev",
+ "google-resumable-media >= 0.5.0, < 2.0dev",
"six >=1.13.0,< 2.0.0dev",
]
extras = {
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -32,7 +32,7 @@\n 'enum34; python_version < \"3.4\"',\n \"google-api-core >= 1.21.0, < 2.0dev\",\n \"google-cloud-core >= 1.1.0, < 2.0dev\",\n- \"google-resumable-media >= 0.5.0, < 0.6dev\",\n+ \"google-resumable-media >= 0.5.0, < 2.0dev\",\n \"six >=1.13.0,< 2.0.0dev\",\n ]\n extras = {\n", "issue": "Packaging: prep for 1.0.0 release of `google-resumable-media-python`.\nSee: https://github.com/googleapis/google-resumable-media-python/issues/138\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package metadata.\n\nname = \"google-cloud-bigquery\"\ndescription = \"Google BigQuery API client library\"\nversion = \"1.26.0\"\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = \"Development Status :: 5 - Production/Stable\"\ndependencies = [\n 'enum34; python_version < \"3.4\"',\n \"google-api-core >= 1.21.0, < 2.0dev\",\n \"google-cloud-core >= 1.1.0, < 2.0dev\",\n \"google-resumable-media >= 0.5.0, < 0.6dev\",\n \"six >=1.13.0,< 2.0.0dev\",\n]\nextras = {\n \"bqstorage\": [\n \"google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev\",\n # Due to an issue in pip's dependency resolver, the `grpc` extra is not\n # installed, even though `google-cloud-bigquery-storage` specifies it\n # as `google-api-core[grpc]`. We thus need to explicitly specify it here.\n # See: https://github.com/googleapis/python-bigquery/issues/83\n \"grpcio >= 1.8.2, < 2.0dev\",\n \"pyarrow>=0.16.0, < 2.0dev\",\n ],\n \"pandas\": [\"pandas>=0.17.1\"],\n # Exclude PyArrow dependency from Windows Python 2.7.\n 'pyarrow: platform_system != \"Windows\" or python_version >= \"3.4\"': [\n # Bad Linux release for 0.14.0.\n # https://issues.apache.org/jira/browse/ARROW-5868\n \"pyarrow>=0.4.1, != 0.14.0\"\n ],\n \"tqdm\": [\"tqdm >= 4.0.0, <5.0.0dev\"],\n \"fastparquet\": [\n \"fastparquet\",\n \"python-snappy\",\n # llvmlite >= 0.32.0 cannot be installed on Python 3.5 and below\n # (building the wheel fails), thus needs to be restricted.\n # See: https://github.com/googleapis/python-bigquery/issues/78\n \"llvmlite <= 0.31.0\",\n ],\n}\n\nall_extras = []\n\nfor extra in extras:\n if extra == \"fastparquet\":\n # Skip fastparquet from \"all\" because it is redundant with pyarrow and\n # creates a dependency on pre-release versions of numpy. See:\n # https://github.com/googleapis/google-cloud-python/issues/8549\n continue\n all_extras.extend(extras[extra])\n\nextras[\"all\"] = all_extras\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, \"README.rst\")\nwith io.open(readme_filename, encoding=\"utf-8\") as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. 
Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages() if package.startswith(\"google\")\n]\n\n# Determine which namespaces are needed.\nnamespaces = [\"google\"]\nif \"google.cloud\" in packages:\n namespaces.append(\"google.cloud\")\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author=\"Google LLC\",\n author_email=\"[email protected]\",\n license=\"Apache 2.0\",\n url=\"https://github.com/googleapis/python-bigquery\",\n classifiers=[\n release_status,\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet\",\n ],\n platforms=\"Posix; MacOS X; Windows\",\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n python_requires=\">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*\",\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package metadata.\n\nname = \"google-cloud-bigquery\"\ndescription = \"Google BigQuery API client library\"\nversion = \"1.26.0\"\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = \"Development Status :: 5 - Production/Stable\"\ndependencies = [\n 'enum34; python_version < \"3.4\"',\n \"google-api-core >= 1.21.0, < 2.0dev\",\n \"google-cloud-core >= 1.1.0, < 2.0dev\",\n \"google-resumable-media >= 0.5.0, < 2.0dev\",\n \"six >=1.13.0,< 2.0.0dev\",\n]\nextras = {\n \"bqstorage\": [\n \"google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev\",\n # Due to an issue in pip's dependency resolver, the `grpc` extra is not\n # installed, even though `google-cloud-bigquery-storage` specifies it\n # as `google-api-core[grpc]`. 
We thus need to explicitly specify it here.\n # See: https://github.com/googleapis/python-bigquery/issues/83\n \"grpcio >= 1.8.2, < 2.0dev\",\n \"pyarrow>=0.16.0, < 2.0dev\",\n ],\n \"pandas\": [\"pandas>=0.17.1\"],\n # Exclude PyArrow dependency from Windows Python 2.7.\n 'pyarrow: platform_system != \"Windows\" or python_version >= \"3.4\"': [\n # Bad Linux release for 0.14.0.\n # https://issues.apache.org/jira/browse/ARROW-5868\n \"pyarrow>=0.4.1, != 0.14.0\"\n ],\n \"tqdm\": [\"tqdm >= 4.0.0, <5.0.0dev\"],\n \"fastparquet\": [\n \"fastparquet\",\n \"python-snappy\",\n # llvmlite >= 0.32.0 cannot be installed on Python 3.5 and below\n # (building the wheel fails), thus needs to be restricted.\n # See: https://github.com/googleapis/python-bigquery/issues/78\n \"llvmlite <= 0.31.0\",\n ],\n}\n\nall_extras = []\n\nfor extra in extras:\n if extra == \"fastparquet\":\n # Skip fastparquet from \"all\" because it is redundant with pyarrow and\n # creates a dependency on pre-release versions of numpy. See:\n # https://github.com/googleapis/google-cloud-python/issues/8549\n continue\n all_extras.extend(extras[extra])\n\nextras[\"all\"] = all_extras\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, \"README.rst\")\nwith io.open(readme_filename, encoding=\"utf-8\") as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages() if package.startswith(\"google\")\n]\n\n# Determine which namespaces are needed.\nnamespaces = [\"google\"]\nif \"google.cloud\" in packages:\n namespaces.append(\"google.cloud\")\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author=\"Google LLC\",\n author_email=\"[email protected]\",\n license=\"Apache 2.0\",\n url=\"https://github.com/googleapis/python-bigquery\",\n classifiers=[\n release_status,\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet\",\n ],\n platforms=\"Posix; MacOS X; Windows\",\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n python_requires=\">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*\",\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "setup.py"}]} | 1,780 | 156 |
gh_patches_debug_6181 | rasdani/github-patches | git_diff | scrapy__scrapy-2816 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DNSCACHE_ENABLED=False not working
Originally reported by @softwarevamp on [StackOverflow](https://stackoverflow.com/questions/44877296/scrapy-with-dnscache-enabled-false-not-working):
> When I run scrapy shell with `DNSCACHE_ENABLED=False`, I got
```
KeyError: 'dictionary is empty'
twisted.internet.error.DNSLookupError: DNS lookup failed: no results for hostname lookup: www.mydomain.com.
```
```
2017-07-03 03:09:12 [twisted] CRITICAL: while looking up www.mydomain.com with <scrapy.resolver.CachingThreadedResolver object at 0x3fd0050>
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/twisted/internet/defer.py", line 653, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/usr/lib64/python2.7/site-packages/scrapy/resolver.py", line 29, in _cache_result
dnscache[name] = result
File "/usr/lib64/python2.7/site-packages/scrapy/utils/datatypes.py", line 305, in __setitem__
self.popitem(last=False)
File "/usr/lib64/python2.7/collections.py", line 159, in popitem
raise KeyError('dictionary is empty')
KeyError: 'dictionary is empty'
2017-07-03 03:09:12 [scrapy.downloadermiddlewares.retry] DEBUG: Gave up retrying <GET //www.mydomain.com/> (failed 3 times): DNS lookup failed: no results for hostname lookup: www.mydomain.com.
Traceback (most recent call last):
File "/usr/bin/scrapy", line 11, in <module>
sys.exit(execute())
File "/usr/lib64/python2.7/site-packages/scrapy/cmdline.py", line 149, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/usr/lib64/python2.7/site-packages/scrapy/cmdline.py", line 89, in _run_print_help
func(*a, **kw)
File "/usr/lib64/python2.7/site-packages/scrapy/cmdline.py", line 156, in _run_command
cmd.run(args, opts)
File "/usr/lib64/python2.7/site-packages/scrapy/commands/shell.py", line 73, in run
shell.start(url=url, redirect=not opts.no_redirect)
File "/usr/lib64/python2.7/site-packages/scrapy/shell.py", line 48, in start
self.fetch(url, spider, redirect=redirect)
File "/usr/lib64/python2.7/site-packages/scrapy/shell.py", line 115, in fetch
reactor, self._schedule, request, spider)
File "/usr/lib64/python2.7/site-packages/twisted/internet/threads.py", line 122, in blockingCallFromThread
result.raiseException()
File "<string>", line 2, in raiseException
twisted.internet.error.DNSLookupError: DNS lookup failed: no results for hostname lookup: www.mydomain.com.
```
> Any thoughts welcome
--- END ISSUE ---
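The `KeyError: 'dictionary is empty'` is explained by how the cache is sized: `DNSCACHE_ENABLED=False` makes Scrapy build the resolver with a cache size of 0, and a zero-limit `LocalCache` tries to evict from an empty `OrderedDict` on every insert. A stripped-down sketch of `scrapy.utils.datatypes.LocalCache` reproduces it:

```python
from collections import OrderedDict

class LocalCache(OrderedDict):
    """Bounded cache that evicts oldest entries first (simplified sketch)."""

    def __init__(self, limit=None):
        super(LocalCache, self).__init__()
        self.limit = limit

    def __setitem__(self, key, value):
        while len(self) >= self.limit:  # with limit == 0 this is always true,
            self.popitem(last=False)    # so it pops an empty dict -> KeyError
        super(LocalCache, self).__setitem__(key, value)

cache = LocalCache(limit=0)
cache['www.mydomain.com'] = ['1.2.3.4']  # KeyError: 'dictionary is empty'
```

So the fix is simply to skip the caching callback whenever `dnscache.limit` is falsy.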
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scrapy/resolver.py`
Content:
```
1 from twisted.internet import defer
2 from twisted.internet.base import ThreadedResolver
3
4 from scrapy.utils.datatypes import LocalCache
5
6 # TODO: cache misses
7
8 dnscache = LocalCache(10000)
9
10 class CachingThreadedResolver(ThreadedResolver):
11 def __init__(self, reactor, cache_size, timeout):
12 super(CachingThreadedResolver, self).__init__(reactor)
13 dnscache.limit = cache_size
14 self.timeout = timeout
15
16 def getHostByName(self, name, timeout=None):
17 if name in dnscache:
18 return defer.succeed(dnscache[name])
19 # in Twisted<=16.6, getHostByName() is always called with
20 # a default timeout of 60s (actually passed as (1, 3, 11, 45) tuple),
21 # so the input argument above is simply overridden
22 # to enforce Scrapy's DNS_TIMEOUT setting's value
23 timeout = (self.timeout,)
24 d = super(CachingThreadedResolver, self).getHostByName(name, timeout)
25 d.addCallback(self._cache_result, name)
26 return d
27
28 def _cache_result(self, result, name):
29 dnscache[name] = result
30 return result
31
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scrapy/resolver.py b/scrapy/resolver.py
--- a/scrapy/resolver.py
+++ b/scrapy/resolver.py
@@ -22,7 +22,8 @@
# to enforce Scrapy's DNS_TIMEOUT setting's value
timeout = (self.timeout,)
d = super(CachingThreadedResolver, self).getHostByName(name, timeout)
- d.addCallback(self._cache_result, name)
+ if dnscache.limit:
+ d.addCallback(self._cache_result, name)
return d
def _cache_result(self, result, name):
| {"golden_diff": "diff --git a/scrapy/resolver.py b/scrapy/resolver.py\n--- a/scrapy/resolver.py\n+++ b/scrapy/resolver.py\n@@ -22,7 +22,8 @@\n # to enforce Scrapy's DNS_TIMEOUT setting's value\n timeout = (self.timeout,)\n d = super(CachingThreadedResolver, self).getHostByName(name, timeout)\n- d.addCallback(self._cache_result, name)\n+ if dnscache.limit:\n+ d.addCallback(self._cache_result, name)\n return d\n \n def _cache_result(self, result, name):\n", "issue": "DNSCACHE_ENABLED=False not working\nOriginally reported by @softwarevamp on [StackOverflow](https://stackoverflow.com/questions/44877296/scrapy-with-dnscache-enabled-false-not-working):\r\n\r\n> When i run scrapy shell with `DNSCACHE_ENABLED=False` got\r\n```\r\nKeyError: 'dictionary is empty'\r\ntwisted.internet.error.DNSLookupError: DNS lookup failed: no results for hostname lookup: www.mydomain.com.\r\n```\r\n\r\n```\r\n 2017-07-03 03:09:12 [twisted] CRITICAL: while looking up www.mydomain.com with <scrapy.resolver.CachingThreadedResolver object at 0x3fd0050>\r\n Traceback (most recent call last):\r\n File \"/usr/lib64/python2.7/site-packages/twisted/internet/defer.py\", line 653, in _runCallbacks\r\n current.result = callback(current.result, *args, **kw)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/resolver.py\", line 29, in _cache_result\r\n dnscache[name] = result\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/utils/datatypes.py\", line 305, in __setitem__\r\n self.popitem(last=False)\r\n File \"/usr/lib64/python2.7/collections.py\", line 159, in popitem\r\n raise KeyError('dictionary is empty')\r\n KeyError: 'dictionary is empty'\r\n 2017-07-03 03:09:12 [scrapy.downloadermiddlewares.retry] DEBUG: Gave up retrying <GET //www.mydomain.com/> (failed 3 times): DNS lookup failed: no results for hostname lookup: www.mydomain.com.\r\n Traceback (most recent call last):\r\n File \"/usr/bin/scrapy\", line 11, in <module>\r\n sys.exit(execute())\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/cmdline.py\", line 149, in execute\r\n _run_print_help(parser, _run_command, cmd, args, opts)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/cmdline.py\", line 89, in _run_print_help\r\n func(*a, **kw)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/cmdline.py\", line 156, in _run_command\r\n cmd.run(args, opts)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/commands/shell.py\", line 73, in run\r\n shell.start(url=url, redirect=not opts.no_redirect)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/shell.py\", line 48, in start\r\n self.fetch(url, spider, redirect=redirect)\r\n File \"/usr/lib64/python2.7/site-packages/scrapy/shell.py\", line 115, in fetch\r\n reactor, self._schedule, request, spider)\r\n File \"/usr/lib64/python2.7/site-packages/twisted/internet/threads.py\", line 122, in blockingCallFromThread\r\n result.raiseException()\r\n File \"<string>\", line 2, in raiseException\r\n twisted.internet.error.DNSLookupError: DNS lookup failed: no results for hostname lookup: www.mydomain.com.\r\n```\r\n\r\n> Any thoughts welcome\n", "before_files": [{"content": "from twisted.internet import defer\nfrom twisted.internet.base import ThreadedResolver\n\nfrom scrapy.utils.datatypes import LocalCache\n\n# TODO: cache misses\n\ndnscache = LocalCache(10000)\n\nclass CachingThreadedResolver(ThreadedResolver):\n def __init__(self, reactor, cache_size, timeout):\n super(CachingThreadedResolver, self).__init__(reactor)\n dnscache.limit = cache_size\n self.timeout = timeout\n\n def getHostByName(self, name, 
timeout=None):\n if name in dnscache:\n return defer.succeed(dnscache[name])\n # in Twisted<=16.6, getHostByName() is always called with\n # a default timeout of 60s (actually passed as (1, 3, 11, 45) tuple),\n # so the input argument above is simply overridden\n # to enforce Scrapy's DNS_TIMEOUT setting's value\n timeout = (self.timeout,)\n d = super(CachingThreadedResolver, self).getHostByName(name, timeout)\n d.addCallback(self._cache_result, name)\n return d\n\n def _cache_result(self, result, name):\n dnscache[name] = result\n return result\n", "path": "scrapy/resolver.py"}], "after_files": [{"content": "from twisted.internet import defer\nfrom twisted.internet.base import ThreadedResolver\n\nfrom scrapy.utils.datatypes import LocalCache\n\n# TODO: cache misses\n\ndnscache = LocalCache(10000)\n\nclass CachingThreadedResolver(ThreadedResolver):\n def __init__(self, reactor, cache_size, timeout):\n super(CachingThreadedResolver, self).__init__(reactor)\n dnscache.limit = cache_size\n self.timeout = timeout\n\n def getHostByName(self, name, timeout=None):\n if name in dnscache:\n return defer.succeed(dnscache[name])\n # in Twisted<=16.6, getHostByName() is always called with\n # a default timeout of 60s (actually passed as (1, 3, 11, 45) tuple),\n # so the input argument above is simply overridden\n # to enforce Scrapy's DNS_TIMEOUT setting's value\n timeout = (self.timeout,)\n d = super(CachingThreadedResolver, self).getHostByName(name, timeout)\n if dnscache.limit:\n d.addCallback(self._cache_result, name)\n return d\n\n def _cache_result(self, result, name):\n dnscache[name] = result\n return result\n", "path": "scrapy/resolver.py"}]} | 1,334 | 130 |
gh_patches_debug_24499 | rasdani/github-patches | git_diff | pre-commit__pre-commit-797 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
python_venv language fails to use python3 interpreter and is using python2.7 instead
Apparently pre-commit failed to use the python3 interpreter when I tried to add a hook, and the install failed because the venv module is not available on the default python2.7!
```
$ pre-commit try-repo ../python-license-check [19:55:27]
[INFO] Initializing environment for ../python-license-check.
===============================================================================
Using config:
===============================================================================
repos:
- repo: ../python-license-check
rev: 4048cf3844dbbf45690c153a7da7f532585ec87c
hooks:
- id: liccheck
===============================================================================
[INFO] Installing environment for ../python-license-check.
[INFO] Once installed this environment will be reused.
[INFO] This may take a few minutes...
An unexpected error has occurred: CalledProcessError: Command: ('/Users/ssbarnea/.pyenv/versions/2.7.14/bin/python2.7', '-mvenv', '/var/folders/br/99tfdvcs3vvfwdk69z7f0xmc0000gn/T/tmpayl0P5/repoHa7_qe/py_venv-python2.7')
Return code: 1
Expected return code: 0
Output: (none)
Errors:
/Users/ssbarnea/.pyenv/versions/2.7.14/bin/python2.7: No module named venv
Check the log at /Users/ssbarnea/.cache/pre-commit/pre-commit.log
FAIL: 1
ssbarnea@smac: ~/os/jira master ⚡ $ cat ../python-license-check/.pre-commit-hooks.yaml [19:55:34]
- id: liccheck
name: Validates dependency licenses for Python packages
description: This validator validates a pre-commit hooks manifest file
entry: liccheck -s setup.cfg -r requirements.txt
language: python_venv
```
Based on the documentation I was expecting to see pre-commit using the `python3` executable for calling venv module.
--- END ISSUE ---
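The root cause is that the `python_venv` language reuses `get_default_version` from the plain `python` language, which points back at the interpreter pre-commit itself runs under — here a 2.7 pyenv build with no `venv` module. A minimal sketch of the intended fallback (the final `return` is a stand-in for the `python` language's own lookup):

```python
import sys

def get_default_version():
    # venv only ships with Python 3; if pre-commit itself runs on 2.x,
    # ask for a separate python3 executable instead of sys.executable.
    if sys.version_info < (3,):
        return 'python3'
    return sys.executable  # stand-in for pre_commit.languages.python.get_default_version()

print(get_default_version())
```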
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/languages/python_venv.py`
Content:
```
1 from __future__ import unicode_literals
2
3 import os.path
4
5 from pre_commit.languages import python
6 from pre_commit.util import CalledProcessError
7 from pre_commit.util import cmd_output
8
9
10 ENVIRONMENT_DIR = 'py_venv'
11
12
13 def orig_py_exe(exe): # pragma: no cover (platform specific)
14 """A -mvenv virtualenv made from a -mvirtualenv virtualenv installs
15 packages to the incorrect location. Attempt to find the _original_ exe
16 and invoke `-mvenv` from there.
17
18 See:
19 - https://github.com/pre-commit/pre-commit/issues/755
20 - https://github.com/pypa/virtualenv/issues/1095
21 - https://bugs.python.org/issue30811
22 """
23 try:
24 prefix_script = 'import sys; print(sys.real_prefix)'
25 _, prefix, _ = cmd_output(exe, '-c', prefix_script)
26 prefix = prefix.strip()
27 except CalledProcessError:
28 # not created from -mvirtualenv
29 return exe
30
31 if os.name == 'nt':
32 expected = os.path.join(prefix, 'python.exe')
33 else:
34 expected = os.path.join(prefix, 'bin', os.path.basename(exe))
35
36 if os.path.exists(expected):
37 return expected
38 else:
39 return exe
40
41
42 def make_venv(envdir, python):
43 cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')
44
45
46 get_default_version = python.get_default_version
47 _interface = python.py_interface(ENVIRONMENT_DIR, make_venv)
48 in_env, healthy, run_hook, install_environment = _interface
49
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pre_commit/languages/python_venv.py b/pre_commit/languages/python_venv.py
--- a/pre_commit/languages/python_venv.py
+++ b/pre_commit/languages/python_venv.py
@@ -1,6 +1,7 @@
from __future__ import unicode_literals
import os.path
+import sys
from pre_commit.languages import python
from pre_commit.util import CalledProcessError
@@ -10,6 +11,13 @@
ENVIRONMENT_DIR = 'py_venv'
+def get_default_version(): # pragma: no cover (version specific)
+ if sys.version_info < (3,):
+ return 'python3'
+ else:
+ return python.get_default_version()
+
+
def orig_py_exe(exe): # pragma: no cover (platform specific)
"""A -mvenv virtualenv made from a -mvirtualenv virtualenv installs
packages to the incorrect location. Attempt to find the _original_ exe
@@ -43,6 +51,5 @@
cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')
-get_default_version = python.get_default_version
_interface = python.py_interface(ENVIRONMENT_DIR, make_venv)
in_env, healthy, run_hook, install_environment = _interface
| {"golden_diff": "diff --git a/pre_commit/languages/python_venv.py b/pre_commit/languages/python_venv.py\n--- a/pre_commit/languages/python_venv.py\n+++ b/pre_commit/languages/python_venv.py\n@@ -1,6 +1,7 @@\n from __future__ import unicode_literals\n \n import os.path\n+import sys\n \n from pre_commit.languages import python\n from pre_commit.util import CalledProcessError\n@@ -10,6 +11,13 @@\n ENVIRONMENT_DIR = 'py_venv'\n \n \n+def get_default_version(): # pragma: no cover (version specific)\n+ if sys.version_info < (3,):\n+ return 'python3'\n+ else:\n+ return python.get_default_version()\n+\n+\n def orig_py_exe(exe): # pragma: no cover (platform specific)\n \"\"\"A -mvenv virtualenv made from a -mvirtualenv virtualenv installs\n packages to the incorrect location. Attempt to find the _original_ exe\n@@ -43,6 +51,5 @@\n cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')\n \n \n-get_default_version = python.get_default_version\n _interface = python.py_interface(ENVIRONMENT_DIR, make_venv)\n in_env, healthy, run_hook, install_environment = _interface\n", "issue": "python_venv language fails to use python3 interpreter and is using python2.7 instead\nApparently pre-commit failed to use python3 interpreter when I tried to add a hook and thus failed because venv module was not installed on default python2.7!\r\n\r\n```\r\n$ pre-commit try-repo ../python-license-check [19:55:27]\r\n[INFO] Initializing environment for ../python-license-check.\r\n===============================================================================\r\nUsing config:\r\n===============================================================================\r\nrepos:\r\n- repo: ../python-license-check\r\n rev: 4048cf3844dbbf45690c153a7da7f532585ec87c\r\n hooks:\r\n - id: liccheck\r\n===============================================================================\r\n[INFO] Installing environment for ../python-license-check.\r\n[INFO] Once installed this environment will be reused.\r\n[INFO] This may take a few minutes...\r\nAn unexpected error has occurred: CalledProcessError: Command: ('/Users/ssbarnea/.pyenv/versions/2.7.14/bin/python2.7', '-mvenv', '/var/folders/br/99tfdvcs3vvfwdk69z7f0xmc0000gn/T/tmpayl0P5/repoHa7_qe/py_venv-python2.7')\r\nReturn code: 1\r\nExpected return code: 0\r\nOutput: (none)\r\nErrors:\r\n /Users/ssbarnea/.pyenv/versions/2.7.14/bin/python2.7: No module named venv\r\n\r\n\r\nCheck the log at /Users/ssbarnea/.cache/pre-commit/pre-commit.log\r\nFAIL: 1\r\nssbarnea@smac: ~/os/jira master \u26a1 $ cat ../python-license-check/.pre-commit-hooks.yaml [19:55:34]\r\n- id: liccheck\r\n name: Validates dependency licenses for Python packages\r\n description: This validator validates a pre-commit hooks manifest file\r\n entry: liccheck -s setup.cfg -r requirements.txt\r\n language: python_venv\r\n```\r\n\r\nBased on the documentation I was expecting to see pre-commit using the `python3` executable for calling venv module. \n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport os.path\n\nfrom pre_commit.languages import python\nfrom pre_commit.util import CalledProcessError\nfrom pre_commit.util import cmd_output\n\n\nENVIRONMENT_DIR = 'py_venv'\n\n\ndef orig_py_exe(exe): # pragma: no cover (platform specific)\n \"\"\"A -mvenv virtualenv made from a -mvirtualenv virtualenv installs\n packages to the incorrect location. 
Attempt to find the _original_ exe\n and invoke `-mvenv` from there.\n\n See:\n - https://github.com/pre-commit/pre-commit/issues/755\n - https://github.com/pypa/virtualenv/issues/1095\n - https://bugs.python.org/issue30811\n \"\"\"\n try:\n prefix_script = 'import sys; print(sys.real_prefix)'\n _, prefix, _ = cmd_output(exe, '-c', prefix_script)\n prefix = prefix.strip()\n except CalledProcessError:\n # not created from -mvirtualenv\n return exe\n\n if os.name == 'nt':\n expected = os.path.join(prefix, 'python.exe')\n else:\n expected = os.path.join(prefix, 'bin', os.path.basename(exe))\n\n if os.path.exists(expected):\n return expected\n else:\n return exe\n\n\ndef make_venv(envdir, python):\n cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')\n\n\nget_default_version = python.get_default_version\n_interface = python.py_interface(ENVIRONMENT_DIR, make_venv)\nin_env, healthy, run_hook, install_environment = _interface\n", "path": "pre_commit/languages/python_venv.py"}], "after_files": [{"content": "from __future__ import unicode_literals\n\nimport os.path\nimport sys\n\nfrom pre_commit.languages import python\nfrom pre_commit.util import CalledProcessError\nfrom pre_commit.util import cmd_output\n\n\nENVIRONMENT_DIR = 'py_venv'\n\n\ndef get_default_version(): # pragma: no cover (version specific)\n if sys.version_info < (3,):\n return 'python3'\n else:\n return python.get_default_version()\n\n\ndef orig_py_exe(exe): # pragma: no cover (platform specific)\n \"\"\"A -mvenv virtualenv made from a -mvirtualenv virtualenv installs\n packages to the incorrect location. Attempt to find the _original_ exe\n and invoke `-mvenv` from there.\n\n See:\n - https://github.com/pre-commit/pre-commit/issues/755\n - https://github.com/pypa/virtualenv/issues/1095\n - https://bugs.python.org/issue30811\n \"\"\"\n try:\n prefix_script = 'import sys; print(sys.real_prefix)'\n _, prefix, _ = cmd_output(exe, '-c', prefix_script)\n prefix = prefix.strip()\n except CalledProcessError:\n # not created from -mvirtualenv\n return exe\n\n if os.name == 'nt':\n expected = os.path.join(prefix, 'python.exe')\n else:\n expected = os.path.join(prefix, 'bin', os.path.basename(exe))\n\n if os.path.exists(expected):\n return expected\n else:\n return exe\n\n\ndef make_venv(envdir, python):\n cmd_output(orig_py_exe(python), '-mvenv', envdir, cwd='/')\n\n\n_interface = python.py_interface(ENVIRONMENT_DIR, make_venv)\nin_env, healthy, run_hook, install_environment = _interface\n", "path": "pre_commit/languages/python_venv.py"}]} | 1,195 | 288 |
gh_patches_debug_18504 | rasdani/github-patches | git_diff | open-mmlab__mmdetection-2296 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ImportError: cannot import name 'CARAFENAIVE' from 'mmdet.ops.carafe'
The module name 'CARAFENAIVE' in file 'mmdet.ops.carafe.grad_check.py' should be 'CARAFENaive'. When I run the command 'python mmdet/ops/carafe/grad_check.py', the following error is reported: ImportError: cannot import name 'CARAFENAIVE' from 'mmdet.ops.carafe'.
--- END ISSUE ---
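A note before the files: the fix is purely a casing one. A minimal sketch of the corrected import (class names taken from the issue and the package's exports; everything else assumed):
```python
# CARAFENaive (mixed case) is the name the package actually exports;
# importing it directly fails fast if the casing is wrong.
from mmdet.ops.carafe import CARAFE, CARAFENaive
from mmdet.ops.carafe import carafe, carafe_naive
```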
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mmdet/ops/carafe/grad_check.py`
Content:
```
1 import os.path as osp
2 import sys
3
4 import mmcv
5 import torch
6 from torch.autograd import gradcheck
7
8 sys.path.append(osp.abspath(osp.join(__file__, '../../')))
9 from mmdet.ops.carafe import CARAFENAIVE # noqa: E402, isort:skip
10 from mmdet.ops.carafe import carafe_naive # noqa: E402, isort:skip
11 from mmdet.ops.carafe import carafe, CARAFE # noqa: E402, isort:skip
12
13 feat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()
14 mask = torch.randn(
15 2, 100, 6, 6, requires_grad=True, device='cuda:0').sigmoid().double()
16
17 print('Gradcheck for carafe...')
18 test = gradcheck(CARAFE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)
19 print(test)
20
21 print('Gradcheck for carafe naive...')
22 test = gradcheck(CARAFENAIVE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)
23 print(test)
24
25 feat = torch.randn(
26 2, 1024, 100, 100, requires_grad=True, device='cuda:0').float()
27 mask = torch.randn(
28 2, 25, 200, 200, requires_grad=True, device='cuda:0').sigmoid().float()
29 loop_num = 500
30
31 time_forward = 0
32 time_backward = 0
33 bar = mmcv.ProgressBar(loop_num)
34 timer = mmcv.Timer()
35 for i in range(loop_num):
36 x = carafe(feat.clone(), mask.clone(), 5, 1, 2)
37 torch.cuda.synchronize()
38 time_forward += timer.since_last_check()
39 x.sum().backward(retain_graph=True)
40 torch.cuda.synchronize()
41 time_backward += timer.since_last_check()
42 bar.update()
43 print('\nCARAFE time forward: {} ms/iter | time backward: {} ms/iter'.format(
44 (time_forward + 1e-3) * 1e3 / loop_num,
45 (time_backward + 1e-3) * 1e3 / loop_num))
46
47 time_naive_forward = 0
48 time_naive_backward = 0
49 bar = mmcv.ProgressBar(loop_num)
50 timer = mmcv.Timer()
51 for i in range(loop_num):
52 x = carafe_naive(feat.clone(), mask.clone(), 5, 1, 2)
53 torch.cuda.synchronize()
54 time_naive_forward += timer.since_last_check()
55 x.sum().backward(retain_graph=True)
56 torch.cuda.synchronize()
57 time_naive_backward += timer.since_last_check()
58 bar.update()
59 print('\nCARAFE naive time forward: {} ms/iter | time backward: {} ms/iter'.
60 format((time_naive_forward + 1e-3) * 1e3 / loop_num,
61 (time_naive_backward + 1e-3) * 1e3 / loop_num))
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mmdet/ops/carafe/grad_check.py b/mmdet/ops/carafe/grad_check.py
--- a/mmdet/ops/carafe/grad_check.py
+++ b/mmdet/ops/carafe/grad_check.py
@@ -6,9 +6,8 @@
from torch.autograd import gradcheck
sys.path.append(osp.abspath(osp.join(__file__, '../../')))
-from mmdet.ops.carafe import CARAFENAIVE # noqa: E402, isort:skip
-from mmdet.ops.carafe import carafe_naive # noqa: E402, isort:skip
-from mmdet.ops.carafe import carafe, CARAFE # noqa: E402, isort:skip
+from mmdet.ops.carafe import CARAFE, CARAFENaive # noqa: E402, isort:skip
+from mmdet.ops.carafe import carafe, carafe_naive # noqa: E402, isort:skip
feat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()
mask = torch.randn(
@@ -19,7 +18,7 @@
print(test)
print('Gradcheck for carafe naive...')
-test = gradcheck(CARAFENAIVE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)
+test = gradcheck(CARAFENaive(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)
print(test)
feat = torch.randn(
| {"golden_diff": "diff --git a/mmdet/ops/carafe/grad_check.py b/mmdet/ops/carafe/grad_check.py\n--- a/mmdet/ops/carafe/grad_check.py\n+++ b/mmdet/ops/carafe/grad_check.py\n@@ -6,9 +6,8 @@\n from torch.autograd import gradcheck\n \n sys.path.append(osp.abspath(osp.join(__file__, '../../')))\n-from mmdet.ops.carafe import CARAFENAIVE # noqa: E402, isort:skip\n-from mmdet.ops.carafe import carafe_naive # noqa: E402, isort:skip\n-from mmdet.ops.carafe import carafe, CARAFE # noqa: E402, isort:skip\n+from mmdet.ops.carafe import CARAFE, CARAFENaive # noqa: E402, isort:skip\n+from mmdet.ops.carafe import carafe, carafe_naive # noqa: E402, isort:skip\n \n feat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()\n mask = torch.randn(\n@@ -19,7 +18,7 @@\n print(test)\n \n print('Gradcheck for carafe naive...')\n-test = gradcheck(CARAFENAIVE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\n+test = gradcheck(CARAFENaive(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\n print(test)\n \n feat = torch.randn(\n", "issue": "ImportError: cannot import name 'CARAFENAIVE' from 'mmdet.ops.carafe'\nThe module name 'CARAFENAIVE' in file 'mmdet.ops.carafe.grad_check.py' shoud be 'CARAFENaive'. When I run this command 'python mmdet/ops/carafe/grad_check.py', the following error is reported: ImportError: cannot import name 'CARAFENAIVE' from 'mmdet.ops.carafe'.\n", "before_files": [{"content": "import os.path as osp\nimport sys\n\nimport mmcv\nimport torch\nfrom torch.autograd import gradcheck\n\nsys.path.append(osp.abspath(osp.join(__file__, '../../')))\nfrom mmdet.ops.carafe import CARAFENAIVE # noqa: E402, isort:skip\nfrom mmdet.ops.carafe import carafe_naive # noqa: E402, isort:skip\nfrom mmdet.ops.carafe import carafe, CARAFE # noqa: E402, isort:skip\n\nfeat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()\nmask = torch.randn(\n 2, 100, 6, 6, requires_grad=True, device='cuda:0').sigmoid().double()\n\nprint('Gradcheck for carafe...')\ntest = gradcheck(CARAFE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\nprint(test)\n\nprint('Gradcheck for carafe naive...')\ntest = gradcheck(CARAFENAIVE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\nprint(test)\n\nfeat = torch.randn(\n 2, 1024, 100, 100, requires_grad=True, device='cuda:0').float()\nmask = torch.randn(\n 2, 25, 200, 200, requires_grad=True, device='cuda:0').sigmoid().float()\nloop_num = 500\n\ntime_forward = 0\ntime_backward = 0\nbar = mmcv.ProgressBar(loop_num)\ntimer = mmcv.Timer()\nfor i in range(loop_num):\n x = carafe(feat.clone(), mask.clone(), 5, 1, 2)\n torch.cuda.synchronize()\n time_forward += timer.since_last_check()\n x.sum().backward(retain_graph=True)\n torch.cuda.synchronize()\n time_backward += timer.since_last_check()\n bar.update()\nprint('\\nCARAFE time forward: {} ms/iter | time backward: {} ms/iter'.format(\n (time_forward + 1e-3) * 1e3 / loop_num,\n (time_backward + 1e-3) * 1e3 / loop_num))\n\ntime_naive_forward = 0\ntime_naive_backward = 0\nbar = mmcv.ProgressBar(loop_num)\ntimer = mmcv.Timer()\nfor i in range(loop_num):\n x = carafe_naive(feat.clone(), mask.clone(), 5, 1, 2)\n torch.cuda.synchronize()\n time_naive_forward += timer.since_last_check()\n x.sum().backward(retain_graph=True)\n torch.cuda.synchronize()\n time_naive_backward += timer.since_last_check()\n bar.update()\nprint('\\nCARAFE naive time forward: {} ms/iter | time backward: {} ms/iter'.\n format((time_naive_forward + 1e-3) * 1e3 / loop_num,\n (time_naive_backward + 1e-3) * 1e3 / loop_num))\n", "path": 
"mmdet/ops/carafe/grad_check.py"}], "after_files": [{"content": "import os.path as osp\nimport sys\n\nimport mmcv\nimport torch\nfrom torch.autograd import gradcheck\n\nsys.path.append(osp.abspath(osp.join(__file__, '../../')))\nfrom mmdet.ops.carafe import CARAFE, CARAFENaive # noqa: E402, isort:skip\nfrom mmdet.ops.carafe import carafe, carafe_naive # noqa: E402, isort:skip\n\nfeat = torch.randn(2, 64, 3, 3, requires_grad=True, device='cuda:0').double()\nmask = torch.randn(\n 2, 100, 6, 6, requires_grad=True, device='cuda:0').sigmoid().double()\n\nprint('Gradcheck for carafe...')\ntest = gradcheck(CARAFE(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\nprint(test)\n\nprint('Gradcheck for carafe naive...')\ntest = gradcheck(CARAFENaive(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)\nprint(test)\n\nfeat = torch.randn(\n 2, 1024, 100, 100, requires_grad=True, device='cuda:0').float()\nmask = torch.randn(\n 2, 25, 200, 200, requires_grad=True, device='cuda:0').sigmoid().float()\nloop_num = 500\n\ntime_forward = 0\ntime_backward = 0\nbar = mmcv.ProgressBar(loop_num)\ntimer = mmcv.Timer()\nfor i in range(loop_num):\n x = carafe(feat.clone(), mask.clone(), 5, 1, 2)\n torch.cuda.synchronize()\n time_forward += timer.since_last_check()\n x.sum().backward(retain_graph=True)\n torch.cuda.synchronize()\n time_backward += timer.since_last_check()\n bar.update()\nprint('\\nCARAFE time forward: {} ms/iter | time backward: {} ms/iter'.format(\n (time_forward + 1e-3) * 1e3 / loop_num,\n (time_backward + 1e-3) * 1e3 / loop_num))\n\ntime_naive_forward = 0\ntime_naive_backward = 0\nbar = mmcv.ProgressBar(loop_num)\ntimer = mmcv.Timer()\nfor i in range(loop_num):\n x = carafe_naive(feat.clone(), mask.clone(), 5, 1, 2)\n torch.cuda.synchronize()\n time_naive_forward += timer.since_last_check()\n x.sum().backward(retain_graph=True)\n torch.cuda.synchronize()\n time_naive_backward += timer.since_last_check()\n bar.update()\nprint('\\nCARAFE naive time forward: {} ms/iter | time backward: {} ms/iter'.\n format((time_naive_forward + 1e-3) * 1e3 / loop_num,\n (time_naive_backward + 1e-3) * 1e3 / loop_num))\n", "path": "mmdet/ops/carafe/grad_check.py"}]} | 1,191 | 381 |
gh_patches_debug_19435 | rasdani/github-patches | git_diff | Pylons__pyramid-3457 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pyramid.exceptions.ConfigurationConflictError: <exception str() failed>
**Describe the bug**
While building an app I caused an error whose traceback ended with the following line:
```
pyramid.exceptions.ConfigurationConflictError: <exception str() failed>
```
What caused the error in the first place was that I copied a Python package containing my views to a new package called controllers and forgot to delete the original views package. I assume that the configurator failed while trying to commit the config.scan(). Since I couldn't find any information online about the above traceback message, I assume that this is probably just some internal problem with Pyramid.
**To Reproduce**
download from: https://github.com/benkawecki/pypi/tree/error
after setting up, run
```
pserve development.ini
```
**Expected behavior**
I expect there to be an error message.
**Screenshots**
Screenshot of the error message:
<img width="489" alt="screen shot 2019-01-15 at 10 02 44 pm" src="https://user-images.githubusercontent.com/39999125/51224413-c57eb800-1913-11e9-9e0f-b25878a479f5.png">
Screenshot of installed packages:
<img width="488" alt="screen shot 2019-01-15 at 10 24 42 pm" src="https://user-images.githubusercontent.com/39999125/51224563-8b61e600-1914-11e9-9b04-42936f94d4bd.png">
**Additional context**
I'm looking to help out in open source more this year, so if this is an easy fix I would love to see if I can do it!
--- END ISSUE ---
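One plausible root cause, shown as a standalone sketch with hypothetical data: on Python 3, sorting conflict discriminators of mixed types raises `TypeError`, and an exception raised inside `__str__` is exactly what the interpreter reports as `<exception str() failed>`.
```python
# Hypothetical discriminators of incomparable types, as can happen when two
# packages (e.g. a leftover 'views' and a new 'controllers') both register.
conflicts = {("view", "home"): ["info-a"], None: ["info-b"]}
try:
    sorted(conflicts.items())  # what ConfigurationConflictError.__str__ attempts
except TypeError as exc:
    print(exc)  # e.g. "'<' not supported between instances of ..."
```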
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/pyramid/exceptions.py`
Content:
```
1 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPForbidden
2
3 NotFound = HTTPNotFound # bw compat
4 Forbidden = HTTPForbidden # bw compat
5
6 CR = '\n'
7
8
9 class BadCSRFOrigin(HTTPBadRequest):
10 """
11 This exception indicates the request has failed cross-site request forgery
12 origin validation.
13 """
14
15 title = "Bad CSRF Origin"
16 explanation = (
17 "Access is denied. This server can not verify that the origin or "
18 "referrer of your request matches the current site. Either your "
19 "browser supplied the wrong Origin or Referrer or it did not supply "
20 "one at all."
21 )
22
23
24 class BadCSRFToken(HTTPBadRequest):
25 """
26 This exception indicates the request has failed cross-site request
27 forgery token validation.
28 """
29
30 title = 'Bad CSRF Token'
31 explanation = (
32 'Access is denied. This server can not verify that your cross-site '
33 'request forgery token belongs to your login session. Either you '
34 'supplied the wrong cross-site request forgery token or your session '
35 'no longer exists. This may be due to session timeout or because '
36 'browser is not supplying the credentials required, as can happen '
37 'when the browser has cookies turned off.'
38 )
39
40
41 class PredicateMismatch(HTTPNotFound):
42 """
43 This exception is raised by multiviews when no view matches
44 all given predicates.
45
46 This exception subclasses the :class:`HTTPNotFound` exception for a
47 specific reason: if it reaches the main exception handler, it should
48 be treated as :class:`HTTPNotFound`` by any exception view
49 registrations. Thus, typically, this exception will not be seen
50 publicly.
51
52 However, this exception will be raised if the predicates of all
53 views configured to handle another exception context cannot be
54 successfully matched. For instance, if a view is configured to
55 handle a context of ``HTTPForbidden`` and the configured with
56 additional predicates, then :class:`PredicateMismatch` will be
57 raised if:
58
59 * An original view callable has raised :class:`HTTPForbidden` (thus
60 invoking an exception view); and
61 * The given request fails to match all predicates for said
62 exception view associated with :class:`HTTPForbidden`.
63
64 The same applies to any type of exception being handled by an
65 exception view.
66 """
67
68
69 class URLDecodeError(UnicodeDecodeError):
70 """
71 This exception is raised when :app:`Pyramid` cannot
72 successfully decode a URL or a URL path segment. This exception
73 behaves just like the Python builtin
74 :exc:`UnicodeDecodeError`. It is a subclass of the builtin
75 :exc:`UnicodeDecodeError` exception only for identity purposes,
76 mostly so an exception view can be registered when a URL cannot be
77 decoded.
78 """
79
80
81 class ConfigurationError(Exception):
82 """ Raised when inappropriate input values are supplied to an API
83 method of a :term:`Configurator`"""
84
85
86 class ConfigurationConflictError(ConfigurationError):
87 """ Raised when a configuration conflict is detected during action
88 processing"""
89
90 def __init__(self, conflicts):
91 self._conflicts = conflicts
92
93 def __str__(self):
94 r = ["Conflicting configuration actions"]
95 items = sorted(self._conflicts.items())
96 for discriminator, infos in items:
97 r.append(" For: %s" % (discriminator,))
98 for info in infos:
99 for line in str(info).rstrip().split(CR):
100 r.append(" " + line)
101
102 return CR.join(r)
103
104
105 class ConfigurationExecutionError(ConfigurationError):
106 """An error occurred during execution of a configuration action
107 """
108
109 def __init__(self, etype, evalue, info):
110 self.etype, self.evalue, self.info = etype, evalue, info
111
112 def __str__(self):
113 return "%s: %s\n in:\n %s" % (self.etype, self.evalue, self.info)
114
115
116 class CyclicDependencyError(Exception):
117 """ The exception raised when the Pyramid topological sorter detects a
118 cyclic dependency."""
119
120 def __init__(self, cycles):
121 self.cycles = cycles
122
123 def __str__(self):
124 L = []
125 cycles = self.cycles
126 for cycle in cycles:
127 dependent = cycle
128 dependees = cycles[cycle]
129 L.append('%r sorts before %r' % (dependent, dependees))
130 msg = 'Implicit ordering cycle:' + '; '.join(L)
131 return msg
132
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/pyramid/exceptions.py b/src/pyramid/exceptions.py
--- a/src/pyramid/exceptions.py
+++ b/src/pyramid/exceptions.py
@@ -3,8 +3,6 @@
NotFound = HTTPNotFound # bw compat
Forbidden = HTTPForbidden # bw compat
-CR = '\n'
-
class BadCSRFOrigin(HTTPBadRequest):
"""
@@ -92,14 +90,13 @@
def __str__(self):
r = ["Conflicting configuration actions"]
- items = sorted(self._conflicts.items())
- for discriminator, infos in items:
+ for discriminator, infos in self._conflicts.items():
r.append(" For: %s" % (discriminator,))
for info in infos:
- for line in str(info).rstrip().split(CR):
+ for line in str(info).rstrip().split('\n'):
r.append(" " + line)
- return CR.join(r)
+ return '\n'.join(r)
class ConfigurationExecutionError(ConfigurationError):
| {"golden_diff": "diff --git a/src/pyramid/exceptions.py b/src/pyramid/exceptions.py\n--- a/src/pyramid/exceptions.py\n+++ b/src/pyramid/exceptions.py\n@@ -3,8 +3,6 @@\n NotFound = HTTPNotFound # bw compat\n Forbidden = HTTPForbidden # bw compat\n \n-CR = '\\n'\n-\n \n class BadCSRFOrigin(HTTPBadRequest):\n \"\"\"\n@@ -92,14 +90,13 @@\n \n def __str__(self):\n r = [\"Conflicting configuration actions\"]\n- items = sorted(self._conflicts.items())\n- for discriminator, infos in items:\n+ for discriminator, infos in self._conflicts.items():\n r.append(\" For: %s\" % (discriminator,))\n for info in infos:\n- for line in str(info).rstrip().split(CR):\n+ for line in str(info).rstrip().split('\\n'):\n r.append(\" \" + line)\n \n- return CR.join(r)\n+ return '\\n'.join(r)\n \n \n class ConfigurationExecutionError(ConfigurationError):\n", "issue": "pyramid.exceptions.ConfigurationConflictError: <exception str() failed>\n**Describe the bug**\r\nWhile building an app I caused an error who's traceback ending the the following line:\r\n```\r\npyramid.exceptions.ConfigurationConflictError: <exception str() failed>\r\n```\r\nWhat caused the error in the first place was that I copied a python package containing my views to a new package called controllers and forgot to delete the original views package. I assume that the configurator failed while trying to commit the config.scan(). Since I couldn't find any information online about the above traceback message I assume that this is probably just some internal problem with Pyramid. \r\n**To Reproduce**\r\ndownload from: https://github.com/benkawecki/pypi/tree/error\r\nafter setting up run \r\n``` \r\npserve development.ini\r\n```\r\n\r\n**Expected behavior**\r\nI expect there to be an error message. \r\n\r\n**Screenshots**\r\nScreenshot of the error message:\r\n<img width=\"489\" alt=\"screen shot 2019-01-15 at 10 02 44 pm\" src=\"https://user-images.githubusercontent.com/39999125/51224413-c57eb800-1913-11e9-9e0f-b25878a479f5.png\">\r\nScreenshot of installed packages:\r\n<img width=\"488\" alt=\"screen shot 2019-01-15 at 10 24 42 pm\" src=\"https://user-images.githubusercontent.com/39999125/51224563-8b61e600-1914-11e9-9b04-42936f94d4bd.png\">\r\n\r\n\r\n\r\n**Additional context**\r\nI'm looking to help out in open-source more this year so if this is an easy fix I would love to see if I can do it!\r\n\n", "before_files": [{"content": "from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPForbidden\n\nNotFound = HTTPNotFound # bw compat\nForbidden = HTTPForbidden # bw compat\n\nCR = '\\n'\n\n\nclass BadCSRFOrigin(HTTPBadRequest):\n \"\"\"\n This exception indicates the request has failed cross-site request forgery\n origin validation.\n \"\"\"\n\n title = \"Bad CSRF Origin\"\n explanation = (\n \"Access is denied. This server can not verify that the origin or \"\n \"referrer of your request matches the current site. Either your \"\n \"browser supplied the wrong Origin or Referrer or it did not supply \"\n \"one at all.\"\n )\n\n\nclass BadCSRFToken(HTTPBadRequest):\n \"\"\"\n This exception indicates the request has failed cross-site request\n forgery token validation.\n \"\"\"\n\n title = 'Bad CSRF Token'\n explanation = (\n 'Access is denied. This server can not verify that your cross-site '\n 'request forgery token belongs to your login session. Either you '\n 'supplied the wrong cross-site request forgery token or your session '\n 'no longer exists. 
This may be due to session timeout or because '\n 'browser is not supplying the credentials required, as can happen '\n 'when the browser has cookies turned off.'\n )\n\n\nclass PredicateMismatch(HTTPNotFound):\n \"\"\"\n This exception is raised by multiviews when no view matches\n all given predicates.\n\n This exception subclasses the :class:`HTTPNotFound` exception for a\n specific reason: if it reaches the main exception handler, it should\n be treated as :class:`HTTPNotFound`` by any exception view\n registrations. Thus, typically, this exception will not be seen\n publicly.\n\n However, this exception will be raised if the predicates of all\n views configured to handle another exception context cannot be\n successfully matched. For instance, if a view is configured to\n handle a context of ``HTTPForbidden`` and the configured with\n additional predicates, then :class:`PredicateMismatch` will be\n raised if:\n\n * An original view callable has raised :class:`HTTPForbidden` (thus\n invoking an exception view); and\n * The given request fails to match all predicates for said\n exception view associated with :class:`HTTPForbidden`.\n\n The same applies to any type of exception being handled by an\n exception view.\n \"\"\"\n\n\nclass URLDecodeError(UnicodeDecodeError):\n \"\"\"\n This exception is raised when :app:`Pyramid` cannot\n successfully decode a URL or a URL path segment. This exception\n behaves just like the Python builtin\n :exc:`UnicodeDecodeError`. It is a subclass of the builtin\n :exc:`UnicodeDecodeError` exception only for identity purposes,\n mostly so an exception view can be registered when a URL cannot be\n decoded.\n \"\"\"\n\n\nclass ConfigurationError(Exception):\n \"\"\" Raised when inappropriate input values are supplied to an API\n method of a :term:`Configurator`\"\"\"\n\n\nclass ConfigurationConflictError(ConfigurationError):\n \"\"\" Raised when a configuration conflict is detected during action\n processing\"\"\"\n\n def __init__(self, conflicts):\n self._conflicts = conflicts\n\n def __str__(self):\n r = [\"Conflicting configuration actions\"]\n items = sorted(self._conflicts.items())\n for discriminator, infos in items:\n r.append(\" For: %s\" % (discriminator,))\n for info in infos:\n for line in str(info).rstrip().split(CR):\n r.append(\" \" + line)\n\n return CR.join(r)\n\n\nclass ConfigurationExecutionError(ConfigurationError):\n \"\"\"An error occurred during execution of a configuration action\n \"\"\"\n\n def __init__(self, etype, evalue, info):\n self.etype, self.evalue, self.info = etype, evalue, info\n\n def __str__(self):\n return \"%s: %s\\n in:\\n %s\" % (self.etype, self.evalue, self.info)\n\n\nclass CyclicDependencyError(Exception):\n \"\"\" The exception raised when the Pyramid topological sorter detects a\n cyclic dependency.\"\"\"\n\n def __init__(self, cycles):\n self.cycles = cycles\n\n def __str__(self):\n L = []\n cycles = self.cycles\n for cycle in cycles:\n dependent = cycle\n dependees = cycles[cycle]\n L.append('%r sorts before %r' % (dependent, dependees))\n msg = 'Implicit ordering cycle:' + '; '.join(L)\n return msg\n", "path": "src/pyramid/exceptions.py"}], "after_files": [{"content": "from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPForbidden\n\nNotFound = HTTPNotFound # bw compat\nForbidden = HTTPForbidden # bw compat\n\n\nclass BadCSRFOrigin(HTTPBadRequest):\n \"\"\"\n This exception indicates the request has failed cross-site request forgery\n origin validation.\n \"\"\"\n\n title = \"Bad CSRF 
Origin\"\n explanation = (\n \"Access is denied. This server can not verify that the origin or \"\n \"referrer of your request matches the current site. Either your \"\n \"browser supplied the wrong Origin or Referrer or it did not supply \"\n \"one at all.\"\n )\n\n\nclass BadCSRFToken(HTTPBadRequest):\n \"\"\"\n This exception indicates the request has failed cross-site request\n forgery token validation.\n \"\"\"\n\n title = 'Bad CSRF Token'\n explanation = (\n 'Access is denied. This server can not verify that your cross-site '\n 'request forgery token belongs to your login session. Either you '\n 'supplied the wrong cross-site request forgery token or your session '\n 'no longer exists. This may be due to session timeout or because '\n 'browser is not supplying the credentials required, as can happen '\n 'when the browser has cookies turned off.'\n )\n\n\nclass PredicateMismatch(HTTPNotFound):\n \"\"\"\n This exception is raised by multiviews when no view matches\n all given predicates.\n\n This exception subclasses the :class:`HTTPNotFound` exception for a\n specific reason: if it reaches the main exception handler, it should\n be treated as :class:`HTTPNotFound`` by any exception view\n registrations. Thus, typically, this exception will not be seen\n publicly.\n\n However, this exception will be raised if the predicates of all\n views configured to handle another exception context cannot be\n successfully matched. For instance, if a view is configured to\n handle a context of ``HTTPForbidden`` and the configured with\n additional predicates, then :class:`PredicateMismatch` will be\n raised if:\n\n * An original view callable has raised :class:`HTTPForbidden` (thus\n invoking an exception view); and\n * The given request fails to match all predicates for said\n exception view associated with :class:`HTTPForbidden`.\n\n The same applies to any type of exception being handled by an\n exception view.\n \"\"\"\n\n\nclass URLDecodeError(UnicodeDecodeError):\n \"\"\"\n This exception is raised when :app:`Pyramid` cannot\n successfully decode a URL or a URL path segment. This exception\n behaves just like the Python builtin\n :exc:`UnicodeDecodeError`. 
It is a subclass of the builtin\n :exc:`UnicodeDecodeError` exception only for identity purposes,\n mostly so an exception view can be registered when a URL cannot be\n decoded.\n \"\"\"\n\n\nclass ConfigurationError(Exception):\n \"\"\" Raised when inappropriate input values are supplied to an API\n method of a :term:`Configurator`\"\"\"\n\n\nclass ConfigurationConflictError(ConfigurationError):\n \"\"\" Raised when a configuration conflict is detected during action\n processing\"\"\"\n\n def __init__(self, conflicts):\n self._conflicts = conflicts\n\n def __str__(self):\n r = [\"Conflicting configuration actions\"]\n for discriminator, infos in self._conflicts.items():\n r.append(\" For: %s\" % (discriminator,))\n for info in infos:\n for line in str(info).rstrip().split('\\n'):\n r.append(\" \" + line)\n\n return '\\n'.join(r)\n\n\nclass ConfigurationExecutionError(ConfigurationError):\n \"\"\"An error occurred during execution of a configuration action\n \"\"\"\n\n def __init__(self, etype, evalue, info):\n self.etype, self.evalue, self.info = etype, evalue, info\n\n def __str__(self):\n return \"%s: %s\\n in:\\n %s\" % (self.etype, self.evalue, self.info)\n\n\nclass CyclicDependencyError(Exception):\n \"\"\" The exception raised when the Pyramid topological sorter detects a\n cyclic dependency.\"\"\"\n\n def __init__(self, cycles):\n self.cycles = cycles\n\n def __str__(self):\n L = []\n cycles = self.cycles\n for cycle in cycles:\n dependent = cycle\n dependees = cycles[cycle]\n L.append('%r sorts before %r' % (dependent, dependees))\n msg = 'Implicit ordering cycle:' + '; '.join(L)\n return msg\n", "path": "src/pyramid/exceptions.py"}]} | 1,953 | 229 |
gh_patches_debug_9269 | rasdani/github-patches | git_diff | autogluon__autogluon-2915 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Update scikit-learn-intelex version
- [ ] Check if scikit-learn-intelex can be upgraded.
--- END ISSUE ---
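For orientation, a hedged one-liner to see what is installed locally before widening the pin (package name from the issue; the check itself is only illustrative and needs Python 3.8+):
```python
# Illustrative only: print the installed scikit-learn-intelex version
# so it can be compared against the range in tabular/setup.py.
import importlib.metadata as metadata

print(metadata.version("scikit-learn-intelex"))
```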
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tabular/setup.py`
Content:
```
1 #!/usr/bin/env python
2 ###########################
3 # This code block is a HACK (!), but is necessary to avoid code duplication. Do NOT alter these lines.
4 import os
5 from setuptools import setup
6 import importlib.util
7 filepath = os.path.abspath(os.path.dirname(__file__))
8 filepath_import = os.path.join(filepath, '..', 'core', 'src', 'autogluon', 'core', '_setup_utils.py')
9 spec = importlib.util.spec_from_file_location("ag_min_dependencies", filepath_import)
10 ag = importlib.util.module_from_spec(spec)
11 # Identical to `from autogluon.core import _setup_utils as ag`, but works without `autogluon.core` being installed.
12 spec.loader.exec_module(ag)
13 ###########################
14
15 import sys
16
17 version = ag.load_version_file()
18 version = ag.update_version(version)
19
20 submodule = 'tabular'
21 install_requires = [
22 # version ranges added in ag.get_dependency_version_ranges()
23 'numpy', # version range defined in `core/_setup_utils.py`
24 'scipy', # version range defined in `core/_setup_utils.py`
25 'pandas', # version range defined in `core/_setup_utils.py`
26 'scikit-learn', # version range defined in `core/_setup_utils.py`
27 'networkx', # version range defined in `core/_setup_utils.py`
28 f'{ag.PACKAGE_NAME}.core=={version}',
29 f'{ag.PACKAGE_NAME}.features=={version}',
30 ]
31
32 extras_require = {
33 'lightgbm': [
34 'lightgbm>=3.3,<3.4',
35 ],
36 'catboost': [
37 'catboost>=1.0,<1.2',
38 ],
39 # FIXME: Debug why xgboost 1.6 has 4x+ slower inference on multiclass datasets compared to 1.4
40 # It is possibly only present on MacOS, haven't tested linux.
41 # XGBoost made API breaking changes in 1.6 with custom metric and callback support, so we don't support older versions.
42 'xgboost': [
43 'xgboost>=1.6,<1.8',
44 ],
45 'fastai': [
46 'torch>=1.9,<1.14',
47 'fastai>=2.3.1,<2.8',
48 ],
49 'ray': [
50 f'{ag.PACKAGE_NAME}.core[all]=={version}',
51 ],
52 'skex': [
53 'scikit-learn-intelex>=2021.6,<2021.8',
54 ],
55 'imodels': [
56 'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147
57 ],
58 'vowpalwabbit': [
59 # FIXME: 9.5+ causes VW to save an empty model which always predicts 0. Confirmed on MacOS (Intel CPU). Unknown how to fix.
60 'vowpalwabbit>=9,<9.5',
61 ],
62 'skl2onnx': [
63 'skl2onnx>=1.13.0,<1.14.0',
64 # For macOS, there isn't a onnxruntime-gpu package installed with skl2onnx.
65 # Therefore, we install onnxruntime explicitly here just for macOS.
66 'onnxruntime>=1.13.0,<1.14.0'
67 ] if sys.platform == 'darwin' else [
68 'skl2onnx>=1.13.0,<1.14.0'
69 ]
70 }
71
72 all_requires = []
73 # TODO: Consider adding 'skex' to 'all'
74 for extra_package in ['lightgbm', 'catboost', 'xgboost', 'fastai', 'ray']:
75 all_requires += extras_require[extra_package]
76 all_requires = list(set(all_requires))
77 extras_require['all'] = all_requires
78
79
80 test_requires = []
81 for test_package in ['imodels', 'vowpalwabbit', 'skl2onnx']:
82 test_requires += extras_require[test_package]
83 extras_require['tests'] = test_requires
84 install_requires = ag.get_dependency_version_ranges(install_requires)
85
86 if __name__ == '__main__':
87 ag.create_version_file(version=version, submodule=submodule)
88 setup_args = ag.default_setup_args(version=version, submodule=submodule)
89 setup(
90 install_requires=install_requires,
91 extras_require=extras_require,
92 **setup_args,
93 )
94
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/tabular/setup.py b/tabular/setup.py
--- a/tabular/setup.py
+++ b/tabular/setup.py
@@ -50,7 +50,8 @@
f'{ag.PACKAGE_NAME}.core[all]=={version}',
],
'skex': [
- 'scikit-learn-intelex>=2021.6,<2021.8',
+ # Note: 2021.7 released on Sep 2022, version 2022.x doesn't exist (went directly from 2021.7 to 2023.0)
+ 'scikit-learn-intelex>=2021.7,<2023.1',
],
'imodels': [
'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147
| {"golden_diff": "diff --git a/tabular/setup.py b/tabular/setup.py\n--- a/tabular/setup.py\n+++ b/tabular/setup.py\n@@ -50,7 +50,8 @@\n f'{ag.PACKAGE_NAME}.core[all]=={version}',\n ],\n 'skex': [\n- 'scikit-learn-intelex>=2021.6,<2021.8',\n+ # Note: 2021.7 released on Sep 2022, version 2022.x doesn't exist (went directly from 2021.7 to 2023.0)\n+ 'scikit-learn-intelex>=2021.7,<2023.1',\n ],\n 'imodels': [\n 'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147\n", "issue": "Update scikit-learn-intelex version\n- [ ] Check if scikit-learn-intelex can be upgraded.\n", "before_files": [{"content": "#!/usr/bin/env python\n###########################\n# This code block is a HACK (!), but is necessary to avoid code duplication. Do NOT alter these lines.\nimport os\nfrom setuptools import setup\nimport importlib.util\nfilepath = os.path.abspath(os.path.dirname(__file__))\nfilepath_import = os.path.join(filepath, '..', 'core', 'src', 'autogluon', 'core', '_setup_utils.py')\nspec = importlib.util.spec_from_file_location(\"ag_min_dependencies\", filepath_import)\nag = importlib.util.module_from_spec(spec)\n# Identical to `from autogluon.core import _setup_utils as ag`, but works without `autogluon.core` being installed.\nspec.loader.exec_module(ag)\n###########################\n\nimport sys\n\nversion = ag.load_version_file()\nversion = ag.update_version(version)\n\nsubmodule = 'tabular'\ninstall_requires = [\n # version ranges added in ag.get_dependency_version_ranges()\n 'numpy', # version range defined in `core/_setup_utils.py`\n 'scipy', # version range defined in `core/_setup_utils.py`\n 'pandas', # version range defined in `core/_setup_utils.py`\n 'scikit-learn', # version range defined in `core/_setup_utils.py`\n 'networkx', # version range defined in `core/_setup_utils.py`\n f'{ag.PACKAGE_NAME}.core=={version}',\n f'{ag.PACKAGE_NAME}.features=={version}',\n]\n\nextras_require = {\n 'lightgbm': [\n 'lightgbm>=3.3,<3.4',\n ],\n 'catboost': [\n 'catboost>=1.0,<1.2',\n ],\n # FIXME: Debug why xgboost 1.6 has 4x+ slower inference on multiclass datasets compared to 1.4\n # It is possibly only present on MacOS, haven't tested linux.\n # XGBoost made API breaking changes in 1.6 with custom metric and callback support, so we don't support older versions.\n 'xgboost': [\n 'xgboost>=1.6,<1.8',\n ],\n 'fastai': [\n 'torch>=1.9,<1.14',\n 'fastai>=2.3.1,<2.8',\n ],\n 'ray': [\n f'{ag.PACKAGE_NAME}.core[all]=={version}',\n ],\n 'skex': [\n 'scikit-learn-intelex>=2021.6,<2021.8',\n ],\n 'imodels': [\n 'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147\n ],\n 'vowpalwabbit': [\n # FIXME: 9.5+ causes VW to save an empty model which always predicts 0. Confirmed on MacOS (Intel CPU). 
Unknown how to fix.\n 'vowpalwabbit>=9,<9.5',\n ],\n 'skl2onnx': [\n 'skl2onnx>=1.13.0,<1.14.0',\n # For macOS, there isn't a onnxruntime-gpu package installed with skl2onnx.\n # Therefore, we install onnxruntime explicitly here just for macOS.\n 'onnxruntime>=1.13.0,<1.14.0'\n ] if sys.platform == 'darwin' else [\n 'skl2onnx>=1.13.0,<1.14.0'\n ]\n}\n\nall_requires = []\n# TODO: Consider adding 'skex' to 'all'\nfor extra_package in ['lightgbm', 'catboost', 'xgboost', 'fastai', 'ray']:\n all_requires += extras_require[extra_package]\nall_requires = list(set(all_requires))\nextras_require['all'] = all_requires\n\n\ntest_requires = []\nfor test_package in ['imodels', 'vowpalwabbit', 'skl2onnx']:\n test_requires += extras_require[test_package]\nextras_require['tests'] = test_requires\ninstall_requires = ag.get_dependency_version_ranges(install_requires)\n\nif __name__ == '__main__':\n ag.create_version_file(version=version, submodule=submodule)\n setup_args = ag.default_setup_args(version=version, submodule=submodule)\n setup(\n install_requires=install_requires,\n extras_require=extras_require,\n **setup_args,\n )\n", "path": "tabular/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n###########################\n# This code block is a HACK (!), but is necessary to avoid code duplication. Do NOT alter these lines.\nimport os\nfrom setuptools import setup\nimport importlib.util\nfilepath = os.path.abspath(os.path.dirname(__file__))\nfilepath_import = os.path.join(filepath, '..', 'core', 'src', 'autogluon', 'core', '_setup_utils.py')\nspec = importlib.util.spec_from_file_location(\"ag_min_dependencies\", filepath_import)\nag = importlib.util.module_from_spec(spec)\n# Identical to `from autogluon.core import _setup_utils as ag`, but works without `autogluon.core` being installed.\nspec.loader.exec_module(ag)\n###########################\n\nimport sys\n\nversion = ag.load_version_file()\nversion = ag.update_version(version)\n\nsubmodule = 'tabular'\ninstall_requires = [\n # version ranges added in ag.get_dependency_version_ranges()\n 'numpy', # version range defined in `core/_setup_utils.py`\n 'scipy', # version range defined in `core/_setup_utils.py`\n 'pandas', # version range defined in `core/_setup_utils.py`\n 'scikit-learn', # version range defined in `core/_setup_utils.py`\n 'networkx', # version range defined in `core/_setup_utils.py`\n f'{ag.PACKAGE_NAME}.core=={version}',\n f'{ag.PACKAGE_NAME}.features=={version}',\n]\n\nextras_require = {\n 'lightgbm': [\n 'lightgbm>=3.3,<3.4',\n ],\n 'catboost': [\n 'catboost>=1.0,<1.2',\n ],\n # FIXME: Debug why xgboost 1.6 has 4x+ slower inference on multiclass datasets compared to 1.4\n # It is possibly only present on MacOS, haven't tested linux.\n # XGBoost made API breaking changes in 1.6 with custom metric and callback support, so we don't support older versions.\n 'xgboost': [\n 'xgboost>=1.6,<1.8',\n ],\n 'fastai': [\n 'torch>=1.9,<1.14',\n 'fastai>=2.3.1,<2.8',\n ],\n 'ray': [\n f'{ag.PACKAGE_NAME}.core[all]=={version}',\n ],\n 'skex': [\n # Note: 2021.7 released on Sep 2022, version 2022.x doesn't exist (went directly from 2021.7 to 2023.0)\n 'scikit-learn-intelex>=2021.7,<2023.1',\n ],\n 'imodels': [\n 'imodels>=1.3.10,<1.4.0', # 1.3.8/1.3.9 either remove/renamed attribute `complexity_` causing failures. https://github.com/csinva/imodels/issues/147\n ],\n 'vowpalwabbit': [\n # FIXME: 9.5+ causes VW to save an empty model which always predicts 0. Confirmed on MacOS (Intel CPU). 
Unknown how to fix.\n 'vowpalwabbit>=9,<9.5',\n ],\n 'skl2onnx': [\n 'skl2onnx>=1.13.0,<1.14.0',\n # For macOS, there isn't a onnxruntime-gpu package installed with skl2onnx.\n # Therefore, we install onnxruntime explicitly here just for macOS.\n 'onnxruntime>=1.13.0,<1.14.0'\n ] if sys.platform == 'darwin' else [\n 'skl2onnx>=1.13.0,<1.14.0'\n ]\n}\n\nall_requires = []\n# TODO: Consider adding 'skex' to 'all'\nfor extra_package in ['lightgbm', 'catboost', 'xgboost', 'fastai', 'ray']:\n all_requires += extras_require[extra_package]\nall_requires = list(set(all_requires))\nextras_require['all'] = all_requires\n\n\ntest_requires = []\nfor test_package in ['imodels', 'vowpalwabbit', 'skl2onnx']:\n test_requires += extras_require[test_package]\nextras_require['tests'] = test_requires\ninstall_requires = ag.get_dependency_version_ranges(install_requires)\n\nif __name__ == '__main__':\n ag.create_version_file(version=version, submodule=submodule)\n setup_args = ag.default_setup_args(version=version, submodule=submodule)\n setup(\n install_requires=install_requires,\n extras_require=extras_require,\n **setup_args,\n )\n", "path": "tabular/setup.py"}]} | 1,462 | 237 |
gh_patches_debug_2274 | rasdani/github-patches | git_diff | svthalia__concrexit-1844 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Event (registration) status message in the API
### Is your feature request related to a problem? Please describe.
Currently, the event status messages (like 'you cannot cancel your registration without having to pay a fine') are hardcoded, and whenever we update them we must also update the app.
### Describe the solution you'd like
Put the message in the API
### Additional context
Also checkout #1381
--- END ISSUE ---
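Sketch of the expected solution shape, assuming the message is a field on the `Event` model just like `no_registration_message`: with a DRF `ModelSerializer`, exposing it to the API is a matter of listing it in `Meta.fields`. The extra field name is an assumption here, not confirmed API:
```python
from rest_framework import serializers

from events.models import Event


class EventSerializer(serializers.ModelSerializer):
    class Meta:
        model = Event
        fields = (
            "pk",
            "no_registration_message",
            "cancel_too_late_message",  # assumed model field holding the message
        )
```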
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/events/api/v2/serializers/event.py`
Content:
```
1 from rest_framework import serializers
2
3 from activemembers.api.v2.serializers.member_group import MemberGroupSerializer
4 from announcements.api.v2.serializers import SlideSerializer
5 from documents.api.v2.serializers.document import DocumentSerializer
6 from events import services
7 from events.api.v2.serializers.event_registration import EventRegistrationSerializer
8 from events.models import Event, EventRegistration
9 from thaliawebsite.api.v2.serializers import CleanedHTMLSerializer
10 from utils.snippets import create_google_maps_url
11
12
13 class EventSerializer(serializers.ModelSerializer):
14 """Serializer for events."""
15
16 class Meta:
17 model = Event
18 fields = (
19 "pk",
20 "title",
21 "description",
22 "start",
23 "end",
24 "category",
25 "registration_start",
26 "registration_end",
27 "cancel_deadline",
28 "optional_registrations",
29 "location",
30 "price",
31 "fine",
32 "num_participants",
33 "max_participants",
34 "no_registration_message",
35 "has_fields",
36 "food_event",
37 "maps_url",
38 "user_permissions",
39 "user_registration",
40 "organiser",
41 "slide",
42 "documents",
43 )
44
45 description = CleanedHTMLSerializer()
46 organiser = MemberGroupSerializer()
47 user_registration = serializers.SerializerMethodField("_user_registration")
48 num_participants = serializers.SerializerMethodField("_num_participants")
49 maps_url = serializers.SerializerMethodField("_maps_url")
50 price = serializers.DecimalField(max_digits=5, decimal_places=2)
51 fine = serializers.DecimalField(max_digits=5, decimal_places=2)
52 slide = SlideSerializer()
53 documents = DocumentSerializer(many=True)
54 user_permissions = serializers.SerializerMethodField("_user_permissions")
55
56 def _user_registration(self, instance):
57 try:
58 if self.context["request"].member:
59 reg = instance.eventregistration_set.get(
60 member=self.context["request"].member, date_cancelled=None
61 )
62 return EventRegistrationSerializer(
63 reg,
64 context=self.context,
65 fields=("pk", "present", "queue_position", "date", "payment"),
66 ).data
67 except EventRegistration.DoesNotExist:
68 pass
69 return None
70
71 def _num_participants(self, instance):
72 if (
73 instance.max_participants
74 and instance.participants.count() > instance.max_participants
75 ):
76 return instance.max_participants
77 return instance.participants.count()
78
79 def _user_permissions(self, instance):
80 member = self.context["request"].member
81 return services.event_permissions(member, instance)
82
83 def _maps_url(self, instance):
84 return create_google_maps_url(instance.map_location, zoom=13, size="450x250")
85
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/website/events/api/v2/serializers/event.py b/website/events/api/v2/serializers/event.py
--- a/website/events/api/v2/serializers/event.py
+++ b/website/events/api/v2/serializers/event.py
@@ -32,6 +32,7 @@
"num_participants",
"max_participants",
"no_registration_message",
+ "cancel_too_late_message",
"has_fields",
"food_event",
"maps_url",
| {"golden_diff": "diff --git a/website/events/api/v2/serializers/event.py b/website/events/api/v2/serializers/event.py\n--- a/website/events/api/v2/serializers/event.py\n+++ b/website/events/api/v2/serializers/event.py\n@@ -32,6 +32,7 @@\n \"num_participants\",\n \"max_participants\",\n \"no_registration_message\",\n+ \"cancel_too_late_message\",\n \"has_fields\",\n \"food_event\",\n \"maps_url\",\n", "issue": "Event (registration) status message in the API\n### Is your feature request related to a problem? Please describe.\r\nCurrently, the event status messages (like 'you cannot cancel your registration without having to pay a fine') are hardcoded and whenever we update them, we must also update the app\r\n\r\n### Describe the solution you'd like\r\nPut the message in the API\r\n\r\n### Additional context\r\nAlso checkout #1381 \n", "before_files": [{"content": "from rest_framework import serializers\n\nfrom activemembers.api.v2.serializers.member_group import MemberGroupSerializer\nfrom announcements.api.v2.serializers import SlideSerializer\nfrom documents.api.v2.serializers.document import DocumentSerializer\nfrom events import services\nfrom events.api.v2.serializers.event_registration import EventRegistrationSerializer\nfrom events.models import Event, EventRegistration\nfrom thaliawebsite.api.v2.serializers import CleanedHTMLSerializer\nfrom utils.snippets import create_google_maps_url\n\n\nclass EventSerializer(serializers.ModelSerializer):\n \"\"\"Serializer for events.\"\"\"\n\n class Meta:\n model = Event\n fields = (\n \"pk\",\n \"title\",\n \"description\",\n \"start\",\n \"end\",\n \"category\",\n \"registration_start\",\n \"registration_end\",\n \"cancel_deadline\",\n \"optional_registrations\",\n \"location\",\n \"price\",\n \"fine\",\n \"num_participants\",\n \"max_participants\",\n \"no_registration_message\",\n \"has_fields\",\n \"food_event\",\n \"maps_url\",\n \"user_permissions\",\n \"user_registration\",\n \"organiser\",\n \"slide\",\n \"documents\",\n )\n\n description = CleanedHTMLSerializer()\n organiser = MemberGroupSerializer()\n user_registration = serializers.SerializerMethodField(\"_user_registration\")\n num_participants = serializers.SerializerMethodField(\"_num_participants\")\n maps_url = serializers.SerializerMethodField(\"_maps_url\")\n price = serializers.DecimalField(max_digits=5, decimal_places=2)\n fine = serializers.DecimalField(max_digits=5, decimal_places=2)\n slide = SlideSerializer()\n documents = DocumentSerializer(many=True)\n user_permissions = serializers.SerializerMethodField(\"_user_permissions\")\n\n def _user_registration(self, instance):\n try:\n if self.context[\"request\"].member:\n reg = instance.eventregistration_set.get(\n member=self.context[\"request\"].member, date_cancelled=None\n )\n return EventRegistrationSerializer(\n reg,\n context=self.context,\n fields=(\"pk\", \"present\", \"queue_position\", \"date\", \"payment\"),\n ).data\n except EventRegistration.DoesNotExist:\n pass\n return None\n\n def _num_participants(self, instance):\n if (\n instance.max_participants\n and instance.participants.count() > instance.max_participants\n ):\n return instance.max_participants\n return instance.participants.count()\n\n def _user_permissions(self, instance):\n member = self.context[\"request\"].member\n return services.event_permissions(member, instance)\n\n def _maps_url(self, instance):\n return create_google_maps_url(instance.map_location, zoom=13, size=\"450x250\")\n", "path": "website/events/api/v2/serializers/event.py"}], 
"after_files": [{"content": "from rest_framework import serializers\n\nfrom activemembers.api.v2.serializers.member_group import MemberGroupSerializer\nfrom announcements.api.v2.serializers import SlideSerializer\nfrom documents.api.v2.serializers.document import DocumentSerializer\nfrom events import services\nfrom events.api.v2.serializers.event_registration import EventRegistrationSerializer\nfrom events.models import Event, EventRegistration\nfrom thaliawebsite.api.v2.serializers import CleanedHTMLSerializer\nfrom utils.snippets import create_google_maps_url\n\n\nclass EventSerializer(serializers.ModelSerializer):\n \"\"\"Serializer for events.\"\"\"\n\n class Meta:\n model = Event\n fields = (\n \"pk\",\n \"title\",\n \"description\",\n \"start\",\n \"end\",\n \"category\",\n \"registration_start\",\n \"registration_end\",\n \"cancel_deadline\",\n \"optional_registrations\",\n \"location\",\n \"price\",\n \"fine\",\n \"num_participants\",\n \"max_participants\",\n \"no_registration_message\",\n \"cancel_too_late_message\",\n \"has_fields\",\n \"food_event\",\n \"maps_url\",\n \"user_permissions\",\n \"user_registration\",\n \"organiser\",\n \"slide\",\n \"documents\",\n )\n\n description = CleanedHTMLSerializer()\n organiser = MemberGroupSerializer()\n user_registration = serializers.SerializerMethodField(\"_user_registration\")\n num_participants = serializers.SerializerMethodField(\"_num_participants\")\n maps_url = serializers.SerializerMethodField(\"_maps_url\")\n price = serializers.DecimalField(max_digits=5, decimal_places=2)\n fine = serializers.DecimalField(max_digits=5, decimal_places=2)\n slide = SlideSerializer()\n documents = DocumentSerializer(many=True)\n user_permissions = serializers.SerializerMethodField(\"_user_permissions\")\n\n def _user_registration(self, instance):\n try:\n if self.context[\"request\"].member:\n reg = instance.eventregistration_set.get(\n member=self.context[\"request\"].member, date_cancelled=None\n )\n return EventRegistrationSerializer(\n reg,\n context=self.context,\n fields=(\"pk\", \"present\", \"queue_position\", \"date\", \"payment\"),\n ).data\n except EventRegistration.DoesNotExist:\n pass\n return None\n\n def _num_participants(self, instance):\n if (\n instance.max_participants\n and instance.participants.count() > instance.max_participants\n ):\n return instance.max_participants\n return instance.participants.count()\n\n def _user_permissions(self, instance):\n member = self.context[\"request\"].member\n return services.event_permissions(member, instance)\n\n def _maps_url(self, instance):\n return create_google_maps_url(instance.map_location, zoom=13, size=\"450x250\")\n", "path": "website/events/api/v2/serializers/event.py"}]} | 1,077 | 111 |
gh_patches_debug_28 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-1889 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Run tests on Windows in CI
--- END ISSUE ---
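One plausible link between this request and the example below, stated as an assumption rather than a confirmed diagnosis: `debug=True` turns on Flask's auto-reloader, which re-executes the script in a child process and can hang a non-interactive Windows CI job. The reloader can be switched off independently of debug output:
```python
# Hypothetical variant: keep debug tracebacks but disable the reloader,
# which spawns a watcher subprocess that CI runners may never reap.
app.run(debug=True, use_reloader=False, port=5000)
```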
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/getting_started/flask_example.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # flask_example.py
16 import flask
17 import requests
18
19 from opentelemetry import trace
20 from opentelemetry.instrumentation.flask import FlaskInstrumentor
21 from opentelemetry.instrumentation.requests import RequestsInstrumentor
22 from opentelemetry.sdk.trace import TracerProvider
23 from opentelemetry.sdk.trace.export import (
24 BatchSpanProcessor,
25 ConsoleSpanExporter,
26 )
27
28 trace.set_tracer_provider(TracerProvider())
29 trace.get_tracer_provider().add_span_processor(
30 BatchSpanProcessor(ConsoleSpanExporter())
31 )
32
33 app = flask.Flask(__name__)
34 FlaskInstrumentor().instrument_app(app)
35 RequestsInstrumentor().instrument()
36
37 tracer = trace.get_tracer(__name__)
38
39
40 @app.route("/")
41 def hello():
42 with tracer.start_as_current_span("example-request"):
43 requests.get("http://www.example.com")
44 return "hello"
45
46
47 app.run(debug=True, port=5000)
48
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/getting_started/flask_example.py b/docs/getting_started/flask_example.py
--- a/docs/getting_started/flask_example.py
+++ b/docs/getting_started/flask_example.py
@@ -44,4 +44,4 @@
return "hello"
-app.run(debug=True, port=5000)
+app.run(port=5000)
| {"golden_diff": "diff --git a/docs/getting_started/flask_example.py b/docs/getting_started/flask_example.py\n--- a/docs/getting_started/flask_example.py\n+++ b/docs/getting_started/flask_example.py\n@@ -44,4 +44,4 @@\n return \"hello\"\n \n \n-app.run(debug=True, port=5000)\n+app.run(port=5000)\n", "issue": "Run tests on Windows in CI\n\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# flask_example.py\nimport flask\nimport requests\n\nfrom opentelemetry import trace\nfrom opentelemetry.instrumentation.flask import FlaskInstrumentor\nfrom opentelemetry.instrumentation.requests import RequestsInstrumentor\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import (\n BatchSpanProcessor,\n ConsoleSpanExporter,\n)\n\ntrace.set_tracer_provider(TracerProvider())\ntrace.get_tracer_provider().add_span_processor(\n BatchSpanProcessor(ConsoleSpanExporter())\n)\n\napp = flask.Flask(__name__)\nFlaskInstrumentor().instrument_app(app)\nRequestsInstrumentor().instrument()\n\ntracer = trace.get_tracer(__name__)\n\n\[email protected](\"/\")\ndef hello():\n with tracer.start_as_current_span(\"example-request\"):\n requests.get(\"http://www.example.com\")\n return \"hello\"\n\n\napp.run(debug=True, port=5000)\n", "path": "docs/getting_started/flask_example.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# flask_example.py\nimport flask\nimport requests\n\nfrom opentelemetry import trace\nfrom opentelemetry.instrumentation.flask import FlaskInstrumentor\nfrom opentelemetry.instrumentation.requests import RequestsInstrumentor\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import (\n BatchSpanProcessor,\n ConsoleSpanExporter,\n)\n\ntrace.set_tracer_provider(TracerProvider())\ntrace.get_tracer_provider().add_span_processor(\n BatchSpanProcessor(ConsoleSpanExporter())\n)\n\napp = flask.Flask(__name__)\nFlaskInstrumentor().instrument_app(app)\nRequestsInstrumentor().instrument()\n\ntracer = trace.get_tracer(__name__)\n\n\[email protected](\"/\")\ndef hello():\n with tracer.start_as_current_span(\"example-request\"):\n requests.get(\"http://www.example.com\")\n return \"hello\"\n\n\napp.run(port=5000)\n", "path": "docs/getting_started/flask_example.py"}]} | 672 | 84 |
gh_patches_debug_11592 | rasdani/github-patches | git_diff | opsdroid__opsdroid-184 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
API.AI Error when network is disconnected
The following error should be caught and warned about cleanly.
```
ERROR asyncio: Task exception was never retrieved
future: <Task finished coro=<parse_apiai() done, defined at /Users/jacob/Projects/opsdroid/opsdroid/opsdroid/parsers/apiai.py:34> exception=ClientConnectorError(8, 'Cannot connect to host api.api.ai:443 ssl:True [nodename nor servname provided, or not known]')>
Traceback (most recent call last):
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 375, in connect
proto = yield from self._create_connection(req)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 632, in _create_connection
_, proto = yield from self._create_direct_connection(req)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 643, in _create_direct_connection
hosts = yield from self._resolve_host(req.url.raw_host, req.port)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 615, in _resolve_host
self._resolver.resolve(host, port, family=self._family)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/resolver.py", line 30, in resolve
host, port, type=socket.SOCK_STREAM, family=family)
File "/opt/boxen/homebrew/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/concurrent/futures/thread.py", line 55, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/boxen/homebrew/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/socket.py", line 743, in getaddrinfo
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
socket.gaierror: [Errno 8] nodename nor servname provided, or not known
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/jacob/Projects/opsdroid/opsdroid/opsdroid/parsers/apiai.py", line 42, in parse_apiai
result = await call_apiai(message, config)
File "/Users/jacob/Projects/opsdroid/opsdroid/opsdroid/parsers/apiai.py", line 27, in call_apiai
headers=headers)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/client.py", line 621, in __await__
resp = yield from self._coro
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/client.py", line 225, in _request
conn = yield from self._connector.connect(req)
File "/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py", line 380, in connect
.format(key, exc.strerror)) from exc
aiohttp.client_exceptions.ClientConnectorError: [Errno 8] Cannot connect to host api.api.ai:443 ssl:True [nodename nor servname provided, or not known]
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opsdroid/parsers/apiai.py`
Content:
```
1 """A helper function for parsing and executing api.ai skills."""
2
3 import logging
4 import json
5
6 import aiohttp
7
8
9 _LOGGER = logging.getLogger(__name__)
10
11
12 async def call_apiai(message, config):
13 """Call the api.ai api and return the response."""
14 async with aiohttp.ClientSession() as session:
15 payload = {
16 "v": "20150910",
17 "lang": "en",
18 "sessionId": message.connector.name,
19 "query": message.text
20 }
21 headers = {
22 "Authorization": "Bearer " + config['access-token'],
23 "Content-Type": "application/json"
24 }
25 resp = await session.post("https://api.api.ai/v1/query",
26 data=json.dumps(payload),
27 headers=headers)
28 result = await resp.json()
29 _LOGGER.debug("api.ai response - " + json.dumps(result))
30
31 return result
32
33
34 async def parse_apiai(opsdroid, message, config):
35 """Parse a message against all apiai skills."""
36 # pylint: disable=broad-except
37 # We want to catch all exceptions coming from a skill module and not
38 # halt the application. If a skill throws an exception it just doesn't
39 # give a response to the user, so an error response should be given.
40 if 'access-token' in config:
41
42 result = await call_apiai(message, config)
43
44 if result["status"]["code"] >= 300:
45 _LOGGER.error("api.ai error - " +
46 str(result["status"]["code"]) + " " +
47 result["status"]["errorType"])
48 return
49
50 if "min-score" in config and \
51 result["result"]["score"] < config["min-score"]:
52 _LOGGER.debug("api.ai score lower than min-score")
53 return
54
55 if result:
56 for skill in opsdroid.skills:
57
58 if "apiai_action" in skill or "apiai_intent" in skill:
59 if ("action" in result["result"] and
60 skill["apiai_action"] in
61 result["result"]["action"]) \
62 or ("intentName" in result["result"] and
63 skill["apiai_intent"] in
64 result["result"]["intentName"]):
65 message.apiai = result
66 try:
67 await skill["skill"](opsdroid, skill["config"],
68 message)
69 except Exception:
70 await message.respond(
71 "Whoops there has been an error")
72 await message.respond(
73 "Check the log for details")
74 _LOGGER.exception("Exception when parsing '" +
75 message.text +
76 "' against skill '" +
77 result["result"]["action"] + "'")
78
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/opsdroid/parsers/apiai.py b/opsdroid/parsers/apiai.py
--- a/opsdroid/parsers/apiai.py
+++ b/opsdroid/parsers/apiai.py
@@ -38,8 +38,11 @@
# halt the application. If a skill throws an exception it just doesn't
# give a response to the user, so an error response should be given.
if 'access-token' in config:
-
- result = await call_apiai(message, config)
+ try:
+ result = await call_apiai(message, config)
+ except aiohttp.ClientOSError:
+ _LOGGER.error("No response from api.ai, check your network.")
+ return
if result["status"]["code"] >= 300:
_LOGGER.error("api.ai error - " +
| {"golden_diff": "diff --git a/opsdroid/parsers/apiai.py b/opsdroid/parsers/apiai.py\n--- a/opsdroid/parsers/apiai.py\n+++ b/opsdroid/parsers/apiai.py\n@@ -38,8 +38,11 @@\n # halt the application. If a skill throws an exception it just doesn't\n # give a response to the user, so an error response should be given.\n if 'access-token' in config:\n-\n- result = await call_apiai(message, config)\n+ try:\n+ result = await call_apiai(message, config)\n+ except aiohttp.ClientOSError:\n+ _LOGGER.error(\"No response from api.ai, check your network.\")\n+ return\n \n if result[\"status\"][\"code\"] >= 300:\n _LOGGER.error(\"api.ai error - \" +\n", "issue": "API.AI Error when network is disconnected\nThe following error should be caught and warned about cleanly.\r\n\r\n```\r\nERROR asyncio: Task exception was never retrieved\r\nfuture: <Task finished coro=<parse_apiai() done, defined at /Users/jacob/Projects/opsdroid/opsdroid/opsdroid/parsers/apiai.py:34> exception=ClientConnectorError(8, 'Cannot connect to host api.api.ai:443 ssl:True [nodename nor servname provided, or not known]')>\r\nTraceback (most recent call last):\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py\", line 375, in connect\r\n proto = yield from self._create_connection(req)\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py\", line 632, in _create_connection\r\n _, proto = yield from self._create_direct_connection(req)\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py\", line 643, in _create_direct_connection\r\n hosts = yield from self._resolve_host(req.url.raw_host, req.port)\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py\", line 615, in _resolve_host\r\n self._resolver.resolve(host, port, family=self._family)\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/resolver.py\", line 30, in resolve\r\n host, port, type=socket.SOCK_STREAM, family=family)\r\n File \"/opt/boxen/homebrew/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/concurrent/futures/thread.py\", line 55, in run\r\n result = self.fn(*self.args, **self.kwargs)\r\n File \"/opt/boxen/homebrew/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/socket.py\", line 743, in getaddrinfo\r\n for res in _socket.getaddrinfo(host, port, family, type, proto, flags):\r\nsocket.gaierror: [Errno 8] nodename nor servname provided, or not known\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/opsdroid/parsers/apiai.py\", line 42, in parse_apiai\r\n result = await call_apiai(message, config)\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/opsdroid/parsers/apiai.py\", line 27, in call_apiai\r\n headers=headers)\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/client.py\", line 621, in __await__\r\n resp = yield from self._coro\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/client.py\", line 225, in _request\r\n conn = yield from self._connector.connect(req)\r\n File \"/Users/jacob/Projects/opsdroid/opsdroid/venv/lib/python3.6/site-packages/aiohttp/connector.py\", line 380, in connect\r\n .format(key, exc.strerror)) from 
exc\r\naiohttp.client_exceptions.ClientConnectorError: [Errno 8] Cannot connect to host api.api.ai:443 ssl:True [nodename nor servname provided, or not known]\r\n```\n", "before_files": [{"content": "\"\"\"A helper function for parsing and executing api.ai skills.\"\"\"\n\nimport logging\nimport json\n\nimport aiohttp\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def call_apiai(message, config):\n \"\"\"Call the api.ai api and return the response.\"\"\"\n async with aiohttp.ClientSession() as session:\n payload = {\n \"v\": \"20150910\",\n \"lang\": \"en\",\n \"sessionId\": message.connector.name,\n \"query\": message.text\n }\n headers = {\n \"Authorization\": \"Bearer \" + config['access-token'],\n \"Content-Type\": \"application/json\"\n }\n resp = await session.post(\"https://api.api.ai/v1/query\",\n data=json.dumps(payload),\n headers=headers)\n result = await resp.json()\n _LOGGER.debug(\"api.ai response - \" + json.dumps(result))\n\n return result\n\n\nasync def parse_apiai(opsdroid, message, config):\n \"\"\"Parse a message against all apiai skills.\"\"\"\n # pylint: disable=broad-except\n # We want to catch all exceptions coming from a skill module and not\n # halt the application. If a skill throws an exception it just doesn't\n # give a response to the user, so an error response should be given.\n if 'access-token' in config:\n\n result = await call_apiai(message, config)\n\n if result[\"status\"][\"code\"] >= 300:\n _LOGGER.error(\"api.ai error - \" +\n str(result[\"status\"][\"code\"]) + \" \" +\n result[\"status\"][\"errorType\"])\n return\n\n if \"min-score\" in config and \\\n result[\"result\"][\"score\"] < config[\"min-score\"]:\n _LOGGER.debug(\"api.ai score lower than min-score\")\n return\n\n if result:\n for skill in opsdroid.skills:\n\n if \"apiai_action\" in skill or \"apiai_intent\" in skill:\n if (\"action\" in result[\"result\"] and\n skill[\"apiai_action\"] in\n result[\"result\"][\"action\"]) \\\n or (\"intentName\" in result[\"result\"] and\n skill[\"apiai_intent\"] in\n result[\"result\"][\"intentName\"]):\n message.apiai = result\n try:\n await skill[\"skill\"](opsdroid, skill[\"config\"],\n message)\n except Exception:\n await message.respond(\n \"Whoops there has been an error\")\n await message.respond(\n \"Check the log for details\")\n _LOGGER.exception(\"Exception when parsing '\" +\n message.text +\n \"' against skill '\" +\n result[\"result\"][\"action\"] + \"'\")\n", "path": "opsdroid/parsers/apiai.py"}], "after_files": [{"content": "\"\"\"A helper function for parsing and executing api.ai skills.\"\"\"\n\nimport logging\nimport json\n\nimport aiohttp\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\nasync def call_apiai(message, config):\n \"\"\"Call the api.ai api and return the response.\"\"\"\n async with aiohttp.ClientSession() as session:\n payload = {\n \"v\": \"20150910\",\n \"lang\": \"en\",\n \"sessionId\": message.connector.name,\n \"query\": message.text\n }\n headers = {\n \"Authorization\": \"Bearer \" + config['access-token'],\n \"Content-Type\": \"application/json\"\n }\n resp = await session.post(\"https://api.api.ai/v1/query\",\n data=json.dumps(payload),\n headers=headers)\n result = await resp.json()\n _LOGGER.debug(\"api.ai response - \" + json.dumps(result))\n\n return result\n\n\nasync def parse_apiai(opsdroid, message, config):\n \"\"\"Parse a message against all apiai skills.\"\"\"\n # pylint: disable=broad-except\n # We want to catch all exceptions coming from a skill module and not\n # halt the application. 
If a skill throws an exception it just doesn't\n # give a response to the user, so an error response should be given.\n if 'access-token' in config:\n try:\n result = await call_apiai(message, config)\n except aiohttp.ClientOSError:\n _LOGGER.error(\"No response from api.ai, check your network.\")\n return\n\n if result[\"status\"][\"code\"] >= 300:\n _LOGGER.error(\"api.ai error - \" +\n str(result[\"status\"][\"code\"]) + \" \" +\n result[\"status\"][\"errorType\"])\n return\n\n if \"min-score\" in config and \\\n result[\"result\"][\"score\"] < config[\"min-score\"]:\n _LOGGER.debug(\"api.ai score lower than min-score\")\n return\n\n if result:\n for skill in opsdroid.skills:\n\n if \"apiai_action\" in skill or \"apiai_intent\" in skill:\n if (\"action\" in result[\"result\"] and\n skill[\"apiai_action\"] in\n result[\"result\"][\"action\"]) \\\n or (\"intentName\" in result[\"result\"] and\n skill[\"apiai_intent\"] in\n result[\"result\"][\"intentName\"]):\n message.apiai = result\n try:\n await skill[\"skill\"](opsdroid, skill[\"config\"],\n message)\n except Exception:\n await message.respond(\n \"Whoops there has been an error\")\n await message.respond(\n \"Check the log for details\")\n _LOGGER.exception(\"Exception when parsing '\" +\n message.text +\n \"' against skill '\" +\n result[\"result\"][\"action\"] + \"'\")\n", "path": "opsdroid/parsers/apiai.py"}]} | 1,862 | 193 |
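The patch above wraps only the network call, so genuine skill errors keep their existing handling. Below is a standalone sketch of that guard pattern with a hypothetical coroutine; it assumes aiohttp, where `ClientConnectorError` subclasses `ClientOSError`, so catching the latter covers DNS and connection failures:

```python
import aiohttp


async def fetch_json(url, payload):
    """Return the parsed response, or None when the network is unreachable."""
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(url, json=payload) as resp:
                return await resp.json()
    except aiohttp.ClientOSError:
        # Raised (via ClientConnectorError) when the host cannot be resolved
        # or the TCP connection fails, i.e. the machine is offline.
        return None


# Usage: asyncio.run(fetch_json("https://api.api.ai/v1/query", {"query": "hi"}))
```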
gh_patches_debug_9216 | rasdani/github-patches | git_diff | interlegis__sapl-2102 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Audiências Públicas sem possibilidade de Edição (Public Hearings cannot be edited)
When creating a Public Hearing (Audiência Pública) and saving it, the metadata of the legislative matter (matéria legislativa) entered in the form does not appear.
When clicking Edit, only the title of the created hearing is shown.
Thanks
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sapl/audiencia/forms.py`
Content:
```
1 from django import forms
2 from django.core.exceptions import ObjectDoesNotExist, ValidationError
3 from django.db import transaction
4 from django.utils.translation import ugettext_lazy as _
5 from sapl.audiencia.models import AudienciaPublica, TipoAudienciaPublica
6 from sapl.materia.models import MateriaLegislativa, TipoMateriaLegislativa
7 from sapl.utils import timezone
8
9 class AudienciaForm(forms.ModelForm):
10
11 data_atual = timezone.now()
12
13 tipo = forms.ModelChoiceField(required=True,
14 label='Tipo de Audiência Pública',
15 queryset=TipoAudienciaPublica.objects.all().order_by('nome'))
16
17 tipo_materia = forms.ModelChoiceField(
18 label=_('Tipo Matéria'),
19 required=True,
20 queryset=TipoMateriaLegislativa.objects.all(),
21 empty_label='Selecione',
22 )
23
24 numero_materia = forms.CharField(
25 label='Número Matéria', required=True)
26
27 ano_materia = forms.CharField(
28 label='Ano Matéria',
29 initial=int(data_atual.year),
30 required=True)
31
32 class Meta:
33 model = AudienciaPublica
34 fields = ['tipo', 'numero', 'nome',
35 'tema', 'data', 'hora_inicio', 'hora_fim',
36 'observacao', 'audiencia_cancelada', 'url_audio',
37 'url_video', 'upload_pauta', 'upload_ata',
38 'upload_anexo', 'tipo_materia', 'numero_materia',
39 'ano_materia']
40
41
42 def __init__(self, **kwargs):
43 super(AudienciaForm, self).__init__(**kwargs)
44
45 tipos = []
46
47 if not self.fields['tipo'].queryset:
48 tipos.append(TipoAudienciaPublica.objects.create(nome='Audiência Pública', tipo='A'))
49 tipos.append(TipoAudienciaPublica.objects.create(nome='Plebiscito', tipo='P'))
50 tipos.append(TipoAudienciaPublica.objects.create(nome='Referendo', tipo='R'))
51 tipos.append(TipoAudienciaPublica.objects.create(nome='Iniciativa Popular', tipo='I'))
52
53 for t in tipos:
54 t.save()
55
56
57 def clean(self):
58 cleaned_data = super(AudienciaForm, self).clean()
59 if not self.is_valid():
60 return cleaned_data
61
62 try:
63 materia = MateriaLegislativa.objects.get(
64 numero=self.cleaned_data['numero_materia'],
65 ano=self.cleaned_data['ano_materia'],
66 tipo=self.cleaned_data['tipo_materia'])
67 except ObjectDoesNotExist:
68 msg = _('A matéria a ser inclusa não existe no cadastro'
69 ' de matérias legislativas.')
70 raise ValidationError(msg)
71 else:
72 cleaned_data['materia'] = materia
73
74 if self.cleaned_data['hora_inicio'] and self.cleaned_data['hora_fim']:
75 if (self.cleaned_data['hora_fim'] <
76 self.cleaned_data['hora_inicio']):
77 msg = _('A hora de fim não pode ser anterior a hora de ínicio')
78 raise ValidationError(msg)
79
80 return self.cleaned_data
81
82 @transaction.atomic()
83 def save(self, commit=True):
84 audiencia = super(AudienciaForm, self).save(commit)
85 return audiencia
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sapl/audiencia/forms.py b/sapl/audiencia/forms.py
--- a/sapl/audiencia/forms.py
+++ b/sapl/audiencia/forms.py
@@ -77,9 +77,11 @@
msg = _('A hora de fim não pode ser anterior a hora de ínicio')
raise ValidationError(msg)
- return self.cleaned_data
+ return cleaned_data
@transaction.atomic()
def save(self, commit=True):
- audiencia = super(AudienciaForm, self).save(commit)
+ audiencia = super(AudienciaForm, self).save(False)
+ audiencia.materia = self.cleaned_data['materia']
+ audiencia.save()
return audiencia
\ No newline at end of file
| {"golden_diff": "diff --git a/sapl/audiencia/forms.py b/sapl/audiencia/forms.py\n--- a/sapl/audiencia/forms.py\n+++ b/sapl/audiencia/forms.py\n@@ -77,9 +77,11 @@\n msg = _('A hora de fim n\u00e3o pode ser anterior a hora de \u00ednicio')\n raise ValidationError(msg)\n \n- return self.cleaned_data\n+ return cleaned_data\n \n @transaction.atomic()\n def save(self, commit=True):\n- audiencia = super(AudienciaForm, self).save(commit)\n+ audiencia = super(AudienciaForm, self).save(False)\n+ audiencia.materia = self.cleaned_data['materia']\n+ audiencia.save()\n return audiencia\n\\ No newline at end of file\n", "issue": "Audi\u00eancias P\u00fablicas sem possibilidade de Edi\u00e7\u00e3o\nAo criar uma Audi\u00eancia P\u00fablica e salva-la, n\u00e3o aparecem os metadados da mat\u00e9ria legislativa inseridas no preenchimento. \r\nAo clicar em Editar, s\u00f3 aparece o t\u00edtulo da audi\u00eancia criada.\r\ngrato\n", "before_files": [{"content": "from django import forms\nfrom django.core.exceptions import ObjectDoesNotExist, ValidationError\nfrom django.db import transaction\nfrom django.utils.translation import ugettext_lazy as _\nfrom sapl.audiencia.models import AudienciaPublica, TipoAudienciaPublica\nfrom sapl.materia.models import MateriaLegislativa, TipoMateriaLegislativa\nfrom sapl.utils import timezone\n\nclass AudienciaForm(forms.ModelForm):\n\n data_atual = timezone.now()\n\n tipo = forms.ModelChoiceField(required=True,\n label='Tipo de Audi\u00eancia P\u00fablica',\n queryset=TipoAudienciaPublica.objects.all().order_by('nome'))\n\n tipo_materia = forms.ModelChoiceField(\n label=_('Tipo Mat\u00e9ria'),\n required=True,\n queryset=TipoMateriaLegislativa.objects.all(),\n empty_label='Selecione',\n )\n\n numero_materia = forms.CharField(\n label='N\u00famero Mat\u00e9ria', required=True)\n\n ano_materia = forms.CharField(\n label='Ano Mat\u00e9ria',\n initial=int(data_atual.year),\n required=True)\n\n class Meta:\n model = AudienciaPublica\n fields = ['tipo', 'numero', 'nome',\n 'tema', 'data', 'hora_inicio', 'hora_fim',\n 'observacao', 'audiencia_cancelada', 'url_audio',\n 'url_video', 'upload_pauta', 'upload_ata',\n 'upload_anexo', 'tipo_materia', 'numero_materia',\n 'ano_materia']\n\n\n def __init__(self, **kwargs):\n super(AudienciaForm, self).__init__(**kwargs)\n\n tipos = []\n\n if not self.fields['tipo'].queryset:\n tipos.append(TipoAudienciaPublica.objects.create(nome='Audi\u00eancia P\u00fablica', tipo='A'))\n tipos.append(TipoAudienciaPublica.objects.create(nome='Plebiscito', tipo='P'))\n tipos.append(TipoAudienciaPublica.objects.create(nome='Referendo', tipo='R'))\n tipos.append(TipoAudienciaPublica.objects.create(nome='Iniciativa Popular', tipo='I'))\n\n for t in tipos:\n t.save()\n\n\n def clean(self):\n cleaned_data = super(AudienciaForm, self).clean()\n if not self.is_valid():\n return cleaned_data\n\n try:\n materia = MateriaLegislativa.objects.get(\n numero=self.cleaned_data['numero_materia'],\n ano=self.cleaned_data['ano_materia'],\n tipo=self.cleaned_data['tipo_materia'])\n except ObjectDoesNotExist:\n msg = _('A mat\u00e9ria a ser inclusa n\u00e3o existe no cadastro'\n ' de mat\u00e9rias legislativas.')\n raise ValidationError(msg)\n else:\n cleaned_data['materia'] = materia\n\n if self.cleaned_data['hora_inicio'] and self.cleaned_data['hora_fim']:\n if (self.cleaned_data['hora_fim'] <\n self.cleaned_data['hora_inicio']):\n msg = _('A hora de fim n\u00e3o pode ser anterior a hora de \u00ednicio')\n raise ValidationError(msg)\n\n return self.cleaned_data\n\n 
@transaction.atomic()\n def save(self, commit=True):\n audiencia = super(AudienciaForm, self).save(commit)\n return audiencia", "path": "sapl/audiencia/forms.py"}], "after_files": [{"content": "from django import forms\nfrom django.core.exceptions import ObjectDoesNotExist, ValidationError\nfrom django.db import transaction\nfrom django.utils.translation import ugettext_lazy as _\nfrom sapl.audiencia.models import AudienciaPublica, TipoAudienciaPublica\nfrom sapl.materia.models import MateriaLegislativa, TipoMateriaLegislativa\nfrom sapl.utils import timezone\n\nclass AudienciaForm(forms.ModelForm):\n\n data_atual = timezone.now()\n\n tipo = forms.ModelChoiceField(required=True,\n label='Tipo de Audi\u00eancia P\u00fablica',\n queryset=TipoAudienciaPublica.objects.all().order_by('nome'))\n\n tipo_materia = forms.ModelChoiceField(\n label=_('Tipo Mat\u00e9ria'),\n required=True,\n queryset=TipoMateriaLegislativa.objects.all(),\n empty_label='Selecione',\n )\n\n numero_materia = forms.CharField(\n label='N\u00famero Mat\u00e9ria', required=True)\n\n ano_materia = forms.CharField(\n label='Ano Mat\u00e9ria',\n initial=int(data_atual.year),\n required=True)\n\n class Meta:\n model = AudienciaPublica\n fields = ['tipo', 'numero', 'nome',\n 'tema', 'data', 'hora_inicio', 'hora_fim',\n 'observacao', 'audiencia_cancelada', 'url_audio',\n 'url_video', 'upload_pauta', 'upload_ata',\n 'upload_anexo', 'tipo_materia', 'numero_materia',\n 'ano_materia']\n\n\n def __init__(self, **kwargs):\n super(AudienciaForm, self).__init__(**kwargs)\n\n tipos = []\n\n if not self.fields['tipo'].queryset:\n tipos.append(TipoAudienciaPublica.objects.create(nome='Audi\u00eancia P\u00fablica', tipo='A'))\n tipos.append(TipoAudienciaPublica.objects.create(nome='Plebiscito', tipo='P'))\n tipos.append(TipoAudienciaPublica.objects.create(nome='Referendo', tipo='R'))\n tipos.append(TipoAudienciaPublica.objects.create(nome='Iniciativa Popular', tipo='I'))\n\n for t in tipos:\n t.save()\n\n\n def clean(self):\n cleaned_data = super(AudienciaForm, self).clean()\n if not self.is_valid():\n return cleaned_data\n\n try:\n materia = MateriaLegislativa.objects.get(\n numero=self.cleaned_data['numero_materia'],\n ano=self.cleaned_data['ano_materia'],\n tipo=self.cleaned_data['tipo_materia'])\n except ObjectDoesNotExist:\n msg = _('A mat\u00e9ria a ser inclusa n\u00e3o existe no cadastro'\n ' de mat\u00e9rias legislativas.')\n raise ValidationError(msg)\n else:\n cleaned_data['materia'] = materia\n\n if self.cleaned_data['hora_inicio'] and self.cleaned_data['hora_fim']:\n if (self.cleaned_data['hora_fim'] <\n self.cleaned_data['hora_inicio']):\n msg = _('A hora de fim n\u00e3o pode ser anterior a hora de \u00ednicio')\n raise ValidationError(msg)\n\n return cleaned_data\n\n @transaction.atomic()\n def save(self, commit=True):\n audiencia = super(AudienciaForm, self).save(False)\n audiencia.materia = self.cleaned_data['materia']\n audiencia.save()\n return audiencia", "path": "sapl/audiencia/forms.py"}]} | 1,188 | 169 |
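The essence of the fix is Django's `commit=False` idiom: build the instance, attach data that `clean()` derived but the form does not render, then persist. A generic sketch with hypothetical names (it needs a configured Django app to actually run):

```python
from django import forms

from myapp.models import Hearing  # hypothetical model with a `matter` FK


class HearingForm(forms.ModelForm):
    class Meta:
        model = Hearing
        fields = ["title"]

    def save(self, commit=True):
        instance = super().save(commit=False)  # build without saving
        # Assume clean() stored a Matter instance in cleaned_data["matter"],
        # as the sapl patch does for the looked-up MateriaLegislativa.
        instance.matter = self.cleaned_data["matter"]
        if commit:
            instance.save()  # persist exactly once
        return instance
```

Note that the patch itself saves unconditionally rather than honouring `commit`; respecting the flag, as above, is the more conventional `ModelForm` contract.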
gh_patches_debug_15313 | rasdani/github-patches | git_diff | SciTools__cartopy-1027 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Doc: gallery code links don't need to be underlined
As an example, https://2-5282596-gh.circle-artifacts.com/0/root/project/docs/build/html/gallery/miscellanea/star_shaped_boundary.html#sphx-glr-gallery-miscellanea-star-shaped-boundary-py has all of the functions that have links underlined. This adds visual noise that distracts from the message.
We want to keep the links, but define the CSS to avoid the underlining.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/source/sphinxext/pre_sphinx_gallery.py`
Content:
```
1 """
2 Override sphinx_gallery's treatment of groups (folders) with cartopy's
3 ``__tags__`` semantics. This is tightly bound to the sphinx_gallery
4 implementation, hence the explicit version checking.
5
6 """
7 from collections import OrderedDict
8 import os.path
9 import shutil
10 import tempfile
11 import textwrap
12
13 import sphinx_gallery.gen_gallery
14 import sphinx_gallery.gen_rst
15 from sphinx_gallery.gen_rst import (
16 write_backreferences, extract_intro, _thumbnail_div,
17 generate_file_rst, sphinx_compatibility)
18
19
20 if sphinx_gallery.__version__ not in ['0.1.12']:
21 raise RuntimeError('not tested with this version of sphinx_gallery ({}). '
22 'Please modify this check, and validate sphinx_gallery'
23 ' behaves as expected.'
24 ''.format(sphinx_gallery.__version__))
25
26
27 GALLERY_HEADER = textwrap.dedent("""
28
29 Cartopy Gallery
30 ---------------
31
32 The following visual examples demonstrate some of the functionality of
33 Cartopy, particularly its matplotlib interface.
34
35 For a structured introduction to cartopy, including some of these
36 examples, see :ref:`getting-started-with-cartopy`.
37
38 """)
39
40
41 def example_groups(src_dir):
42 """Return a dictionary of {tag: [example filenames]} for the given dir."""
43
44 sorted_listdir = [fname for fname in sorted(os.listdir(src_dir))
45 if fname.endswith('.py') and not fname.startswith('_')]
46 tagged_examples = {}
47
48 for fname in sorted_listdir:
49 fpath = os.path.join(src_dir, fname)
50 __tags__ = []
51 with open(fpath, 'r') as fh:
52 for line in fh:
53 # Crudely remove the __tags__ line.
54 if line.startswith('__tags__ = '):
55 exec(line.strip(), locals(), globals())
56 for tag in __tags__:
57 tagged_examples.setdefault(tag, []).append(fname)
58 break
59 else:
60 tag = 'Miscellanea'
61 tagged_examples.setdefault(tag, []).append(fname)
62 return tagged_examples
63
64
65 def order_examples(tagged_examples):
66 """Order the tags and their examples."""
67 preferred_tag_order = ['Introductory',
68 'Lines and polygons',
69 'Scalar data',
70 'Vector data',
71 'Web services']
72
73 def sort_key(item):
74 tag = item[0]
75 try:
76 index = preferred_tag_order.index(tag)
77 except ValueError:
78 index = len(preferred_tag_order) + 1
79
80 return (index, tag.lower())
81 sorted_items = sorted(tagged_examples.items(), key=sort_key)
82 return OrderedDict(sorted_items)
83
84
85 def write_example(src_fpath, target_dir):
86 target_fpath = os.path.join(target_dir, os.path.basename(src_fpath))
87 with open(src_fpath, 'r') as fh:
88 with open(target_fpath, 'w') as fh_out:
89 for line in fh:
90 # Crudely remove the __tags__ line.
91 if line.startswith('__tags__ = '):
92 continue
93 fh_out.write(line)
94
95
96 def generate_dir_rst(src_dir, target_dir, gallery_conf, seen_backrefs):
97 """Generate the gallery reStructuredText for an example directory"""
98
99 fhindex = GALLERY_HEADER
100
101 if not os.path.exists(target_dir):
102 os.makedirs(target_dir)
103 tagged_examples = example_groups(src_dir)
104 tagged_examples = order_examples(tagged_examples)
105
106 computation_times = []
107 build_target_dir = os.path.relpath(target_dir, gallery_conf['src_dir'])
108
109 seen = set()
110 tmp_dir = tempfile.mkdtemp()
111
112 for tag, examples in tagged_examples.items():
113 sorted_listdir = examples
114
115 entries_text = []
116 iterator = sphinx_compatibility.status_iterator(
117 sorted_listdir,
118 'Generating gallery for %s ' % tag,
119 length=len(sorted_listdir))
120 for fname in iterator:
121 write_example(os.path.join(src_dir, fname), tmp_dir)
122 amount_of_code, time_elapsed = generate_file_rst(
123 fname, target_dir, tmp_dir, gallery_conf)
124
125 if fname not in seen:
126 seen.add(fname)
127 computation_times.append((time_elapsed, fname))
128
129 new_fname = os.path.join(src_dir, fname)
130 intro = extract_intro(new_fname)
131 this_entry = _thumbnail_div(build_target_dir, fname, intro) + textwrap.dedent("""
132
133 .. toctree::
134 :hidden:
135
136 /%s
137
138 """) % os.path.join(build_target_dir, fname[:-3]).replace(os.sep, '/') # noqa: E501
139
140 entries_text.append((amount_of_code, this_entry))
141
142 if gallery_conf['backreferences_dir']:
143 write_backreferences(seen_backrefs, gallery_conf,
144 target_dir, fname, intro)
145
146 # sort to have the smallest entries in the beginning
147 entries_text.sort()
148
149 fhindex += textwrap.dedent("""
150
151 {tag}
152 {tag_underline}
153
154 .. container:: gallery_images
155
156 """.format(tag=tag, tag_underline='-' * len(tag)))
157
158 for _, entry_text in entries_text:
159 fhindex += '\n '.join(entry_text.split('\n'))
160
161 # clear at the end of the section
162 fhindex += """.. raw:: html\n
163 <div style='clear:both'></div>\n\n"""
164
165 # Tidy up the temp directory
166 shutil.rmtree(tmp_dir)
167
168 return fhindex, computation_times
169
170
171 # Monkey-patch sphinx_gallery to handle cartopy's example format.
172 sphinx_gallery.gen_rst.generate_dir_rst = generate_dir_rst
173 sphinx_gallery.gen_gallery.generate_dir_rst = generate_dir_rst
174
175
176 def setup(app):
177 pass
178
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/source/sphinxext/pre_sphinx_gallery.py b/docs/source/sphinxext/pre_sphinx_gallery.py
--- a/docs/source/sphinxext/pre_sphinx_gallery.py
+++ b/docs/source/sphinxext/pre_sphinx_gallery.py
@@ -47,13 +47,12 @@
for fname in sorted_listdir:
fpath = os.path.join(src_dir, fname)
- __tags__ = []
with open(fpath, 'r') as fh:
for line in fh:
# Crudely remove the __tags__ line.
if line.startswith('__tags__ = '):
exec(line.strip(), locals(), globals())
- for tag in __tags__:
+ for tag in __tags__: # noqa:
tagged_examples.setdefault(tag, []).append(fname)
break
else:
| {"golden_diff": "diff --git a/docs/source/sphinxext/pre_sphinx_gallery.py b/docs/source/sphinxext/pre_sphinx_gallery.py\n--- a/docs/source/sphinxext/pre_sphinx_gallery.py\n+++ b/docs/source/sphinxext/pre_sphinx_gallery.py\n@@ -47,13 +47,12 @@\n \n for fname in sorted_listdir:\n fpath = os.path.join(src_dir, fname)\n- __tags__ = []\n with open(fpath, 'r') as fh:\n for line in fh:\n # Crudely remove the __tags__ line.\n if line.startswith('__tags__ = '):\n exec(line.strip(), locals(), globals())\n- for tag in __tags__:\n+ for tag in __tags__: # noqa:\n tagged_examples.setdefault(tag, []).append(fname)\n break\n else:\n", "issue": "Doc: gallery code links don't need to be underlined\nAs an example, https://2-5282596-gh.circle-artifacts.com/0/root/project/docs/build/html/gallery/miscellanea/star_shaped_boundary.html#sphx-glr-gallery-miscellanea-star-shaped-boundary-py has all of the functions that have links underlined. This adds visual noise that distracts from the message.\r\n\r\nWe want to keep the links, but define the CSS to avoid the underlining.\n", "before_files": [{"content": "\"\"\"\nOverride sphinx_gallery's treatment of groups (folders) with cartopy's\n``__tags__`` semantics. This is tightly bound to the sphinx_gallery\nimplementation, hence the explicit version checking.\n\n\"\"\"\nfrom collections import OrderedDict\nimport os.path\nimport shutil\nimport tempfile\nimport textwrap\n\nimport sphinx_gallery.gen_gallery\nimport sphinx_gallery.gen_rst\nfrom sphinx_gallery.gen_rst import (\n write_backreferences, extract_intro, _thumbnail_div,\n generate_file_rst, sphinx_compatibility)\n\n\nif sphinx_gallery.__version__ not in ['0.1.12']:\n raise RuntimeError('not tested with this version of sphinx_gallery ({}). '\n 'Please modify this check, and validate sphinx_gallery'\n ' behaves as expected.'\n ''.format(sphinx_gallery.__version__))\n\n\nGALLERY_HEADER = textwrap.dedent(\"\"\"\n\n Cartopy Gallery\n ---------------\n\n The following visual examples demonstrate some of the functionality of\n Cartopy, particularly its matplotlib interface.\n\n For a structured introduction to cartopy, including some of these\n examples, see :ref:`getting-started-with-cartopy`.\n\n\"\"\")\n\n\ndef example_groups(src_dir):\n \"\"\"Return a dictionary of {tag: [example filenames]} for the given dir.\"\"\"\n\n sorted_listdir = [fname for fname in sorted(os.listdir(src_dir))\n if fname.endswith('.py') and not fname.startswith('_')]\n tagged_examples = {}\n\n for fname in sorted_listdir:\n fpath = os.path.join(src_dir, fname)\n __tags__ = []\n with open(fpath, 'r') as fh:\n for line in fh:\n # Crudely remove the __tags__ line.\n if line.startswith('__tags__ = '):\n exec(line.strip(), locals(), globals())\n for tag in __tags__:\n tagged_examples.setdefault(tag, []).append(fname)\n break\n else:\n tag = 'Miscellanea'\n tagged_examples.setdefault(tag, []).append(fname)\n return tagged_examples\n\n\ndef order_examples(tagged_examples):\n \"\"\"Order the tags and their examples.\"\"\"\n preferred_tag_order = ['Introductory',\n 'Lines and polygons',\n 'Scalar data',\n 'Vector data',\n 'Web services']\n\n def sort_key(item):\n tag = item[0]\n try:\n index = preferred_tag_order.index(tag)\n except ValueError:\n index = len(preferred_tag_order) + 1\n\n return (index, tag.lower())\n sorted_items = sorted(tagged_examples.items(), key=sort_key)\n return OrderedDict(sorted_items)\n\n\ndef write_example(src_fpath, target_dir):\n target_fpath = os.path.join(target_dir, os.path.basename(src_fpath))\n with open(src_fpath, 
'r') as fh:\n with open(target_fpath, 'w') as fh_out:\n for line in fh:\n # Crudely remove the __tags__ line.\n if line.startswith('__tags__ = '):\n continue\n fh_out.write(line)\n\n\ndef generate_dir_rst(src_dir, target_dir, gallery_conf, seen_backrefs):\n \"\"\"Generate the gallery reStructuredText for an example directory\"\"\"\n\n fhindex = GALLERY_HEADER\n\n if not os.path.exists(target_dir):\n os.makedirs(target_dir)\n tagged_examples = example_groups(src_dir)\n tagged_examples = order_examples(tagged_examples)\n\n computation_times = []\n build_target_dir = os.path.relpath(target_dir, gallery_conf['src_dir'])\n\n seen = set()\n tmp_dir = tempfile.mkdtemp()\n\n for tag, examples in tagged_examples.items():\n sorted_listdir = examples\n\n entries_text = []\n iterator = sphinx_compatibility.status_iterator(\n sorted_listdir,\n 'Generating gallery for %s ' % tag,\n length=len(sorted_listdir))\n for fname in iterator:\n write_example(os.path.join(src_dir, fname), tmp_dir)\n amount_of_code, time_elapsed = generate_file_rst(\n fname, target_dir, tmp_dir, gallery_conf)\n\n if fname not in seen:\n seen.add(fname)\n computation_times.append((time_elapsed, fname))\n\n new_fname = os.path.join(src_dir, fname)\n intro = extract_intro(new_fname)\n this_entry = _thumbnail_div(build_target_dir, fname, intro) + textwrap.dedent(\"\"\"\n\n .. toctree::\n :hidden:\n\n /%s\n\n \"\"\") % os.path.join(build_target_dir, fname[:-3]).replace(os.sep, '/') # noqa: E501\n\n entries_text.append((amount_of_code, this_entry))\n\n if gallery_conf['backreferences_dir']:\n write_backreferences(seen_backrefs, gallery_conf,\n target_dir, fname, intro)\n\n # sort to have the smallest entries in the beginning\n entries_text.sort()\n\n fhindex += textwrap.dedent(\"\"\"\n\n {tag}\n {tag_underline}\n\n .. container:: gallery_images\n\n \"\"\".format(tag=tag, tag_underline='-' * len(tag)))\n\n for _, entry_text in entries_text:\n fhindex += '\\n '.join(entry_text.split('\\n'))\n\n # clear at the end of the section\n fhindex += \"\"\".. raw:: html\\n\n <div style='clear:both'></div>\\n\\n\"\"\"\n\n # Tidy up the temp directory\n shutil.rmtree(tmp_dir)\n\n return fhindex, computation_times\n\n\n# Monkey-patch sphinx_gallery to handle cartopy's example format.\nsphinx_gallery.gen_rst.generate_dir_rst = generate_dir_rst\nsphinx_gallery.gen_gallery.generate_dir_rst = generate_dir_rst\n\n\ndef setup(app):\n pass\n", "path": "docs/source/sphinxext/pre_sphinx_gallery.py"}], "after_files": [{"content": "\"\"\"\nOverride sphinx_gallery's treatment of groups (folders) with cartopy's\n``__tags__`` semantics. This is tightly bound to the sphinx_gallery\nimplementation, hence the explicit version checking.\n\n\"\"\"\nfrom collections import OrderedDict\nimport os.path\nimport shutil\nimport tempfile\nimport textwrap\n\nimport sphinx_gallery.gen_gallery\nimport sphinx_gallery.gen_rst\nfrom sphinx_gallery.gen_rst import (\n write_backreferences, extract_intro, _thumbnail_div,\n generate_file_rst, sphinx_compatibility)\n\n\nif sphinx_gallery.__version__ not in ['0.1.12']:\n raise RuntimeError('not tested with this version of sphinx_gallery ({}). 
'\n 'Please modify this check, and validate sphinx_gallery'\n ' behaves as expected.'\n ''.format(sphinx_gallery.__version__))\n\n\nGALLERY_HEADER = textwrap.dedent(\"\"\"\n\n Cartopy Gallery\n ---------------\n\n The following visual examples demonstrate some of the functionality of\n Cartopy, particularly its matplotlib interface.\n\n For a structured introduction to cartopy, including some of these\n examples, see :ref:`getting-started-with-cartopy`.\n\n\"\"\")\n\n\ndef example_groups(src_dir):\n \"\"\"Return a dictionary of {tag: [example filenames]} for the given dir.\"\"\"\n\n sorted_listdir = [fname for fname in sorted(os.listdir(src_dir))\n if fname.endswith('.py') and not fname.startswith('_')]\n tagged_examples = {}\n\n for fname in sorted_listdir:\n fpath = os.path.join(src_dir, fname)\n with open(fpath, 'r') as fh:\n for line in fh:\n # Crudely remove the __tags__ line.\n if line.startswith('__tags__ = '):\n exec(line.strip(), locals(), globals())\n for tag in __tags__: # noqa:\n tagged_examples.setdefault(tag, []).append(fname)\n break\n else:\n tag = 'Miscellanea'\n tagged_examples.setdefault(tag, []).append(fname)\n return tagged_examples\n\n\ndef order_examples(tagged_examples):\n \"\"\"Order the tags and their examples.\"\"\"\n preferred_tag_order = ['Introductory',\n 'Lines and polygons',\n 'Scalar data',\n 'Vector data',\n 'Web services']\n\n def sort_key(item):\n tag = item[0]\n try:\n index = preferred_tag_order.index(tag)\n except ValueError:\n index = len(preferred_tag_order) + 1\n\n return (index, tag.lower())\n sorted_items = sorted(tagged_examples.items(), key=sort_key)\n return OrderedDict(sorted_items)\n\n\ndef write_example(src_fpath, target_dir):\n target_fpath = os.path.join(target_dir, os.path.basename(src_fpath))\n with open(src_fpath, 'r') as fh:\n with open(target_fpath, 'w') as fh_out:\n for line in fh:\n # Crudely remove the __tags__ line.\n if line.startswith('__tags__ = '):\n continue\n fh_out.write(line)\n\n\ndef generate_dir_rst(src_dir, target_dir, gallery_conf, seen_backrefs):\n \"\"\"Generate the gallery reStructuredText for an example directory\"\"\"\n\n fhindex = GALLERY_HEADER\n\n if not os.path.exists(target_dir):\n os.makedirs(target_dir)\n tagged_examples = example_groups(src_dir)\n tagged_examples = order_examples(tagged_examples)\n\n computation_times = []\n build_target_dir = os.path.relpath(target_dir, gallery_conf['src_dir'])\n\n seen = set()\n tmp_dir = tempfile.mkdtemp()\n\n for tag, examples in tagged_examples.items():\n sorted_listdir = examples\n\n entries_text = []\n iterator = sphinx_compatibility.status_iterator(\n sorted_listdir,\n 'Generating gallery for %s ' % tag,\n length=len(sorted_listdir))\n for fname in iterator:\n write_example(os.path.join(src_dir, fname), tmp_dir)\n amount_of_code, time_elapsed = generate_file_rst(\n fname, target_dir, tmp_dir, gallery_conf)\n\n if fname not in seen:\n seen.add(fname)\n computation_times.append((time_elapsed, fname))\n\n new_fname = os.path.join(src_dir, fname)\n intro = extract_intro(new_fname)\n this_entry = _thumbnail_div(build_target_dir, fname, intro) + textwrap.dedent(\"\"\"\n\n .. 
toctree::\n :hidden:\n\n /%s\n\n \"\"\") % os.path.join(build_target_dir, fname[:-3]).replace(os.sep, '/') # noqa: E501\n\n entries_text.append((amount_of_code, this_entry))\n\n if gallery_conf['backreferences_dir']:\n write_backreferences(seen_backrefs, gallery_conf,\n target_dir, fname, intro)\n\n # sort to have the smallest entries in the beginning\n entries_text.sort()\n\n fhindex += textwrap.dedent(\"\"\"\n\n {tag}\n {tag_underline}\n\n .. container:: gallery_images\n\n \"\"\".format(tag=tag, tag_underline='-' * len(tag)))\n\n for _, entry_text in entries_text:\n fhindex += '\\n '.join(entry_text.split('\\n'))\n\n # clear at the end of the section\n fhindex += \"\"\".. raw:: html\\n\n <div style='clear:both'></div>\\n\\n\"\"\"\n\n # Tidy up the temp directory\n shutil.rmtree(tmp_dir)\n\n return fhindex, computation_times\n\n\n# Monkey-patch sphinx_gallery to handle cartopy's example format.\nsphinx_gallery.gen_rst.generate_dir_rst = generate_dir_rst\nsphinx_gallery.gen_gallery.generate_dir_rst = generate_dir_rst\n\n\ndef setup(app):\n pass\n", "path": "docs/source/sphinxext/pre_sphinx_gallery.py"}]} | 2,027 | 179 |
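Incidentally, the `# noqa` change above leans on a scoping quirk: inside a function, `exec()` cannot create a usable local variable, so the pre-initialised `__tags__ = []` shadowed whatever the `exec` call produced. A safer variant binds into an explicit namespace (a sketch, not the project's code):

```python
def parse_tags(line):
    """Extract the __tags__ list from a line like "__tags__ = ['Scalar data']"."""
    namespace = {}
    exec(line, namespace)  # bind into a dict we control, not locals()
    return namespace.get("__tags__", [])


print(parse_tags("__tags__ = ['Scalar data', 'Vector data']"))
```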
gh_patches_debug_31210 | rasdani/github-patches | git_diff | ansible__ansible-lint-480 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
E202 (no leading zero for octal permission) False positive
# Issue Type
- Bug report
# Ansible and Ansible Lint details
- ansible-lint installation method: Ansible Galaxy use of ansible-lint
# Desired Behaviour
ansible-lint provides a correct reason for following the lint rule
# Actual Behaviour (Bug report only)
Please give some details of what is actually happening.
Include a [minimum complete verifiable example](http://stackoverflow.com/help/mcve)
with:
- playbook
- output of running ansible-lint
- if you're getting a stack trace, output of
`ansible-playbook --syntax-check playbook`
"Numeric file permissions without leading zero can behave in unexpected ways. See http://docs.ansible.com/ansible/file_module.html" - https://docs.ansible.com/ansible-lint/rules/default_rules.html
./galaxy/downloads/tmp2REDBx/tasks/main-tasks.yml:4: [E202] Octal file permissions must contain leading zero
```
# Lets create the configuration first...
# avoid locking ourself out.
- name: Create firewalld config directories
file:
dest: "{{ item }}"
state: directory
mode: "750"
with_items:
- /etc/firewalld
- /etc/firewalld/zones
notify: firewalld__reload
```
It is not correct that "750" is mis-interpreted. "750" is a string, and is correctly interpreted as the octal permission number `750` i.e. `rwxr-x---`.
Personally I have been using the string "750", rather than remember that YAML supports octal numeric literals using a leading zero.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/ansiblelint/rules/OctalPermissionsRule.py`
Content:
```
1 # Copyright (c) 2013-2014 Will Thames <[email protected]>
2 #
3 # Permission is hereby granted, free of charge, to any person obtaining a copy
4 # of this software and associated documentation files (the "Software"), to deal
5 # in the Software without restriction, including without limitation the rights
6 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 # copies of the Software, and to permit persons to whom the Software is
8 # furnished to do so, subject to the following conditions:
9 #
10 # The above copyright notice and this permission notice shall be included in
11 # all copies or substantial portions of the Software.
12 #
13 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 # THE SOFTWARE.
20
21 from ansiblelint import AnsibleLintRule
22 import re
23 import six
24
25
26 class OctalPermissionsRule(AnsibleLintRule):
27 id = '202'
28 shortdesc = 'Octal file permissions must contain leading zero'
29 description = (
30 'Numeric file permissions without leading zero can behave '
31 'in unexpected ways. See '
32 'http://docs.ansible.com/ansible/file_module.html'
33 )
34 severity = 'VERY_HIGH'
35 tags = ['formatting', 'ANSIBLE0009']
36 version_added = 'historic'
37
38 _modules = ['assemble', 'copy', 'file', 'ini_file', 'lineinfile',
39 'replace', 'synchronize', 'template', 'unarchive']
40
41 mode_regex = re.compile(r'^\s*[0-9]+\s*$')
42 valid_mode_regex = re.compile(r'^\s*0[0-7]{3,4}\s*$')
43
44 def is_invalid_permission(self, mode):
45 # sensible file permission modes don't
46 # have write bit set when read bit is
47 # not set and don't have execute bit set
48 # when user execute bit is not set.
49 # also, user permissions are more generous than
50 # group permissions and user and group permissions
51 # are more generous than world permissions
52
53 other_write_without_read = (mode % 8 and mode % 8 < 4 and
54 not (mode % 8 == 1 and (mode >> 6) % 2 == 1))
55 group_write_without_read = ((mode >> 3) % 8 and (mode >> 3) % 8 < 4 and
56 not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1))
57 user_write_without_read = ((mode >> 6) % 8 and (mode >> 6) % 8 < 4 and
58 not (mode >> 6) % 8 == 1)
59 other_more_generous_than_group = mode % 8 > (mode >> 3) % 8
60 other_more_generous_than_user = mode % 8 > (mode >> 6) % 8
61 group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8
62
63 return (other_write_without_read or
64 group_write_without_read or
65 user_write_without_read or
66 other_more_generous_than_group or
67 other_more_generous_than_user or
68 group_more_generous_than_user)
69
70 def matchtask(self, file, task):
71 if task["action"]["__ansible_module__"] in self._modules:
72 mode = task['action'].get('mode', None)
73 if isinstance(mode, six.string_types) and self.mode_regex.match(mode):
74 return not self.valid_mode_regex.match(mode)
75 if isinstance(mode, int):
76 return self.is_invalid_permission(mode)
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lib/ansiblelint/rules/OctalPermissionsRule.py b/lib/ansiblelint/rules/OctalPermissionsRule.py
--- a/lib/ansiblelint/rules/OctalPermissionsRule.py
+++ b/lib/ansiblelint/rules/OctalPermissionsRule.py
@@ -19,13 +19,12 @@
# THE SOFTWARE.
from ansiblelint import AnsibleLintRule
-import re
import six
class OctalPermissionsRule(AnsibleLintRule):
id = '202'
- shortdesc = 'Octal file permissions must contain leading zero'
+ shortdesc = 'Octal file permissions must contain leading zero or be a string'
description = (
'Numeric file permissions without leading zero can behave '
'in unexpected ways. See '
@@ -38,9 +37,6 @@
_modules = ['assemble', 'copy', 'file', 'ini_file', 'lineinfile',
'replace', 'synchronize', 'template', 'unarchive']
- mode_regex = re.compile(r'^\s*[0-9]+\s*$')
- valid_mode_regex = re.compile(r'^\s*0[0-7]{3,4}\s*$')
-
def is_invalid_permission(self, mode):
# sensible file permission modes don't
# have write bit set when read bit is
@@ -70,7 +66,9 @@
def matchtask(self, file, task):
if task["action"]["__ansible_module__"] in self._modules:
mode = task['action'].get('mode', None)
- if isinstance(mode, six.string_types) and self.mode_regex.match(mode):
- return not self.valid_mode_regex.match(mode)
+
+ if isinstance(mode, six.string_types):
+ return False
+
if isinstance(mode, int):
return self.is_invalid_permission(mode)
| {"golden_diff": "diff --git a/lib/ansiblelint/rules/OctalPermissionsRule.py b/lib/ansiblelint/rules/OctalPermissionsRule.py\n--- a/lib/ansiblelint/rules/OctalPermissionsRule.py\n+++ b/lib/ansiblelint/rules/OctalPermissionsRule.py\n@@ -19,13 +19,12 @@\n # THE SOFTWARE.\n \n from ansiblelint import AnsibleLintRule\n-import re\n import six\n \n \n class OctalPermissionsRule(AnsibleLintRule):\n id = '202'\n- shortdesc = 'Octal file permissions must contain leading zero'\n+ shortdesc = 'Octal file permissions must contain leading zero or be a string'\n description = (\n 'Numeric file permissions without leading zero can behave '\n 'in unexpected ways. See '\n@@ -38,9 +37,6 @@\n _modules = ['assemble', 'copy', 'file', 'ini_file', 'lineinfile',\n 'replace', 'synchronize', 'template', 'unarchive']\n \n- mode_regex = re.compile(r'^\\s*[0-9]+\\s*$')\n- valid_mode_regex = re.compile(r'^\\s*0[0-7]{3,4}\\s*$')\n-\n def is_invalid_permission(self, mode):\n # sensible file permission modes don't\n # have write bit set when read bit is\n@@ -70,7 +66,9 @@\n def matchtask(self, file, task):\n if task[\"action\"][\"__ansible_module__\"] in self._modules:\n mode = task['action'].get('mode', None)\n- if isinstance(mode, six.string_types) and self.mode_regex.match(mode):\n- return not self.valid_mode_regex.match(mode)\n+\n+ if isinstance(mode, six.string_types):\n+ return False\n+\n if isinstance(mode, int):\n return self.is_invalid_permission(mode)\n", "issue": "E202 (no leading zero for octal permission) False positive\n# Issue Type\r\n- Bug report\r\n\r\n# Ansible and Ansible Lint details\r\n\r\n- ansible-lint installation method: Ansible Galaxy use of ansible-lint\r\n\r\n# Desired Behaviour\r\n\r\nansible-lint provides correct reason to following the lint\r\n\r\n# Actual Behaviour (Bug report only)\r\n\r\nPlease give some details of what is actually happening.\r\nInclude a [minimum complete verifiable example](http://stackoverflow.com/help/mcve)\r\nwith:\r\n- playbook\r\n- output of running ansible-lint\r\n- if you're getting a stack trace, output of\r\n `ansible-playbook --syntax-check playbook`\r\n\r\n\"Numeric file permissions without leading zero can behave in unexpected ways. See http://docs.ansible.com/ansible/file_module.html\" - https://docs.ansible.com/ansible-lint/rules/default_rules.html\r\n\r\n./galaxy/downloads/tmp2REDBx/tasks/main-tasks.yml:4: [E202] Octal file permissions must contain leading zero\r\n\r\n```\r\n# Lets create the configuration first...\r\n# avoid locking ourself out.\r\n\r\n- name: Create firewalld config directories\r\n file:\r\n dest: \"{{ item }}\"\r\n state: directory\r\n mode: \"750\"\r\n with_items:\r\n - /etc/firewalld\r\n - /etc/firewalld/zones\r\n notify: firewalld__reload\r\n```\r\n\r\nIt is not correct that \"750\" is mis-interpreted. \"750\" is a string, and is correctly interpreted as the octal permission number `750` i.e. 
`rwxr-x---`.\r\n\r\nPersonally I have been using the string \"750\", rather than remember that YAML supports octal numeric literals using a leading zero.\n", "before_files": [{"content": "# Copyright (c) 2013-2014 Will Thames <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n\nfrom ansiblelint import AnsibleLintRule\nimport re\nimport six\n\n\nclass OctalPermissionsRule(AnsibleLintRule):\n id = '202'\n shortdesc = 'Octal file permissions must contain leading zero'\n description = (\n 'Numeric file permissions without leading zero can behave '\n 'in unexpected ways. See '\n 'http://docs.ansible.com/ansible/file_module.html'\n )\n severity = 'VERY_HIGH'\n tags = ['formatting', 'ANSIBLE0009']\n version_added = 'historic'\n\n _modules = ['assemble', 'copy', 'file', 'ini_file', 'lineinfile',\n 'replace', 'synchronize', 'template', 'unarchive']\n\n mode_regex = re.compile(r'^\\s*[0-9]+\\s*$')\n valid_mode_regex = re.compile(r'^\\s*0[0-7]{3,4}\\s*$')\n\n def is_invalid_permission(self, mode):\n # sensible file permission modes don't\n # have write bit set when read bit is\n # not set and don't have execute bit set\n # when user execute bit is not set.\n # also, user permissions are more generous than\n # group permissions and user and group permissions\n # are more generous than world permissions\n\n other_write_without_read = (mode % 8 and mode % 8 < 4 and\n not (mode % 8 == 1 and (mode >> 6) % 2 == 1))\n group_write_without_read = ((mode >> 3) % 8 and (mode >> 3) % 8 < 4 and\n not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1))\n user_write_without_read = ((mode >> 6) % 8 and (mode >> 6) % 8 < 4 and\n not (mode >> 6) % 8 == 1)\n other_more_generous_than_group = mode % 8 > (mode >> 3) % 8\n other_more_generous_than_user = mode % 8 > (mode >> 6) % 8\n group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8\n\n return (other_write_without_read or\n group_write_without_read or\n user_write_without_read or\n other_more_generous_than_group or\n other_more_generous_than_user or\n group_more_generous_than_user)\n\n def matchtask(self, file, task):\n if task[\"action\"][\"__ansible_module__\"] in self._modules:\n mode = task['action'].get('mode', None)\n if isinstance(mode, six.string_types) and self.mode_regex.match(mode):\n return not self.valid_mode_regex.match(mode)\n if isinstance(mode, int):\n return self.is_invalid_permission(mode)\n", "path": "lib/ansiblelint/rules/OctalPermissionsRule.py"}], "after_files": [{"content": "# Copyright (c) 2013-2014 Will 
Thames <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n\nfrom ansiblelint import AnsibleLintRule\nimport six\n\n\nclass OctalPermissionsRule(AnsibleLintRule):\n id = '202'\n shortdesc = 'Octal file permissions must contain leading zero or be a string'\n description = (\n 'Numeric file permissions without leading zero can behave '\n 'in unexpected ways. See '\n 'http://docs.ansible.com/ansible/file_module.html'\n )\n severity = 'VERY_HIGH'\n tags = ['formatting', 'ANSIBLE0009']\n version_added = 'historic'\n\n _modules = ['assemble', 'copy', 'file', 'ini_file', 'lineinfile',\n 'replace', 'synchronize', 'template', 'unarchive']\n\n def is_invalid_permission(self, mode):\n # sensible file permission modes don't\n # have write bit set when read bit is\n # not set and don't have execute bit set\n # when user execute bit is not set.\n # also, user permissions are more generous than\n # group permissions and user and group permissions\n # are more generous than world permissions\n\n other_write_without_read = (mode % 8 and mode % 8 < 4 and\n not (mode % 8 == 1 and (mode >> 6) % 2 == 1))\n group_write_without_read = ((mode >> 3) % 8 and (mode >> 3) % 8 < 4 and\n not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1))\n user_write_without_read = ((mode >> 6) % 8 and (mode >> 6) % 8 < 4 and\n not (mode >> 6) % 8 == 1)\n other_more_generous_than_group = mode % 8 > (mode >> 3) % 8\n other_more_generous_than_user = mode % 8 > (mode >> 6) % 8\n group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8\n\n return (other_write_without_read or\n group_write_without_read or\n user_write_without_read or\n other_more_generous_than_group or\n other_more_generous_than_user or\n group_more_generous_than_user)\n\n def matchtask(self, file, task):\n if task[\"action\"][\"__ansible_module__\"] in self._modules:\n mode = task['action'].get('mode', None)\n\n if isinstance(mode, six.string_types):\n return False\n\n if isinstance(mode, int):\n return self.is_invalid_permission(mode)\n", "path": "lib/ansiblelint/rules/OctalPermissionsRule.py"}]} | 1,661 | 398 |
gh_patches_debug_32269 | rasdani/github-patches | git_diff | strawberry-graphql__strawberry-491 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add mypy plugin for strawberry.union
We have a function to create union types that also lets us add a name and a description, which are used when generating the GraphQL schema [1].
Unfortunately MyPy complains when using the annotation, since it doesn't recognise it as a type. Here's an example:
```python
ExampleUnion = strawberry.union(
"ExampleUnion",
types=(
ABC,
CDE,
),
)
@strawberry.mutation
def abc(phone_number: str) -> ExampleUnion:
# ...
```
Error:
```
Variable "api.mutations.abc.ExampleUnion" is not valid as a type
```
[1] This is why we can't use `typing.Union` directly: we can't pass arguments (such as a name and description) to it.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `strawberry/ext/mypy_plugin.py`
Content:
```
1 from typing import Callable, Optional
2
3 from mypy.plugin import AnalyzeTypeContext, ClassDefContext, Plugin
4 from mypy.plugins import dataclasses
5 from mypy.types import Type
6
7
8 def lazy_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:
9 type_name = ctx.type.args[0]
10 type_ = ctx.api.analyze_type(type_name)
11
12 return type_
13
14
15 def private_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:
16 type_name = ctx.type.args[0]
17 type_ = ctx.api.analyze_type(type_name)
18
19 return type_
20
21
22 class StrawberryPlugin(Plugin):
23 def get_type_analyze_hook(self, fullname: str):
24 if fullname == "strawberry.lazy_type.LazyType":
25 return lazy_type_analyze_callback
26
27 if any(
28 name in fullname
29 for name in {"strawberry.private.Private", "strawberry.Private"}
30 ):
31 return private_type_analyze_callback
32
33 return None
34
35 def get_class_decorator_hook(
36 self, fullname: str
37 ) -> Optional[Callable[[ClassDefContext], None]]:
38 if any(
39 strawberry_decorator in fullname
40 for strawberry_decorator in {
41 "strawberry.type",
42 "strawberry.federation.type",
43 "strawberry.input",
44 "strawberry.interface",
45 }
46 ):
47 return dataclasses.dataclass_class_maker_callback
48 return None
49
50
51 def plugin(version: str):
52 return StrawberryPlugin
53
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/strawberry/ext/mypy_plugin.py b/strawberry/ext/mypy_plugin.py
--- a/strawberry/ext/mypy_plugin.py
+++ b/strawberry/ext/mypy_plugin.py
@@ -1,8 +1,23 @@
from typing import Callable, Optional
-from mypy.plugin import AnalyzeTypeContext, ClassDefContext, Plugin
+from mypy.nodes import (
+ GDEF,
+ Expression,
+ IndexExpr,
+ NameExpr,
+ SymbolTableNode,
+ TupleExpr,
+ TypeAlias,
+)
+from mypy.plugin import (
+ AnalyzeTypeContext,
+ ClassDefContext,
+ DynamicClassDefContext,
+ Plugin,
+ SemanticAnalyzerPluginInterface,
+)
from mypy.plugins import dataclasses
-from mypy.types import Type
+from mypy.types import Type, UnionType
def lazy_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:
@@ -19,7 +34,48 @@
return type_
+def _get_type_for_expr(expr: Expression, api: SemanticAnalyzerPluginInterface):
+ if isinstance(expr, NameExpr):
+ return api.named_type(expr.name)
+
+ if isinstance(expr, IndexExpr):
+ type_ = _get_type_for_expr(expr.base, api)
+ type_.args = [_get_type_for_expr(expr.index, api)]
+
+ return type_
+
+ raise ValueError(f"Unsupported expression f{type(expr)}")
+
+
+def union_hook(ctx: DynamicClassDefContext) -> None:
+ types = ctx.call.args[1]
+
+ if isinstance(types, TupleExpr):
+ type_ = UnionType(tuple(_get_type_for_expr(x, ctx.api) for x in types.items))
+
+ type_alias = TypeAlias(
+ type_,
+ fullname=ctx.api.qualified_name(ctx.name),
+ line=ctx.call.line,
+ column=ctx.call.column,
+ )
+
+ ctx.api.add_symbol_table_node(
+ ctx.name, SymbolTableNode(GDEF, type_alias, plugin_generated=False)
+ )
+
+
class StrawberryPlugin(Plugin):
+ def get_dynamic_class_hook(
+ self, fullname: str
+ ) -> Optional[Callable[[DynamicClassDefContext], None]]:
+ # TODO: investigate why we need this instead of `strawberry.union.union` on CI
+ # we have the same issue in the other hooks
+ if "strawberry.union" in fullname:
+ return union_hook
+
+ return None
+
def get_type_analyze_hook(self, fullname: str):
if fullname == "strawberry.lazy_type.LazyType":
return lazy_type_analyze_callback
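
For illustration, here is a minimal sketch (not taken from the repository) of code the patched plugin is meant to accept, assuming the plugin is enabled via `plugins = strawberry.ext.mypy_plugin` in the mypy configuration; `User` and `Error` are invented stand-in types.

```python
import strawberry


@strawberry.type
class User:
    name: str


@strawberry.type
class Error:
    message: str


# union_hook registers a TypeAlias for this assignment, so mypy now treats
# ExampleUnion as a valid annotation instead of a plain variable.
ExampleUnion = strawberry.union("ExampleUnion", types=(User, Error))


def resolve(flag: bool) -> ExampleUnion:  # previously: "not valid as a type"
    return User(name="ok") if flag else Error(message="boom")
```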
| {"golden_diff": "diff --git a/strawberry/ext/mypy_plugin.py b/strawberry/ext/mypy_plugin.py\n--- a/strawberry/ext/mypy_plugin.py\n+++ b/strawberry/ext/mypy_plugin.py\n@@ -1,8 +1,23 @@\n from typing import Callable, Optional\n \n-from mypy.plugin import AnalyzeTypeContext, ClassDefContext, Plugin\n+from mypy.nodes import (\n+ GDEF,\n+ Expression,\n+ IndexExpr,\n+ NameExpr,\n+ SymbolTableNode,\n+ TupleExpr,\n+ TypeAlias,\n+)\n+from mypy.plugin import (\n+ AnalyzeTypeContext,\n+ ClassDefContext,\n+ DynamicClassDefContext,\n+ Plugin,\n+ SemanticAnalyzerPluginInterface,\n+)\n from mypy.plugins import dataclasses\n-from mypy.types import Type\n+from mypy.types import Type, UnionType\n \n \n def lazy_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:\n@@ -19,7 +34,48 @@\n return type_\n \n \n+def _get_type_for_expr(expr: Expression, api: SemanticAnalyzerPluginInterface):\n+ if isinstance(expr, NameExpr):\n+ return api.named_type(expr.name)\n+\n+ if isinstance(expr, IndexExpr):\n+ type_ = _get_type_for_expr(expr.base, api)\n+ type_.args = [_get_type_for_expr(expr.index, api)]\n+\n+ return type_\n+\n+ raise ValueError(f\"Unsupported expression f{type(expr)}\")\n+\n+\n+def union_hook(ctx: DynamicClassDefContext) -> None:\n+ types = ctx.call.args[1]\n+\n+ if isinstance(types, TupleExpr):\n+ type_ = UnionType(tuple(_get_type_for_expr(x, ctx.api) for x in types.items))\n+\n+ type_alias = TypeAlias(\n+ type_,\n+ fullname=ctx.api.qualified_name(ctx.name),\n+ line=ctx.call.line,\n+ column=ctx.call.column,\n+ )\n+\n+ ctx.api.add_symbol_table_node(\n+ ctx.name, SymbolTableNode(GDEF, type_alias, plugin_generated=False)\n+ )\n+\n+\n class StrawberryPlugin(Plugin):\n+ def get_dynamic_class_hook(\n+ self, fullname: str\n+ ) -> Optional[Callable[[DynamicClassDefContext], None]]:\n+ # TODO: investigate why we need this instead of `strawberry.union.union` on CI\n+ # we have the same issue in the other hooks\n+ if \"strawberry.union\" in fullname:\n+ return union_hook\n+\n+ return None\n+\n def get_type_analyze_hook(self, fullname: str):\n if fullname == \"strawberry.lazy_type.LazyType\":\n return lazy_type_analyze_callback\n", "issue": "Add mypy plugin for strawberry.union\nWe have a function to create union types and also add name and description that are used when generating the GraphQL schema[1].\r\n\r\nUnfortunately MyPy complains when using the annotation, since it doesn't recognise it as a type, here's an example:\r\n\r\n```python\r\nExampleUnion = strawberry.union(\r\n \"ExampleUnion\",\r\n types=(\r\n ABC,\r\n CDE,\r\n ),\r\n)\r\n\r\[email protected]\r\ndef abc(phone_number: str) -> ExampleUnion:\r\n # ...\r\n```\r\n\r\nError:\r\n\r\n```\r\nVariable \"api.mutations.abc.ExampleUnion\" is not valid as a type\r\n```\r\n\r\n[1] This is why we can't use typing.Union directly, as we can't pass arguments to it\n", "before_files": [{"content": "from typing import Callable, Optional\n\nfrom mypy.plugin import AnalyzeTypeContext, ClassDefContext, Plugin\nfrom mypy.plugins import dataclasses\nfrom mypy.types import Type\n\n\ndef lazy_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:\n type_name = ctx.type.args[0]\n type_ = ctx.api.analyze_type(type_name)\n\n return type_\n\n\ndef private_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:\n type_name = ctx.type.args[0]\n type_ = ctx.api.analyze_type(type_name)\n\n return type_\n\n\nclass StrawberryPlugin(Plugin):\n def get_type_analyze_hook(self, fullname: str):\n if fullname == \"strawberry.lazy_type.LazyType\":\n return 
lazy_type_analyze_callback\n\n if any(\n name in fullname\n for name in {\"strawberry.private.Private\", \"strawberry.Private\"}\n ):\n return private_type_analyze_callback\n\n return None\n\n def get_class_decorator_hook(\n self, fullname: str\n ) -> Optional[Callable[[ClassDefContext], None]]:\n if any(\n strawberry_decorator in fullname\n for strawberry_decorator in {\n \"strawberry.type\",\n \"strawberry.federation.type\",\n \"strawberry.input\",\n \"strawberry.interface\",\n }\n ):\n return dataclasses.dataclass_class_maker_callback\n return None\n\n\ndef plugin(version: str):\n return StrawberryPlugin\n", "path": "strawberry/ext/mypy_plugin.py"}], "after_files": [{"content": "from typing import Callable, Optional\n\nfrom mypy.nodes import (\n GDEF,\n Expression,\n IndexExpr,\n NameExpr,\n SymbolTableNode,\n TupleExpr,\n TypeAlias,\n)\nfrom mypy.plugin import (\n AnalyzeTypeContext,\n ClassDefContext,\n DynamicClassDefContext,\n Plugin,\n SemanticAnalyzerPluginInterface,\n)\nfrom mypy.plugins import dataclasses\nfrom mypy.types import Type, UnionType\n\n\ndef lazy_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:\n type_name = ctx.type.args[0]\n type_ = ctx.api.analyze_type(type_name)\n\n return type_\n\n\ndef private_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:\n type_name = ctx.type.args[0]\n type_ = ctx.api.analyze_type(type_name)\n\n return type_\n\n\ndef _get_type_for_expr(expr: Expression, api: SemanticAnalyzerPluginInterface):\n if isinstance(expr, NameExpr):\n return api.named_type(expr.name)\n\n if isinstance(expr, IndexExpr):\n type_ = _get_type_for_expr(expr.base, api)\n type_.args = [_get_type_for_expr(expr.index, api)]\n\n return type_\n\n raise ValueError(f\"Unsupported expression f{type(expr)}\")\n\n\ndef union_hook(ctx: DynamicClassDefContext) -> None:\n types = ctx.call.args[1]\n\n if isinstance(types, TupleExpr):\n type_ = UnionType(tuple(_get_type_for_expr(x, ctx.api) for x in types.items))\n\n type_alias = TypeAlias(\n type_,\n fullname=ctx.api.qualified_name(ctx.name),\n line=ctx.call.line,\n column=ctx.call.column,\n )\n\n ctx.api.add_symbol_table_node(\n ctx.name, SymbolTableNode(GDEF, type_alias, plugin_generated=False)\n )\n\n\nclass StrawberryPlugin(Plugin):\n def get_dynamic_class_hook(\n self, fullname: str\n ) -> Optional[Callable[[DynamicClassDefContext], None]]:\n # TODO: investigate why we need this instead of `strawberry.union.union` on CI\n # we have the same issue in the other hooks\n if \"strawberry.union\" in fullname:\n return union_hook\n\n return None\n\n def get_type_analyze_hook(self, fullname: str):\n if fullname == \"strawberry.lazy_type.LazyType\":\n return lazy_type_analyze_callback\n\n if any(\n name in fullname\n for name in {\"strawberry.private.Private\", \"strawberry.Private\"}\n ):\n return private_type_analyze_callback\n\n return None\n\n def get_class_decorator_hook(\n self, fullname: str\n ) -> Optional[Callable[[ClassDefContext], None]]:\n if any(\n strawberry_decorator in fullname\n for strawberry_decorator in {\n \"strawberry.type\",\n \"strawberry.federation.type\",\n \"strawberry.input\",\n \"strawberry.interface\",\n }\n ):\n return dataclasses.dataclass_class_maker_callback\n return None\n\n\ndef plugin(version: str):\n return StrawberryPlugin\n", "path": "strawberry/ext/mypy_plugin.py"}]} | 835 | 594 |
gh_patches_debug_4568 | rasdani/github-patches | git_diff | medtagger__MedTagger-466 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add information about Dataset to Tasks endpoint
## Current Behavior
The Tasks endpoint returns a list of all Tasks **without** information about their Datasets.
## Expected Behavior
Each Task returned by this endpoint should also contain information about its Dataset.
## Tasks to do
- [ ] Add Dataset key to the Task returned by Tasks endpoint.
## Additional comment
The Dataset key will be fine only if there is an endpoint that returns all Datasets available in MedTagger.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `backend/medtagger/api/tasks/serializers.py`
Content:
```
1 """Module responsible for storage of serializers used in Tasks endpoints."""
2 from flask_restplus import fields
3
4 from medtagger.api import api
5 from medtagger.definitions import LabelTool
6
7 out__label_tag = api.model('Label Tag model', {
8 'key': fields.String(),
9 'name': fields.String(),
10 'actions_ids': fields.List(fields.Integer(),
11 attribute=lambda label_tag: [action.id for action in label_tag.actions]),
12 'tools': fields.List(fields.String(), description='Available tools for Label Tag',
13 enum=[tool.name for tool in LabelTool],
14 attribute=lambda label_tag: [tool.name for tool in label_tag.tools]),
15 })
16
17 in__label_tag = api.model('Label Tag model', {
18 'key': fields.String(),
19 'name': fields.String(),
20 'actions_ids': fields.List(fields.Integer()),
21 'tools': fields.List(fields.String(), description='Available tools for Label Tag',
22 enum=[tool.name for tool in LabelTool]),
23 })
24
25 out__task = api.model('Task model', {
26 'key': fields.String(),
27 'name': fields.String(),
28 'image_path': fields.String(),
29 'tags': fields.List(fields.Nested(out__label_tag), attribute='available_tags'),
30 })
31
32 in__task = api.model('New Task model', {
33 'key': fields.String(),
34 'name': fields.String(),
35 'image_path': fields.String(),
36 'datasets_keys': fields.List(fields.String()),
37 'tags': fields.List(fields.Nested(in__label_tag), attribute='available_tags'),
38 })
39
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/backend/medtagger/api/tasks/serializers.py b/backend/medtagger/api/tasks/serializers.py
--- a/backend/medtagger/api/tasks/serializers.py
+++ b/backend/medtagger/api/tasks/serializers.py
@@ -27,6 +27,7 @@
'name': fields.String(),
'image_path': fields.String(),
'tags': fields.List(fields.Nested(out__label_tag), attribute='available_tags'),
+ 'datasets_keys': fields.List(fields.String(), attribute=lambda task: [dataset.key for dataset in task.datasets]),
})
in__task = api.model('New Task model', {
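
As a rough illustration of the resulting payload (field values invented; only the keys follow the patched `out__task` model above), a serialized Task would now look like:

```python
# Hypothetical serialized Task; only the keys come from out__task.
task_json = {
    "key": "kidney_segmentation",
    "name": "Kidney segmentation",
    "image_path": "assets/icon/kidneys.svg",
    "tags": [{"key": "left_kidney", "name": "Left Kidney", "actions_ids": [], "tools": ["POINT"]}],
    "datasets_keys": ["abdomen_ct"],
}
```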
| {"golden_diff": "diff --git a/backend/medtagger/api/tasks/serializers.py b/backend/medtagger/api/tasks/serializers.py\n--- a/backend/medtagger/api/tasks/serializers.py\n+++ b/backend/medtagger/api/tasks/serializers.py\n@@ -27,6 +27,7 @@\n 'name': fields.String(),\n 'image_path': fields.String(),\n 'tags': fields.List(fields.Nested(out__label_tag), attribute='available_tags'),\n+ 'datasets_keys': fields.List(fields.String(), attribute=lambda task: [dataset.key for dataset in task.datasets]),\n })\n \n in__task = api.model('New Task model', {\n", "issue": "Add information about Dataset to Tasks endpoint\n## Current Behavior\r\n\r\nTasks endpoint returns list of all Tasks **without** information about their Datasets.\r\n\r\n## Expected Behavior\r\n\r\nEach Task returned by this endpoint should also contain information about its Dataset.\r\n\r\n## Tasks to do\r\n\r\n- [ ] Add Dataset key to the Task returned by Tasks endpoint.\r\n\r\n## Additional comment\r\n\r\nDataset key will be fine only if there will be an endpoint that will return all Datasets available in MedTagger.\n", "before_files": [{"content": "\"\"\"Module responsible for storage of serializers used in Tasks endpoints.\"\"\"\nfrom flask_restplus import fields\n\nfrom medtagger.api import api\nfrom medtagger.definitions import LabelTool\n\nout__label_tag = api.model('Label Tag model', {\n 'key': fields.String(),\n 'name': fields.String(),\n 'actions_ids': fields.List(fields.Integer(),\n attribute=lambda label_tag: [action.id for action in label_tag.actions]),\n 'tools': fields.List(fields.String(), description='Available tools for Label Tag',\n enum=[tool.name for tool in LabelTool],\n attribute=lambda label_tag: [tool.name for tool in label_tag.tools]),\n})\n\nin__label_tag = api.model('Label Tag model', {\n 'key': fields.String(),\n 'name': fields.String(),\n 'actions_ids': fields.List(fields.Integer()),\n 'tools': fields.List(fields.String(), description='Available tools for Label Tag',\n enum=[tool.name for tool in LabelTool]),\n})\n\nout__task = api.model('Task model', {\n 'key': fields.String(),\n 'name': fields.String(),\n 'image_path': fields.String(),\n 'tags': fields.List(fields.Nested(out__label_tag), attribute='available_tags'),\n})\n\nin__task = api.model('New Task model', {\n 'key': fields.String(),\n 'name': fields.String(),\n 'image_path': fields.String(),\n 'datasets_keys': fields.List(fields.String()),\n 'tags': fields.List(fields.Nested(in__label_tag), attribute='available_tags'),\n})\n", "path": "backend/medtagger/api/tasks/serializers.py"}], "after_files": [{"content": "\"\"\"Module responsible for storage of serializers used in Tasks endpoints.\"\"\"\nfrom flask_restplus import fields\n\nfrom medtagger.api import api\nfrom medtagger.definitions import LabelTool\n\nout__label_tag = api.model('Label Tag model', {\n 'key': fields.String(),\n 'name': fields.String(),\n 'actions_ids': fields.List(fields.Integer(),\n attribute=lambda label_tag: [action.id for action in label_tag.actions]),\n 'tools': fields.List(fields.String(), description='Available tools for Label Tag',\n enum=[tool.name for tool in LabelTool],\n attribute=lambda label_tag: [tool.name for tool in label_tag.tools]),\n})\n\nin__label_tag = api.model('Label Tag model', {\n 'key': fields.String(),\n 'name': fields.String(),\n 'actions_ids': fields.List(fields.Integer()),\n 'tools': fields.List(fields.String(), description='Available tools for Label Tag',\n enum=[tool.name for tool in LabelTool]),\n})\n\nout__task = api.model('Task model', {\n 
'key': fields.String(),\n 'name': fields.String(),\n 'image_path': fields.String(),\n 'tags': fields.List(fields.Nested(out__label_tag), attribute='available_tags'),\n 'datasets_keys': fields.List(fields.String(), attribute=lambda task: [dataset.key for dataset in task.datasets]),\n})\n\nin__task = api.model('New Task model', {\n 'key': fields.String(),\n 'name': fields.String(),\n 'image_path': fields.String(),\n 'datasets_keys': fields.List(fields.String()),\n 'tags': fields.List(fields.Nested(in__label_tag), attribute='available_tags'),\n})\n", "path": "backend/medtagger/api/tasks/serializers.py"}]} | 750 | 140 |
gh_patches_debug_29120 | rasdani/github-patches | git_diff | OCA__social-262 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[10.0] mail_sendgrid, Error: No module named simplejson
> File ".../addons/oca-social/mail_sendgrid/controllers/json_request.py", line 4, in <module>
> import simplejson
> ImportError: No module named simplejson
I put `simplejson` into the requirements.txt file and it solved this issue.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mail_sendgrid/controllers/json_request.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Copyright 2016-2017 Compassion CH (http://www.compassion.ch)
3 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
4 import simplejson
5
6 from odoo.http import JsonRequest, Root, Response
7
8 # Monkeypatch type of request rooter to use RESTJsonRequest
9 old_get_request = Root.get_request
10
11
12 def get_request(self, httprequest):
13 if (httprequest.mimetype == "application/json" and
14 httprequest.environ['PATH_INFO'].startswith('/mail')):
15 return RESTJsonRequest(httprequest)
16 return old_get_request(self, httprequest)
17
18
19 Root.get_request = get_request
20
21
22 class RESTJsonRequest(JsonRequest):
23 """ Special RestJson Handler to enable receiving lists in JSON
24 body
25 """
26 def __init__(self, *args):
27 try:
28 super(RESTJsonRequest, self).__init__(*args)
29 except AttributeError:
30 # The JSON may contain a list
31 self.params = dict()
32 self.context = dict(self.session.context)
33
34 def _json_response(self, result=None, error=None):
35 response = {}
36 if error is not None:
37 response['error'] = error
38 if result is not None:
39 response['result'] = result
40
41 mime = 'application/json'
42 body = simplejson.dumps(response)
43
44 return Response(
45 body, headers=[('Content-Type', mime),
46 ('Content-Length', len(body))])
47
```
Path: `mail_sendgrid/__manifest__.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Copyright 2015-2017 Compassion CH (http://www.compassion.ch)
3 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
4 {
5 'name': 'SendGrid',
6 'version': '10.0.1.0.0',
7 'category': 'Social Network',
8 'author': 'Compassion CH, Odoo Community Association (OCA)',
9 'license': 'AGPL-3',
10 'website': 'https://github.com/OCA/social',
11 'depends': ['mail_tracking'],
12 'data': [
13 'security/ir.model.access.csv',
14 'views/sendgrid_email_view.xml',
15 'views/sendgrid_template_view.xml',
16 'views/mail_compose_message_view.xml',
17 'views/email_template_view.xml',
18 ],
19 'demo': [],
20 'installable': True,
21 'auto_install': False,
22 'external_dependencies': {
23 'python': ['sendgrid'],
24 },
25 }
26
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mail_sendgrid/__manifest__.py b/mail_sendgrid/__manifest__.py
--- a/mail_sendgrid/__manifest__.py
+++ b/mail_sendgrid/__manifest__.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
-# Copyright 2015-2017 Compassion CH (http://www.compassion.ch)
+# Copyright 2015-2018 Compassion CH (http://www.compassion.ch)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'SendGrid',
- 'version': '10.0.1.0.0',
+ 'version': '10.0.1.0.1',
'category': 'Social Network',
'author': 'Compassion CH, Odoo Community Association (OCA)',
'license': 'AGPL-3',
@@ -20,6 +20,6 @@
'installable': True,
'auto_install': False,
'external_dependencies': {
- 'python': ['sendgrid'],
+ 'python': ['sendgrid', 'simplejson'],
},
}
diff --git a/mail_sendgrid/controllers/json_request.py b/mail_sendgrid/controllers/json_request.py
--- a/mail_sendgrid/controllers/json_request.py
+++ b/mail_sendgrid/controllers/json_request.py
@@ -1,12 +1,18 @@
# -*- coding: utf-8 -*-
# Copyright 2016-2017 Compassion CH (http://www.compassion.ch)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
-import simplejson
-
+import logging
from odoo.http import JsonRequest, Root, Response
# Monkeypatch type of request rooter to use RESTJsonRequest
old_get_request = Root.get_request
+_logger = logging.getLogger(__name__)
+
+try:
+ import simplejson
+except ImportError:
+ _logger.error("Please install simplejson tu use mail_sendgrid module")
+ _logger.debug("ImportError details:", exc_info=True)
def get_request(self, httprequest):
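
The try/except around the import follows the usual OCA convention for optional external dependencies: a missing library then breaks only this addon instead of the whole server start-up. A generic sketch of the pattern (module name and message are placeholders, not code from the addon):

```python
import logging

_logger = logging.getLogger(__name__)

try:
    import some_external_lib  # placeholder for the real dependency
except ImportError:
    _logger.error("Please install some_external_lib to use this module")
    _logger.debug("ImportError details:", exc_info=True)
```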
| {"golden_diff": "diff --git a/mail_sendgrid/__manifest__.py b/mail_sendgrid/__manifest__.py\n--- a/mail_sendgrid/__manifest__.py\n+++ b/mail_sendgrid/__manifest__.py\n@@ -1,9 +1,9 @@\n # -*- coding: utf-8 -*-\n-# Copyright 2015-2017 Compassion CH (http://www.compassion.ch)\n+# Copyright 2015-2018 Compassion CH (http://www.compassion.ch)\n # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).\n {\n 'name': 'SendGrid',\n- 'version': '10.0.1.0.0',\n+ 'version': '10.0.1.0.1',\n 'category': 'Social Network',\n 'author': 'Compassion CH, Odoo Community Association (OCA)',\n 'license': 'AGPL-3',\n@@ -20,6 +20,6 @@\n 'installable': True,\n 'auto_install': False,\n 'external_dependencies': {\n- 'python': ['sendgrid'],\n+ 'python': ['sendgrid', 'simplejson'],\n },\n }\ndiff --git a/mail_sendgrid/controllers/json_request.py b/mail_sendgrid/controllers/json_request.py\n--- a/mail_sendgrid/controllers/json_request.py\n+++ b/mail_sendgrid/controllers/json_request.py\n@@ -1,12 +1,18 @@\n # -*- coding: utf-8 -*-\n # Copyright 2016-2017 Compassion CH (http://www.compassion.ch)\n # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).\n-import simplejson\n-\n+import logging\n from odoo.http import JsonRequest, Root, Response\n \n # Monkeypatch type of request rooter to use RESTJsonRequest\n old_get_request = Root.get_request\n+_logger = logging.getLogger(__name__)\n+\n+try:\n+ import simplejson\n+except ImportError:\n+ _logger.error(\"Please install simplejson tu use mail_sendgrid module\")\n+ _logger.debug(\"ImportError details:\", exc_info=True)\n \n \n def get_request(self, httprequest):\n", "issue": "[10.0] mail_sendrid, Error: No module named simplejson\n> File \".../addons/oca-social/mail_sendgrid/controllers/json_request.py\", line 4, in <module>\r\n> import simplejson\r\n> ImportError: No module named simplejson\r\n\r\nI put `simplejson` into file requirements.txt and it solved this issue.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright 2016-2017 Compassion CH (http://www.compassion.ch)\n# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).\nimport simplejson\n\nfrom odoo.http import JsonRequest, Root, Response\n\n# Monkeypatch type of request rooter to use RESTJsonRequest\nold_get_request = Root.get_request\n\n\ndef get_request(self, httprequest):\n if (httprequest.mimetype == \"application/json\" and\n httprequest.environ['PATH_INFO'].startswith('/mail')):\n return RESTJsonRequest(httprequest)\n return old_get_request(self, httprequest)\n\n\nRoot.get_request = get_request\n\n\nclass RESTJsonRequest(JsonRequest):\n \"\"\" Special RestJson Handler to enable receiving lists in JSON\n body\n \"\"\"\n def __init__(self, *args):\n try:\n super(RESTJsonRequest, self).__init__(*args)\n except AttributeError:\n # The JSON may contain a list\n self.params = dict()\n self.context = dict(self.session.context)\n\n def _json_response(self, result=None, error=None):\n response = {}\n if error is not None:\n response['error'] = error\n if result is not None:\n response['result'] = result\n\n mime = 'application/json'\n body = simplejson.dumps(response)\n\n return Response(\n body, headers=[('Content-Type', mime),\n ('Content-Length', len(body))])\n", "path": "mail_sendgrid/controllers/json_request.py"}, {"content": "# -*- coding: utf-8 -*-\n# Copyright 2015-2017 Compassion CH (http://www.compassion.ch)\n# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).\n{\n 'name': 'SendGrid',\n 'version': '10.0.1.0.0',\n 'category': 'Social Network',\n 'author': 
'Compassion CH, Odoo Community Association (OCA)',\n 'license': 'AGPL-3',\n 'website': 'https://github.com/OCA/social',\n 'depends': ['mail_tracking'],\n 'data': [\n 'security/ir.model.access.csv',\n 'views/sendgrid_email_view.xml',\n 'views/sendgrid_template_view.xml',\n 'views/mail_compose_message_view.xml',\n 'views/email_template_view.xml',\n ],\n 'demo': [],\n 'installable': True,\n 'auto_install': False,\n 'external_dependencies': {\n 'python': ['sendgrid'],\n },\n}\n", "path": "mail_sendgrid/__manifest__.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright 2016-2017 Compassion CH (http://www.compassion.ch)\n# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).\nimport logging\nfrom odoo.http import JsonRequest, Root, Response\n\n# Monkeypatch type of request rooter to use RESTJsonRequest\nold_get_request = Root.get_request\n_logger = logging.getLogger(__name__)\n\ntry:\n import simplejson\nexcept ImportError:\n _logger.error(\"Please install simplejson tu use mail_sendgrid module\")\n _logger.debug(\"ImportError details:\", exc_info=True)\n\n\ndef get_request(self, httprequest):\n if (httprequest.mimetype == \"application/json\" and\n httprequest.environ['PATH_INFO'].startswith('/mail')):\n return RESTJsonRequest(httprequest)\n return old_get_request(self, httprequest)\n\n\nRoot.get_request = get_request\n\n\nclass RESTJsonRequest(JsonRequest):\n \"\"\" Special RestJson Handler to enable receiving lists in JSON\n body\n \"\"\"\n def __init__(self, *args):\n try:\n super(RESTJsonRequest, self).__init__(*args)\n except AttributeError:\n # The JSON may contain a list\n self.params = dict()\n self.context = dict(self.session.context)\n\n def _json_response(self, result=None, error=None):\n response = {}\n if error is not None:\n response['error'] = error\n if result is not None:\n response['result'] = result\n\n mime = 'application/json'\n body = simplejson.dumps(response)\n\n return Response(\n body, headers=[('Content-Type', mime),\n ('Content-Length', len(body))])\n", "path": "mail_sendgrid/controllers/json_request.py"}, {"content": "# -*- coding: utf-8 -*-\n# Copyright 2015-2018 Compassion CH (http://www.compassion.ch)\n# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).\n{\n 'name': 'SendGrid',\n 'version': '10.0.1.0.1',\n 'category': 'Social Network',\n 'author': 'Compassion CH, Odoo Community Association (OCA)',\n 'license': 'AGPL-3',\n 'website': 'https://github.com/OCA/social',\n 'depends': ['mail_tracking'],\n 'data': [\n 'security/ir.model.access.csv',\n 'views/sendgrid_email_view.xml',\n 'views/sendgrid_template_view.xml',\n 'views/mail_compose_message_view.xml',\n 'views/email_template_view.xml',\n ],\n 'demo': [],\n 'installable': True,\n 'auto_install': False,\n 'external_dependencies': {\n 'python': ['sendgrid', 'simplejson'],\n },\n}\n", "path": "mail_sendgrid/__manifest__.py"}]} | 1,026 | 471 |
gh_patches_debug_27142 | rasdani/github-patches | git_diff | Netflix__lemur-302 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Chain Certificate is not exporting
Hi Team,
While extracting a .p12-formatted file (which was exported from Lemur), we cannot find the chain certificates inside it. Could you please let us know if we need to perform any additional step to download the .p12 along with the chain.
Thanks,
Akash John
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lemur/plugins/lemur_openssl/plugin.py`
Content:
```
1 """
2 .. module: lemur.plugins.lemur_openssl.plugin
3 :platform: Unix
4 :copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
5 :license: Apache, see LICENSE for more details.
6
7 .. moduleauthor:: Kevin Glisson <[email protected]>
8 """
9 import subprocess
10
11 from flask import current_app
12
13 from lemur.utils import mktempfile, mktemppath
14 from lemur.plugins.bases import ExportPlugin
15 from lemur.plugins import lemur_openssl as openssl
16 from lemur.common.utils import get_psuedo_random_string
17
18
19 def run_process(command):
20 """
21 Runs a given command with pOpen and wraps some
22 error handling around it.
23 :param command:
24 :return:
25 """
26 p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
27 current_app.logger.debug(command)
28 stdout, stderr = p.communicate()
29
30 if p.returncode != 0:
31 current_app.logger.debug(" ".join(command))
32 current_app.logger.error(stderr)
33 raise Exception(stderr)
34
35
36 def create_pkcs12(cert, p12_tmp, key, alias, passphrase):
37 """
38 Creates a pkcs12 formated file.
39 :param cert:
40 :param jks_tmp:
41 :param key:
42 :param alias:
43 :param passphrase:
44 """
45 with mktempfile() as key_tmp:
46 with open(key_tmp, 'w') as f:
47 f.write(key)
48
49 # Create PKCS12 keystore from private key and public certificate
50 with mktempfile() as cert_tmp:
51 with open(cert_tmp, 'w') as f:
52 f.write(cert)
53
54 run_process([
55 "openssl",
56 "pkcs12",
57 "-export",
58 "-name", alias,
59 "-in", cert_tmp,
60 "-inkey", key_tmp,
61 "-out", p12_tmp,
62 "-password", "pass:{}".format(passphrase)
63 ])
64
65
66 class OpenSSLExportPlugin(ExportPlugin):
67 title = 'OpenSSL'
68 slug = 'openssl-export'
69 description = 'Is a loose interface to openssl and support various formats'
70 version = openssl.VERSION
71
72 author = 'Kevin Glisson'
73 author_url = 'https://github.com/netflix/lemur'
74
75 options = [
76 {
77 'name': 'type',
78 'type': 'select',
79 'required': True,
80 'available': ['PKCS12 (.p12)'],
81 'helpMessage': 'Choose the format you wish to export',
82 },
83 {
84 'name': 'passphrase',
85 'type': 'str',
86 'required': False,
87 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this. Minimum length is 8.',
88 'validation': ''
89 },
90 {
91 'name': 'alias',
92 'type': 'str',
93 'required': False,
94 'helpMessage': 'Enter the alias you wish to use for the keystore.',
95 }
96 ]
97
98 def export(self, body, chain, key, options, **kwargs):
99 """
100 Generates a Java Keystore or Truststore
101
102 :param key:
103 :param chain:
104 :param body:
105 :param options:
106 :param kwargs:
107 """
108 if self.get_option('passphrase', options):
109 passphrase = self.get_option('passphrase', options)
110 else:
111 passphrase = get_psuedo_random_string()
112
113 if self.get_option('alias', options):
114 alias = self.get_option('alias', options)
115 else:
116 alias = "blah"
117
118 type = self.get_option('type', options)
119
120 with mktemppath() as output_tmp:
121 if type == 'PKCS12 (.p12)':
122 create_pkcs12(body, output_tmp, key, alias, passphrase)
123 extension = "p12"
124 else:
125 raise Exception("Unable to export, unsupported type: {0}".format(type))
126
127 with open(output_tmp, 'rb') as f:
128 raw = f.read()
129
130 return extension, passphrase, raw
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lemur/plugins/lemur_openssl/plugin.py b/lemur/plugins/lemur_openssl/plugin.py
--- a/lemur/plugins/lemur_openssl/plugin.py
+++ b/lemur/plugins/lemur_openssl/plugin.py
@@ -33,11 +33,12 @@
raise Exception(stderr)
-def create_pkcs12(cert, p12_tmp, key, alias, passphrase):
+def create_pkcs12(cert, chain, p12_tmp, key, alias, passphrase):
"""
Creates a pkcs12 formated file.
:param cert:
- :param jks_tmp:
+ :param chain:
+ :param p12_tmp:
:param key:
:param alias:
:param passphrase:
@@ -49,7 +50,7 @@
# Create PKCS12 keystore from private key and public certificate
with mktempfile() as cert_tmp:
with open(cert_tmp, 'w') as f:
- f.write(cert)
+ f.writelines([cert + "\n", chain + "\n"])
run_process([
"openssl",
@@ -119,7 +120,7 @@
with mktemppath() as output_tmp:
if type == 'PKCS12 (.p12)':
- create_pkcs12(body, output_tmp, key, alias, passphrase)
+ create_pkcs12(body, chain, output_tmp, key, alias, passphrase)
extension = "p12"
else:
raise Exception("Unable to export, unsupported type: {0}".format(type))
| {"golden_diff": "diff --git a/lemur/plugins/lemur_openssl/plugin.py b/lemur/plugins/lemur_openssl/plugin.py\n--- a/lemur/plugins/lemur_openssl/plugin.py\n+++ b/lemur/plugins/lemur_openssl/plugin.py\n@@ -33,11 +33,12 @@\n raise Exception(stderr)\n \n \n-def create_pkcs12(cert, p12_tmp, key, alias, passphrase):\n+def create_pkcs12(cert, chain, p12_tmp, key, alias, passphrase):\n \"\"\"\n Creates a pkcs12 formated file.\n :param cert:\n- :param jks_tmp:\n+ :param chain:\n+ :param p12_tmp:\n :param key:\n :param alias:\n :param passphrase:\n@@ -49,7 +50,7 @@\n # Create PKCS12 keystore from private key and public certificate\n with mktempfile() as cert_tmp:\n with open(cert_tmp, 'w') as f:\n- f.write(cert)\n+ f.writelines([cert + \"\\n\", chain + \"\\n\"])\n \n run_process([\n \"openssl\",\n@@ -119,7 +120,7 @@\n \n with mktemppath() as output_tmp:\n if type == 'PKCS12 (.p12)':\n- create_pkcs12(body, output_tmp, key, alias, passphrase)\n+ create_pkcs12(body, chain, output_tmp, key, alias, passphrase)\n extension = \"p12\"\n else:\n raise Exception(\"Unable to export, unsupported type: {0}\".format(type))\n", "issue": "Chain Certificate is not exporting\nHi Team,\n\nWhile extracting .p12 formatted file (which was exported from Lemur) we cannot find the Chain file on the same. Could you please let us know if we need to perform any additional step to download the .p12 along with the chain. \n\nThanks,\nAkash John\n\n", "before_files": [{"content": "\"\"\"\n.. module: lemur.plugins.lemur_openssl.plugin\n :platform: Unix\n :copyright: (c) 2015 by Netflix Inc., see AUTHORS for more\n :license: Apache, see LICENSE for more details.\n\n.. moduleauthor:: Kevin Glisson <[email protected]>\n\"\"\"\nimport subprocess\n\nfrom flask import current_app\n\nfrom lemur.utils import mktempfile, mktemppath\nfrom lemur.plugins.bases import ExportPlugin\nfrom lemur.plugins import lemur_openssl as openssl\nfrom lemur.common.utils import get_psuedo_random_string\n\n\ndef run_process(command):\n \"\"\"\n Runs a given command with pOpen and wraps some\n error handling around it.\n :param command:\n :return:\n \"\"\"\n p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n current_app.logger.debug(command)\n stdout, stderr = p.communicate()\n\n if p.returncode != 0:\n current_app.logger.debug(\" \".join(command))\n current_app.logger.error(stderr)\n raise Exception(stderr)\n\n\ndef create_pkcs12(cert, p12_tmp, key, alias, passphrase):\n \"\"\"\n Creates a pkcs12 formated file.\n :param cert:\n :param jks_tmp:\n :param key:\n :param alias:\n :param passphrase:\n \"\"\"\n with mktempfile() as key_tmp:\n with open(key_tmp, 'w') as f:\n f.write(key)\n\n # Create PKCS12 keystore from private key and public certificate\n with mktempfile() as cert_tmp:\n with open(cert_tmp, 'w') as f:\n f.write(cert)\n\n run_process([\n \"openssl\",\n \"pkcs12\",\n \"-export\",\n \"-name\", alias,\n \"-in\", cert_tmp,\n \"-inkey\", key_tmp,\n \"-out\", p12_tmp,\n \"-password\", \"pass:{}\".format(passphrase)\n ])\n\n\nclass OpenSSLExportPlugin(ExportPlugin):\n title = 'OpenSSL'\n slug = 'openssl-export'\n description = 'Is a loose interface to openssl and support various formats'\n version = openssl.VERSION\n\n author = 'Kevin Glisson'\n author_url = 'https://github.com/netflix/lemur'\n\n options = [\n {\n 'name': 'type',\n 'type': 'select',\n 'required': True,\n 'available': ['PKCS12 (.p12)'],\n 'helpMessage': 'Choose the format you wish to export',\n },\n {\n 'name': 'passphrase',\n 'type': 'str',\n 'required': 
False,\n 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this. Minimum length is 8.',\n 'validation': ''\n },\n {\n 'name': 'alias',\n 'type': 'str',\n 'required': False,\n 'helpMessage': 'Enter the alias you wish to use for the keystore.',\n }\n ]\n\n def export(self, body, chain, key, options, **kwargs):\n \"\"\"\n Generates a Java Keystore or Truststore\n\n :param key:\n :param chain:\n :param body:\n :param options:\n :param kwargs:\n \"\"\"\n if self.get_option('passphrase', options):\n passphrase = self.get_option('passphrase', options)\n else:\n passphrase = get_psuedo_random_string()\n\n if self.get_option('alias', options):\n alias = self.get_option('alias', options)\n else:\n alias = \"blah\"\n\n type = self.get_option('type', options)\n\n with mktemppath() as output_tmp:\n if type == 'PKCS12 (.p12)':\n create_pkcs12(body, output_tmp, key, alias, passphrase)\n extension = \"p12\"\n else:\n raise Exception(\"Unable to export, unsupported type: {0}\".format(type))\n\n with open(output_tmp, 'rb') as f:\n raw = f.read()\n\n return extension, passphrase, raw\n", "path": "lemur/plugins/lemur_openssl/plugin.py"}], "after_files": [{"content": "\"\"\"\n.. module: lemur.plugins.lemur_openssl.plugin\n :platform: Unix\n :copyright: (c) 2015 by Netflix Inc., see AUTHORS for more\n :license: Apache, see LICENSE for more details.\n\n.. moduleauthor:: Kevin Glisson <[email protected]>\n\"\"\"\nimport subprocess\n\nfrom flask import current_app\n\nfrom lemur.utils import mktempfile, mktemppath\nfrom lemur.plugins.bases import ExportPlugin\nfrom lemur.plugins import lemur_openssl as openssl\nfrom lemur.common.utils import get_psuedo_random_string\n\n\ndef run_process(command):\n \"\"\"\n Runs a given command with pOpen and wraps some\n error handling around it.\n :param command:\n :return:\n \"\"\"\n p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n current_app.logger.debug(command)\n stdout, stderr = p.communicate()\n\n if p.returncode != 0:\n current_app.logger.debug(\" \".join(command))\n current_app.logger.error(stderr)\n raise Exception(stderr)\n\n\ndef create_pkcs12(cert, chain, p12_tmp, key, alias, passphrase):\n \"\"\"\n Creates a pkcs12 formated file.\n :param cert:\n :param chain:\n :param p12_tmp:\n :param key:\n :param alias:\n :param passphrase:\n \"\"\"\n with mktempfile() as key_tmp:\n with open(key_tmp, 'w') as f:\n f.write(key)\n\n # Create PKCS12 keystore from private key and public certificate\n with mktempfile() as cert_tmp:\n with open(cert_tmp, 'w') as f:\n f.writelines([cert + \"\\n\", chain + \"\\n\"])\n\n run_process([\n \"openssl\",\n \"pkcs12\",\n \"-export\",\n \"-name\", alias,\n \"-in\", cert_tmp,\n \"-inkey\", key_tmp,\n \"-out\", p12_tmp,\n \"-password\", \"pass:{}\".format(passphrase)\n ])\n\n\nclass OpenSSLExportPlugin(ExportPlugin):\n title = 'OpenSSL'\n slug = 'openssl-export'\n description = 'Is a loose interface to openssl and support various formats'\n version = openssl.VERSION\n\n author = 'Kevin Glisson'\n author_url = 'https://github.com/netflix/lemur'\n\n options = [\n {\n 'name': 'type',\n 'type': 'select',\n 'required': True,\n 'available': ['PKCS12 (.p12)'],\n 'helpMessage': 'Choose the format you wish to export',\n },\n {\n 'name': 'passphrase',\n 'type': 'str',\n 'required': False,\n 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this. 
Minimum length is 8.',\n 'validation': ''\n },\n {\n 'name': 'alias',\n 'type': 'str',\n 'required': False,\n 'helpMessage': 'Enter the alias you wish to use for the keystore.',\n }\n ]\n\n def export(self, body, chain, key, options, **kwargs):\n \"\"\"\n Generates a Java Keystore or Truststore\n\n :param key:\n :param chain:\n :param body:\n :param options:\n :param kwargs:\n \"\"\"\n if self.get_option('passphrase', options):\n passphrase = self.get_option('passphrase', options)\n else:\n passphrase = get_psuedo_random_string()\n\n if self.get_option('alias', options):\n alias = self.get_option('alias', options)\n else:\n alias = \"blah\"\n\n type = self.get_option('type', options)\n\n with mktemppath() as output_tmp:\n if type == 'PKCS12 (.p12)':\n create_pkcs12(body, chain, output_tmp, key, alias, passphrase)\n extension = \"p12\"\n else:\n raise Exception(\"Unable to export, unsupported type: {0}\".format(type))\n\n with open(output_tmp, 'rb') as f:\n raw = f.read()\n\n return extension, passphrase, raw\n", "path": "lemur/plugins/lemur_openssl/plugin.py"}]} | 1,515 | 365 |
gh_patches_debug_21335 | rasdani/github-patches | git_diff | bridgecrewio__checkov-5189 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[CKV_AZURE_6] AKS API Server White Tests Failing with Correct Code
**Describe the issue**
[CKV_AZURE_6](https://github.com/bridgecrewio/checkov/tree/master/checkov/arm/checks/resource/AKSApiServerAuthorizedIpRanges.py)
This check should trigger when an API server IP whitelist isn't found in the Terraform code.
**Examples**
Please share an example code sample (in the IaC of your choice) + the expected outcomes.
Sample (Pre 3.39.0 Provider Version):
```
resource "azurerm_kubernetes_cluster" "aks_k2" {
name = var.cluster_name
location = azurerm_resource_group.rg_aks.location
resource_group_name = azurerm_resource_group.rg_aks.name
sku_tier = var.sku_tier
dns_prefix = var.dns_name
api_server_authorized_ip_ranges = [my_ip_list]
}
```
Sample (Post 3.39.0):
```
resource "azurerm_kubernetes_cluster" "aks_k2" {
name = var.cluster_name
location = azurerm_resource_group.rg_aks.location
resource_group_name = azurerm_resource_group.rg_aks.name
sku_tier = var.sku_tier
dns_prefix = var.dns_name
api_server_access_profile {
authorized_ip_ranges = [my_ip_list]
}
}
```
Both have the expected outcome of passing this test, as we list 4 IPs for whitelisting.
We are failing tests

**Version (please complete the following information):**
- Checkov Version: checkov-2.3.272
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py`
Content:
```
1 from __future__ import annotations
2
3 from typing import Any
4
5 from checkov.common.models.consts import ANY_VALUE
6 from checkov.common.models.enums import CheckCategories, CheckResult
7 from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck
8
9
10 class AKSApiServerAuthorizedIpRanges(BaseResourceValueCheck):
11 def __init__(self) -> None:
12 name = "Ensure AKS has an API Server Authorized IP Ranges enabled"
13 id = "CKV_AZURE_6"
14 supported_resources = ("azurerm_kubernetes_cluster",)
15 categories = (CheckCategories.KUBERNETES,)
16 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
17
18 def get_inspected_key(self) -> str:
19 return "api_server_authorized_ip_ranges/[0]"
20
21 def get_expected_value(self) -> Any:
22 return ANY_VALUE
23
24 def scan_resource_conf(self, conf: dict[str, list[Any]]) -> CheckResult:
25 # can't be set for private cluster
26 private_cluster_enabled = conf.get("private_cluster_enabled", [False])[0]
27 if private_cluster_enabled:
28 return CheckResult.PASSED
29 return super().scan_resource_conf(conf)
30
31
32 check = AKSApiServerAuthorizedIpRanges()
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py b/checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py
--- a/checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py
+++ b/checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py
@@ -16,7 +16,7 @@
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def get_inspected_key(self) -> str:
- return "api_server_authorized_ip_ranges/[0]"
+ return "api_server_access_profile/[0]/authorized_ip_ranges/[0]"
def get_expected_value(self) -> Any:
return ANY_VALUE
@@ -26,6 +26,12 @@
private_cluster_enabled = conf.get("private_cluster_enabled", [False])[0]
if private_cluster_enabled:
return CheckResult.PASSED
+
+ # provider version <=3.38.0
+ api_server = conf.get("api_server_authorized_ip_ranges")
+ if api_server and isinstance(api_server, list) and api_server[0]:
+ return CheckResult.PASSED
+
return super().scan_resource_conf(conf)
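
A rough sketch (not from the Checkov test suite) of how the patched check evaluates the different conf shapes; the dicts mimic Checkov's parsed-HCL convention of wrapping values in lists:

```python
from checkov.common.models.enums import CheckResult
from checkov.terraform.checks.resource.azure.AKSApiServerAuthorizedIpRanges import check

# Pre-3.39.0 provider syntax: handled by the new explicit branch.
old_style = {"api_server_authorized_ip_ranges": [["1.2.3.4/32"]]}
assert check.scan_resource_conf(old_style) == CheckResult.PASSED

# Private clusters still pass without any authorized IP ranges.
private = {"private_cluster_enabled": [True]}
assert check.scan_resource_conf(private) == CheckResult.PASSED

# Post-3.39.0 syntax falls through to the base value check, which now
# inspects api_server_access_profile/[0]/authorized_ip_ranges/[0].
new_style = {"api_server_access_profile": [{"authorized_ip_ranges": [["1.2.3.4/32"]]}]}
check.scan_resource_conf(new_style)  # expected: CheckResult.PASSED
```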
| {"golden_diff": "diff --git a/checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py b/checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py\n--- a/checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py\n+++ b/checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py\n@@ -16,7 +16,7 @@\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n \n def get_inspected_key(self) -> str:\n- return \"api_server_authorized_ip_ranges/[0]\"\n+ return \"api_server_access_profile/[0]/authorized_ip_ranges/[0]\"\n \n def get_expected_value(self) -> Any:\n return ANY_VALUE\n@@ -26,6 +26,12 @@\n private_cluster_enabled = conf.get(\"private_cluster_enabled\", [False])[0]\n if private_cluster_enabled:\n return CheckResult.PASSED\n+\n+ # provider version <=3.38.0\n+ api_server = conf.get(\"api_server_authorized_ip_ranges\")\n+ if api_server and isinstance(api_server, list) and api_server[0]:\n+ return CheckResult.PASSED\n+\n return super().scan_resource_conf(conf)\n", "issue": "[CKV_AZURE_6] AKS API Server White Tests Failing with Correct Code\n**Describe the issue**\r\n[CKV_AZURE_6](https://github.com/bridgecrewio/checkov/tree/master/checkov/arm/checks/resource/AKSApiServerAuthorizedIpRanges.py)\r\n\r\nThis check should trigger when an API Server whitelist IP isn't found in the TF code. \r\n\r\n**Examples**\r\nPlease share an example code sample (in the IaC of your choice) + the expected outcomes.\r\n\r\nSample (Pre 3.39.0 Provider Version):\r\n```\r\nresource \"azurerm_kubernetes_cluster\" \"aks_k2\" {\r\n name = var.cluster_name\r\n location = azurerm_resource_group.rg_aks.location\r\n resource_group_name = azurerm_resource_group.rg_aks.name\r\n sku_tier = var.sku_tier\r\n dns_prefix = var.dns_name\r\n api_server_authorized_ip_ranges = [my_ip_list]\r\n}\r\n```\r\nSample (Post 3.39.0):\r\n```\r\nresource \"azurerm_kubernetes_cluster\" \"aks_k2\" {\r\n name = var.cluster_name\r\n location = azurerm_resource_group.rg_aks.location\r\n resource_group_name = azurerm_resource_group.rg_aks.name\r\n sku_tier = var.sku_tier\r\n dns_prefix = var.dns_name\r\n api_server_access_profile {\r\n authorized_ip_ranges = [my_ip_list]\r\n }\r\n}\r\n```\r\n\r\nBoth have expected outcome of passing this test, as we list 4 IP's for whitelisting.\r\nWe are failing tests\r\n\r\n\r\n**Version (please complete the following information):**\r\n - Checkov Version: checkov-2.3.272\r\n\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom typing import Any\n\nfrom checkov.common.models.consts import ANY_VALUE\nfrom checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n\n\nclass AKSApiServerAuthorizedIpRanges(BaseResourceValueCheck):\n def __init__(self) -> None:\n name = \"Ensure AKS has an API Server Authorized IP Ranges enabled\"\n id = \"CKV_AZURE_6\"\n supported_resources = (\"azurerm_kubernetes_cluster\",)\n categories = (CheckCategories.KUBERNETES,)\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def get_inspected_key(self) -> str:\n return \"api_server_authorized_ip_ranges/[0]\"\n\n def get_expected_value(self) -> Any:\n return ANY_VALUE\n\n def scan_resource_conf(self, conf: dict[str, list[Any]]) -> CheckResult:\n # can't be set for private cluster\n private_cluster_enabled = conf.get(\"private_cluster_enabled\", [False])[0]\n 
if private_cluster_enabled:\n return CheckResult.PASSED\n return super().scan_resource_conf(conf)\n\n\ncheck = AKSApiServerAuthorizedIpRanges()\n", "path": "checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom typing import Any\n\nfrom checkov.common.models.consts import ANY_VALUE\nfrom checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n\n\nclass AKSApiServerAuthorizedIpRanges(BaseResourceValueCheck):\n def __init__(self) -> None:\n name = \"Ensure AKS has an API Server Authorized IP Ranges enabled\"\n id = \"CKV_AZURE_6\"\n supported_resources = (\"azurerm_kubernetes_cluster\",)\n categories = (CheckCategories.KUBERNETES,)\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def get_inspected_key(self) -> str:\n return \"api_server_access_profile/[0]/authorized_ip_ranges/[0]\"\n\n def get_expected_value(self) -> Any:\n return ANY_VALUE\n\n def scan_resource_conf(self, conf: dict[str, list[Any]]) -> CheckResult:\n # can't be set for private cluster\n private_cluster_enabled = conf.get(\"private_cluster_enabled\", [False])[0]\n if private_cluster_enabled:\n return CheckResult.PASSED\n\n # provider version <=3.38.0\n api_server = conf.get(\"api_server_authorized_ip_ranges\")\n if api_server and isinstance(api_server, list) and api_server[0]:\n return CheckResult.PASSED\n\n return super().scan_resource_conf(conf)\n\n\ncheck = AKSApiServerAuthorizedIpRanges()\n", "path": "checkov/terraform/checks/resource/azure/AKSApiServerAuthorizedIpRanges.py"}]} | 1,029 | 281 |
gh_patches_debug_48523 | rasdani/github-patches | git_diff | meltano__meltano-6488 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support Node v16
Currently building the Meltano UI with Node 16 results in a crash while building libsass. [That library is deprecated](https://sass-lang.com/blog/libsass-is-deprecated), so we should switch to using Dart Sass instead.
CC @alexmarple
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/alembic_freeze.py`
Content:
```
1 #!/usr/bin/env python3
2
3 """Script to freeze the Meltano database - executed by the Makefile."""
4
5 from __future__ import annotations
6
7 from alembic.script import ScriptDirectory
8
9 from meltano.migrations import LOCK_PATH, MIGRATION_DIR
10
11 scripts = ScriptDirectory(str(MIGRATION_DIR))
12
13 with LOCK_PATH.open("w") as lock:
14 HEAD = scripts.get_current_head()
15 lock.write(HEAD)
16
17 print(f"Meltano database frozen at {HEAD}.")
18
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scripts/alembic_freeze.py b/scripts/alembic_freeze.py
--- a/scripts/alembic_freeze.py
+++ b/scripts/alembic_freeze.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-"""Script to freeze the Meltano database - executed by the Makefile."""
+"""Script to freeze the Meltano database - executed by GitHub CI."""
from __future__ import annotations
| {"golden_diff": "diff --git a/scripts/alembic_freeze.py b/scripts/alembic_freeze.py\n--- a/scripts/alembic_freeze.py\n+++ b/scripts/alembic_freeze.py\n@@ -1,6 +1,6 @@\n #!/usr/bin/env python3\n \n-\"\"\"Script to freeze the Meltano database - executed by the Makefile.\"\"\"\n+\"\"\"Script to freeze the Meltano database - executed by GitHub CI.\"\"\"\n \n from __future__ import annotations\n", "issue": "Support Node v16\nCurrently building the Meltano UI with Node 16 results in a crash while building libsass. [That library is deprecated](https://sass-lang.com/blog/libsass-is-deprecated), so we should switch to using Dart-sass instead.\r\n\r\nCC @alexmarple \n", "before_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"Script to freeze the Meltano database - executed by the Makefile.\"\"\"\n\nfrom __future__ import annotations\n\nfrom alembic.script import ScriptDirectory\n\nfrom meltano.migrations import LOCK_PATH, MIGRATION_DIR\n\nscripts = ScriptDirectory(str(MIGRATION_DIR))\n\nwith LOCK_PATH.open(\"w\") as lock:\n HEAD = scripts.get_current_head()\n lock.write(HEAD)\n\nprint(f\"Meltano database frozen at {HEAD}.\")\n", "path": "scripts/alembic_freeze.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"Script to freeze the Meltano database - executed by GitHub CI.\"\"\"\n\nfrom __future__ import annotations\n\nfrom alembic.script import ScriptDirectory\n\nfrom meltano.migrations import LOCK_PATH, MIGRATION_DIR\n\nscripts = ScriptDirectory(str(MIGRATION_DIR))\n\nwith LOCK_PATH.open(\"w\") as lock:\n HEAD = scripts.get_current_head()\n lock.write(HEAD)\n\nprint(f\"Meltano database frozen at {HEAD}.\")\n", "path": "scripts/alembic_freeze.py"}]} | 458 | 103 |
gh_patches_debug_2696 | rasdani/github-patches | git_diff | dbt-labs__dbt-core-7080 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[CT-2225] [Bug] Suddenly getting ModuleNotFoundError: No module named 'pytz'
### Is this a new bug in dbt-core?
- [X] I believe this is a new bug in dbt-core
- [X] I have searched the existing issues, and I could not find an existing issue for this bug
### Current Behavior
I am installing dbt-bigquery with meltano (which installs it in an isolated *venv*).
Today when invoking `dbt deps` using `meltano invoke dbt-bigquery:deps` I am getting a stacktrace with
ModuleNotFoundError: No module named 'pytz'
### Expected Behavior
`pytz` should be found. I have noted that it is not included in the requirements. So while it's strange that it suddenly started failing, maybe it was more of an accident that it ever worked in the first place?
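A quick check from inside the same venv confirms the missing declaration (a minimal sketch, assuming Python 3.8+ so that `importlib.metadata` is available in the standard library):

```python
# List the dependencies declared by the installed dbt-core distribution and
# check whether pytz is among them; before a fix this prints False.
from importlib.metadata import requires

deps = requires("dbt-core") or []
print(any(d.startswith("pytz") for d in deps))
```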
### Steps To Reproduce
With versions specified as
dbt-core~=1.3.0
dbt-bigquery~=1.3.0
invoking `dbt deps` should not throw a ModuleNotFoundError
### Relevant log output
```shell
Traceback (most recent call last):
File "/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/bin/dbt", line 5, in <module>
from dbt.main import main
File "/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/main.py", line 24, in <module>
import dbt.task.build as build_task
File "/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/build.py", line 1, in <module>
from .run import RunTask, ModelRunner as run_model_runner
File "/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/run.py", line 8, in <module>
from .compile import CompileRunner, CompileTask
File "/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/compile.py", line 4, in <module>
from .runnable import GraphRunnableTask
File "/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/runnable.py", line 11, in <module>
from .printer import (
File "/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/printer.py", line 22, in <module>
from dbt.tracking import InvocationProcessor
File "/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/tracking.py", line 25, in <module>
import pytz
ModuleNotFoundError: No module named 'pytz'
```
### Environment
```markdown
- OS: Linux (fresh docker container inside virtual environment)
- Python: 3.9
- dbt: 1.3.1 (~=1.3.0)
```
### Which database adapter are you using with dbt?
other (mention it in "Additional Context")
### Additional Context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/setup.py`
Content:
```
1 #!/usr/bin/env python
2 import os
3 import sys
4
5 if sys.version_info < (3, 7, 2):
6 print("Error: dbt does not support this version of Python.")
7 print("Please upgrade to Python 3.7.2 or higher.")
8 sys.exit(1)
9
10
11 from setuptools import setup
12
13 try:
14 from setuptools import find_namespace_packages
15 except ImportError:
16 # the user has a downlevel version of setuptools.
17 print("Error: dbt requires setuptools v40.1.0 or higher.")
18 print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again")
19 sys.exit(1)
20
21
22 this_directory = os.path.abspath(os.path.dirname(__file__))
23 with open(os.path.join(this_directory, "README.md")) as f:
24 long_description = f.read()
25
26
27 package_name = "dbt-core"
28 package_version = "1.2.4"
29 description = """With dbt, data analysts and engineers can build analytics \
30 the way engineers build applications."""
31
32
33 setup(
34 name=package_name,
35 version=package_version,
36 description=description,
37 long_description=long_description,
38 long_description_content_type="text/markdown",
39 author="dbt Labs",
40 author_email="[email protected]",
41 url="https://github.com/dbt-labs/dbt-core",
42 packages=find_namespace_packages(include=["dbt", "dbt.*"]),
43 include_package_data=True,
44 test_suite="test",
45 entry_points={
46 "console_scripts": [
47 "dbt = dbt.main:main",
48 ],
49 },
50 install_requires=[
51 "Jinja2==2.11.3",
52 "MarkupSafe>=0.23,<2.1",
53 "agate>=1.6,<1.6.4",
54 "click>=7.0,<9",
55 "colorama>=0.3.9,<0.4.6",
56 "hologram>=0.0.14,<=0.0.15",
57 "isodate>=0.6,<0.7",
58 "logbook>=1.5,<1.6",
59 "mashumaro==2.9",
60 "minimal-snowplow-tracker==0.0.2",
61 "networkx>=2.3,<2.8.1;python_version<'3.8'",
62 "networkx>=2.3,<3;python_version>='3.8'",
63 "packaging>=20.9,<22.0",
64 "sqlparse>=0.2.3,<0.5",
65 "dbt-extractor~=0.4.1",
66 "typing-extensions>=3.7.4",
67 "werkzeug>=1,<3",
68 # the following are all to match snowflake-connector-python
69 "requests<3.0.0",
70 "idna>=2.5,<4",
71 "cffi>=1.9,<2.0.0",
72 ],
73 zip_safe=False,
74 classifiers=[
75 "Development Status :: 5 - Production/Stable",
76 "License :: OSI Approved :: Apache Software License",
77 "Operating System :: Microsoft :: Windows",
78 "Operating System :: MacOS :: MacOS X",
79 "Operating System :: POSIX :: Linux",
80 "Programming Language :: Python :: 3.7",
81 "Programming Language :: Python :: 3.8",
82 "Programming Language :: Python :: 3.9",
83 "Programming Language :: Python :: 3.10",
84 ],
85 python_requires=">=3.7.2",
86 )
87
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/core/setup.py b/core/setup.py
--- a/core/setup.py
+++ b/core/setup.py
@@ -65,6 +65,7 @@
"dbt-extractor~=0.4.1",
"typing-extensions>=3.7.4",
"werkzeug>=1,<3",
+ "pytz>=2015.7",
# the following are all to match snowflake-connector-python
"requests<3.0.0",
"idna>=2.5,<4",
| {"golden_diff": "diff --git a/core/setup.py b/core/setup.py\n--- a/core/setup.py\n+++ b/core/setup.py\n@@ -65,6 +65,7 @@\n \"dbt-extractor~=0.4.1\",\n \"typing-extensions>=3.7.4\",\n \"werkzeug>=1,<3\",\n+ \"pytz>=2015.7\",\n # the following are all to match snowflake-connector-python\n \"requests<3.0.0\",\n \"idna>=2.5,<4\",\n", "issue": "[CT-2225] [Bug] Suddenly getting ModuleNotFoundError: No module named 'pytz'\n### Is this a new bug in dbt-core?\n\n- [X] I believe this is a new bug in dbt-core\n- [X] I have searched the existing issues, and I could not find an existing issue for this bug\n\n### Current Behavior\n\nI am installing dbt-bigquery with meltano (which installs it in a isolated *venv*).\r\n\r\nToday when invoking `dbt deps` using `meltano invoke dbt-bigquery:deps` I am getting a stacktrace with \r\n\r\n ModuleNotFoundError: No module named 'pytz'\r\n\r\n\n\n### Expected Behavior\n\n`pytz` should be found. I have noted that it is not included in the requirements. So while it's strange that it suddenly started failing, maybe it was more of an accident that it ever worked in the first place?\n\n### Steps To Reproduce\n\nWith versions specified as\r\n\r\n dbt-core~=1.3.0\r\n dbt-bigquery~=1.3.0\r\n\r\ninvoking `dbt deps` should not throw a ModuleNotFoundError\n\n### Relevant log output\n\n```shell\nTraceback (most recent call last):\r\n File \"/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/bin/dbt\", line 5, in <module>\r\n from dbt.main import main\r\n File \"/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/main.py\", line 24, in <module>\r\n import dbt.task.build as build_task\r\n File \"/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/build.py\", line 1, in <module>\r\n from .run import RunTask, ModelRunner as run_model_runner\r\n File \"/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/run.py\", line 8, in <module>\r\n from .compile import CompileRunner, CompileTask\r\n File \"/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/compile.py\", line 4, in <module>\r\n from .runnable import GraphRunnableTask\r\n File \"/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/runnable.py\", line 11, in <module>\r\n from .printer import (\r\n File \"/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/task/printer.py\", line 22, in <module>\r\n from dbt.tracking import InvocationProcessor\r\n File \"/workspaces/elt/.meltano/transformers/dbt-bigquery/venv/lib/python3.9/site-packages/dbt/tracking.py\", line 25, in <module>\r\n import pytz\r\nModuleNotFoundError: No module named 'pytz'\n```\n\n\n### Environment\n\n```markdown\n- OS: Linux (fresh docker container inside virtual environment)\r\n- Python: 3.9\r\n- dbt: 1.3.1 (~=1.3.0)\n```\n\n\n### Which database adapter are you using with dbt?\n\nother (mention it in \"Additional Context\")\n\n### Additional Context\n\n_No response_\n", "before_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif sys.version_info < (3, 7, 2):\n print(\"Error: dbt does not support this version of Python.\")\n print(\"Please upgrade to Python 3.7.2 or higher.\")\n sys.exit(1)\n\n\nfrom setuptools import setup\n\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print(\"Error: dbt requires setuptools v40.1.0 or 
higher.\")\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" ' \"and try again\")\n sys.exit(1)\n\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, \"README.md\")) as f:\n long_description = f.read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"1.2.4\"\ndescription = \"\"\"With dbt, data analysts and engineers can build analytics \\\nthe way engineers build applications.\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"dbt Labs\",\n author_email=\"[email protected]\",\n url=\"https://github.com/dbt-labs/dbt-core\",\n packages=find_namespace_packages(include=[\"dbt\", \"dbt.*\"]),\n include_package_data=True,\n test_suite=\"test\",\n entry_points={\n \"console_scripts\": [\n \"dbt = dbt.main:main\",\n ],\n },\n install_requires=[\n \"Jinja2==2.11.3\",\n \"MarkupSafe>=0.23,<2.1\",\n \"agate>=1.6,<1.6.4\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.6\",\n \"hologram>=0.0.14,<=0.0.15\",\n \"isodate>=0.6,<0.7\",\n \"logbook>=1.5,<1.6\",\n \"mashumaro==2.9\",\n \"minimal-snowplow-tracker==0.0.2\",\n \"networkx>=2.3,<2.8.1;python_version<'3.8'\",\n \"networkx>=2.3,<3;python_version>='3.8'\",\n \"packaging>=20.9,<22.0\",\n \"sqlparse>=0.2.3,<0.5\",\n \"dbt-extractor~=0.4.1\",\n \"typing-extensions>=3.7.4\",\n \"werkzeug>=1,<3\",\n # the following are all to match snowflake-connector-python\n \"requests<3.0.0\",\n \"idna>=2.5,<4\",\n \"cffi>=1.9,<2.0.0\",\n ],\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: Microsoft :: Windows\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n ],\n python_requires=\">=3.7.2\",\n)\n", "path": "core/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif sys.version_info < (3, 7, 2):\n print(\"Error: dbt does not support this version of Python.\")\n print(\"Please upgrade to Python 3.7.2 or higher.\")\n sys.exit(1)\n\n\nfrom setuptools import setup\n\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print(\"Error: dbt requires setuptools v40.1.0 or higher.\")\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" ' \"and try again\")\n sys.exit(1)\n\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, \"README.md\")) as f:\n long_description = f.read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"1.2.4\"\ndescription = \"\"\"With dbt, data analysts and engineers can build analytics \\\nthe way engineers build applications.\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"dbt Labs\",\n author_email=\"[email protected]\",\n url=\"https://github.com/dbt-labs/dbt-core\",\n packages=find_namespace_packages(include=[\"dbt\", \"dbt.*\"]),\n include_package_data=True,\n test_suite=\"test\",\n entry_points={\n \"console_scripts\": [\n \"dbt = dbt.main:main\",\n ],\n },\n 
install_requires=[\n \"Jinja2==2.11.3\",\n \"MarkupSafe>=0.23,<2.1\",\n \"agate>=1.6,<1.6.4\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.6\",\n \"hologram>=0.0.14,<=0.0.15\",\n \"isodate>=0.6,<0.7\",\n \"logbook>=1.5,<1.6\",\n \"mashumaro==2.9\",\n \"minimal-snowplow-tracker==0.0.2\",\n \"networkx>=2.3,<2.8.1;python_version<'3.8'\",\n \"networkx>=2.3,<3;python_version>='3.8'\",\n \"packaging>=20.9,<22.0\",\n \"sqlparse>=0.2.3,<0.5\",\n \"dbt-extractor~=0.4.1\",\n \"typing-extensions>=3.7.4\",\n \"werkzeug>=1,<3\",\n \"pytz>=2015.7\",\n # the following are all to match snowflake-connector-python\n \"requests<3.0.0\",\n \"idna>=2.5,<4\",\n \"cffi>=1.9,<2.0.0\",\n ],\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: Microsoft :: Windows\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n ],\n python_requires=\">=3.7.2\",\n)\n", "path": "core/setup.py"}]} | 1,940 | 118 |
gh_patches_debug_1373 | rasdani/github-patches | git_diff | ros__ros_comm-2007 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Rospy import *
Hi,
Doing
```python
from rospy import *
```
raises the following exception:
```
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: module 'rospy' has no attribute 'NodeProxy'
```
After some investigations, `NodeProxy` doesn't seem to exist anymore in the codebase. Simply removing it from the exports should do the trick.
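A one-liner makes the stale export easy to spot (a sketch, assuming `rospy` is importable in the current environment):

```python
# Every name advertised in __all__ that the module does not actually define;
# before the fix this prints ['NodeProxy'].
import rospy

print([name for name in rospy.__all__ if not hasattr(rospy, name)])
```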
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `clients/rospy/src/rospy/__init__.py`
Content:
```
1 # Software License Agreement (BSD License)
2 #
3 # Copyright (c) 2008, Willow Garage, Inc.
4 # All rights reserved.
5 #
6 # Redistribution and use in source and binary forms, with or without
7 # modification, are permitted provided that the following conditions
8 # are met:
9 #
10 # * Redistributions of source code must retain the above copyright
11 # notice, this list of conditions and the following disclaimer.
12 # * Redistributions in binary form must reproduce the above
13 # copyright notice, this list of conditions and the following
14 # disclaimer in the documentation and/or other materials provided
15 # with the distribution.
16 # * Neither the name of Willow Garage, Inc. nor the names of its
17 # contributors may be used to endorse or promote products derived
18 # from this software without specific prior written permission.
19 #
20 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23 # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24 # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25 # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
26 # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
27 # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
29 # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
30 # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
31 # POSSIBILITY OF SUCH DAMAGE.
32 #
33 # Copyright (c) 2008, Willow Garage, Inc.
34 # Revision $Id$
35
36 """
37 ROS client library for Python.
38 See U{http://ros.org/wiki/rospy}
39 @author: Ken Conley (kwc)
40 """
41
42 # import symbols into rospy namespace
43 # NOTE: there are much better ways to configure python module
44 # dictionaries, but the rospy codebase isn't quite in shape for that
45 # yet
46
47 from std_msgs.msg import Header
48
49 from .client import spin, myargv, init_node, \
50 get_published_topics, \
51 wait_for_message, \
52 get_master, \
53 on_shutdown, \
54 get_param, get_param_cached, get_param_names, set_param, delete_param, has_param, search_param,\
55 DEBUG, INFO, WARN, ERROR, FATAL
56 from .timer import sleep, Rate, Timer
57 from .core import is_shutdown, signal_shutdown, \
58 get_node_uri, get_ros_root, \
59 logdebug, logwarn, loginfo, logout, logerr, logfatal, \
60 logdebug_throttle, logwarn_throttle, loginfo_throttle, logerr_throttle, logfatal_throttle, \
61 logdebug_throttle_identical, logwarn_throttle_identical, loginfo_throttle_identical, logerr_throttle_identical, logfatal_throttle_identical, \
62 logdebug_once, logwarn_once, loginfo_once, logerr_once, logfatal_once, \
63 parse_rosrpc_uri
64 from .exceptions import *
65 from .msg import AnyMsg
66 from .msproxy import MasterProxy
67 from .names import get_name, get_caller_id, get_namespace, resolve_name, remap_name
68 from .rostime import Time, Duration, get_rostime, get_time
69 from .service import ServiceException
70
71 # - use tcp ros implementation of services
72 from .impl.tcpros_service import Service, ServiceProxy, wait_for_service
73 from .topics import Message, SubscribeListener, Publisher, Subscriber
74
75 ## \defgroup validators Validators
76 ## \defgroup clientapi Client API
77
78 __all__ = [
79 'Header',
80 'spin',
81 'myargv',
82 'init_node',
83 'get_master',
84 'get_published_topics',
85 'wait_for_service',
86 'on_shutdown',
87 'get_param',
88 'get_param_cached',
89 'get_param_names',
90 'set_param',
91 'delete_param',
92 'has_param',
93 'search_param',
94 'sleep',
95 'Rate',
96 'DEBUG',
97 'INFO',
98 'WARN',
99 'ERROR',
100 'FATAL',
101 'is_shutdown',
102 'signal_shutdown',
103 'get_node_uri',
104 'get_ros_root',
105 'logdebug',
106 'logwarn', 'loginfo',
107 'logout', 'logerr', 'logfatal',
108 'logdebug_throttle',
109 'logwarn_throttle', 'loginfo_throttle',
110 'logerr_throttle', 'logfatal_throttle',
111 'logdebug_once',
112 'logwarn_once', 'loginfo_once',
113 'logerr_once', 'logfatal_once',
114 'parse_rosrpc_uri',
115 'MasterProxy',
116 'NodeProxy',
117 'ROSException',
118 'ROSSerializationException',
119 'ROSInitException',
120 'ROSInterruptException',
121 'ROSInternalException',
122 'TransportException',
123 'TransportTerminated',
124 'TransportInitError',
125 'AnyMsg', 'Message',
126 'get_name',
127 'get_caller_id',
128 'get_namespace',
129 'resolve_name',
130 'remap_name',
131 'Time', 'Duration', 'get_rostime', 'get_time',
132 'ServiceException',
133 'Service', 'ServiceProxy',
134 'SubscribeListener', 'Publisher', 'Subscriber',
135 ]
136
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/clients/rospy/src/rospy/__init__.py b/clients/rospy/src/rospy/__init__.py
--- a/clients/rospy/src/rospy/__init__.py
+++ b/clients/rospy/src/rospy/__init__.py
@@ -113,7 +113,6 @@
'logerr_once', 'logfatal_once',
'parse_rosrpc_uri',
'MasterProxy',
- 'NodeProxy',
'ROSException',
'ROSSerializationException',
'ROSInitException',
| {"golden_diff": "diff --git a/clients/rospy/src/rospy/__init__.py b/clients/rospy/src/rospy/__init__.py\n--- a/clients/rospy/src/rospy/__init__.py\n+++ b/clients/rospy/src/rospy/__init__.py\n@@ -113,7 +113,6 @@\n 'logerr_once', 'logfatal_once',\n 'parse_rosrpc_uri',\n 'MasterProxy',\n- 'NodeProxy', \n 'ROSException',\n 'ROSSerializationException',\n 'ROSInitException',\n", "issue": "Rospy import *\nHi, \r\n\r\nDoing \r\n```python\r\nfrom rospy import *\r\n```\r\nraises the following exception : \r\n```\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\nAttributeError: module 'rospy' has no attribute 'NodeProxy'\r\n```\r\nAfter some investigations, `NodeProxy` doesn't seem to exist anymore in the codebase. Simply removing it from the exports should do the trick.\r\n\n", "before_files": [{"content": "# Software License Agreement (BSD License)\n#\n# Copyright (c) 2008, Willow Garage, Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions\n# are met:\n#\n# * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following\n# disclaimer in the documentation and/or other materials provided\n# with the distribution.\n# * Neither the name of Willow Garage, Inc. nor the names of its\n# contributors may be used to endorse or promote products derived\n# from this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS\n# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE\n# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\n# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT\n# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN\n# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n# POSSIBILITY OF SUCH DAMAGE.\n#\n# Copyright (c) 2008, Willow Garage, Inc.\n# Revision $Id$\n\n\"\"\"\nROS client library for Python.\nSee U{http://ros.org/wiki/rospy}\n@author: Ken Conley (kwc)\n\"\"\"\n\n# import symbols into rospy namespace\n# NOTE: there are much better ways to configure python module\n# dictionaries, but the rospy codebase isn't quite in shape for that\n# yet\n\nfrom std_msgs.msg import Header\n\nfrom .client import spin, myargv, init_node, \\\n get_published_topics, \\\n wait_for_message, \\\n get_master, \\\n on_shutdown, \\\n get_param, get_param_cached, get_param_names, set_param, delete_param, has_param, search_param,\\\n DEBUG, INFO, WARN, ERROR, FATAL\nfrom .timer import sleep, Rate, Timer\nfrom .core import is_shutdown, signal_shutdown, \\\n get_node_uri, get_ros_root, \\\n logdebug, logwarn, loginfo, logout, logerr, logfatal, \\\n logdebug_throttle, logwarn_throttle, loginfo_throttle, logerr_throttle, logfatal_throttle, \\\n logdebug_throttle_identical, logwarn_throttle_identical, loginfo_throttle_identical, logerr_throttle_identical, logfatal_throttle_identical, \\\n logdebug_once, logwarn_once, loginfo_once, logerr_once, logfatal_once, \\\n parse_rosrpc_uri\nfrom .exceptions import *\nfrom .msg import AnyMsg\nfrom .msproxy import MasterProxy\nfrom .names import get_name, get_caller_id, get_namespace, resolve_name, remap_name\nfrom .rostime import Time, Duration, get_rostime, get_time\nfrom .service import ServiceException\n\n# - use tcp ros implementation of services\nfrom .impl.tcpros_service import Service, ServiceProxy, wait_for_service\nfrom .topics import Message, SubscribeListener, Publisher, Subscriber\n\n## \\defgroup validators Validators\n## \\defgroup clientapi Client API\n\n__all__ = [\n 'Header',\n 'spin',\n 'myargv',\n 'init_node',\n 'get_master',\n 'get_published_topics',\n 'wait_for_service',\n 'on_shutdown',\n 'get_param',\n 'get_param_cached',\n 'get_param_names',\n 'set_param',\n 'delete_param',\n 'has_param',\n 'search_param',\n 'sleep',\n 'Rate',\n 'DEBUG',\n 'INFO',\n 'WARN',\n 'ERROR',\n 'FATAL',\n 'is_shutdown',\n 'signal_shutdown',\n 'get_node_uri',\n 'get_ros_root',\n 'logdebug',\n 'logwarn', 'loginfo',\n 'logout', 'logerr', 'logfatal',\n 'logdebug_throttle',\n 'logwarn_throttle', 'loginfo_throttle',\n 'logerr_throttle', 'logfatal_throttle',\n 'logdebug_once',\n 'logwarn_once', 'loginfo_once',\n 'logerr_once', 'logfatal_once',\n 'parse_rosrpc_uri',\n 'MasterProxy',\n 'NodeProxy', \n 'ROSException',\n 'ROSSerializationException',\n 'ROSInitException',\n 'ROSInterruptException',\n 'ROSInternalException',\n 'TransportException',\n 'TransportTerminated',\n 'TransportInitError',\n 'AnyMsg', 'Message',\n 'get_name',\n 'get_caller_id',\n 'get_namespace',\n 'resolve_name',\n 'remap_name',\n 'Time', 'Duration', 'get_rostime', 'get_time',\n 'ServiceException', \n 'Service', 'ServiceProxy',\n 'SubscribeListener', 'Publisher', 'Subscriber',\n ]\n", "path": "clients/rospy/src/rospy/__init__.py"}], "after_files": [{"content": "# Software License 
Agreement (BSD License)\n#\n# Copyright (c) 2008, Willow Garage, Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions\n# are met:\n#\n# * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following\n# disclaimer in the documentation and/or other materials provided\n# with the distribution.\n# * Neither the name of Willow Garage, Inc. nor the names of its\n# contributors may be used to endorse or promote products derived\n# from this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS\n# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE\n# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\n# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT\n# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN\n# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n# POSSIBILITY OF SUCH DAMAGE.\n#\n# Copyright (c) 2008, Willow Garage, Inc.\n# Revision $Id$\n\n\"\"\"\nROS client library for Python.\nSee U{http://ros.org/wiki/rospy}\n@author: Ken Conley (kwc)\n\"\"\"\n\n# import symbols into rospy namespace\n# NOTE: there are much better ways to configure python module\n# dictionaries, but the rospy codebase isn't quite in shape for that\n# yet\n\nfrom std_msgs.msg import Header\n\nfrom .client import spin, myargv, init_node, \\\n get_published_topics, \\\n wait_for_message, \\\n get_master, \\\n on_shutdown, \\\n get_param, get_param_cached, get_param_names, set_param, delete_param, has_param, search_param,\\\n DEBUG, INFO, WARN, ERROR, FATAL\nfrom .timer import sleep, Rate, Timer\nfrom .core import is_shutdown, signal_shutdown, \\\n get_node_uri, get_ros_root, \\\n logdebug, logwarn, loginfo, logout, logerr, logfatal, \\\n logdebug_throttle, logwarn_throttle, loginfo_throttle, logerr_throttle, logfatal_throttle, \\\n logdebug_throttle_identical, logwarn_throttle_identical, loginfo_throttle_identical, logerr_throttle_identical, logfatal_throttle_identical, \\\n logdebug_once, logwarn_once, loginfo_once, logerr_once, logfatal_once, \\\n parse_rosrpc_uri\nfrom .exceptions import *\nfrom .msg import AnyMsg\nfrom .msproxy import MasterProxy\nfrom .names import get_name, get_caller_id, get_namespace, resolve_name, remap_name\nfrom .rostime import Time, Duration, get_rostime, get_time\nfrom .service import ServiceException\n\n# - use tcp ros implementation of services\nfrom .impl.tcpros_service import Service, ServiceProxy, wait_for_service\nfrom .topics import Message, SubscribeListener, Publisher, Subscriber\n\n## \\defgroup validators Validators\n## \\defgroup clientapi Client API\n\n__all__ = [\n 'Header',\n 'spin',\n 'myargv',\n 'init_node',\n 'get_master',\n 'get_published_topics',\n 'wait_for_service',\n 'on_shutdown',\n 'get_param',\n 'get_param_cached',\n 'get_param_names',\n 'set_param',\n 'delete_param',\n 
'has_param',\n 'search_param',\n 'sleep',\n 'Rate',\n 'DEBUG',\n 'INFO',\n 'WARN',\n 'ERROR',\n 'FATAL',\n 'is_shutdown',\n 'signal_shutdown',\n 'get_node_uri',\n 'get_ros_root',\n 'logdebug',\n 'logwarn', 'loginfo',\n 'logout', 'logerr', 'logfatal',\n 'logdebug_throttle',\n 'logwarn_throttle', 'loginfo_throttle',\n 'logerr_throttle', 'logfatal_throttle',\n 'logdebug_once',\n 'logwarn_once', 'loginfo_once',\n 'logerr_once', 'logfatal_once',\n 'parse_rosrpc_uri',\n 'MasterProxy',\n 'ROSException',\n 'ROSSerializationException',\n 'ROSInitException',\n 'ROSInterruptException',\n 'ROSInternalException',\n 'TransportException',\n 'TransportTerminated',\n 'TransportInitError',\n 'AnyMsg', 'Message',\n 'get_name',\n 'get_caller_id',\n 'get_namespace',\n 'resolve_name',\n 'remap_name',\n 'Time', 'Duration', 'get_rostime', 'get_time',\n 'ServiceException', \n 'Service', 'ServiceProxy',\n 'SubscribeListener', 'Publisher', 'Subscriber',\n ]\n", "path": "clients/rospy/src/rospy/__init__.py"}]} | 1,795 | 125 |
gh_patches_debug_2776 | rasdani/github-patches | git_diff | sunpy__sunpy-1505 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
EIT data fails with wcsaxes
The wcs information in the EIT header is not being identified as celestial axes by wcslib (inside astropy.wcs). This means that wcs is not detecting them as angular axes properly, so applying the arcsecond formatting fails.
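A possible workaround is to force the coordinate types before applying the angular formatter. The following is only a sketch: the FITS filename is hypothetical, and it assumes a wcsaxes-enabled matplotlib axes built from the EIT map's WCS.

```python
import matplotlib.pyplot as plt
import sunpy.map

smap = sunpy.map.Map("eit_image.fits")     # hypothetical EIT file
ax = plt.gca(projection=smap.wcs)          # yields a wcsaxes.WCSAxes
smap.plot(axes=ax)

# Declare the coord types explicitly so the 's.s' (arcsecond) formatter can
# be applied even though wcslib did not flag the axes as celestial.
x, y = ax.coords[0], ax.coords[1]
if x.coord_type != 'longitude':
    x.set_coord_type('longitude', coord_wrap=180.)
if y.coord_type != 'latitude':
    y.set_coord_type('latitude')
x.set_major_formatter('s.s')
y.set_major_formatter('s.s')
```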
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sunpy/visualization/wcsaxes_compat.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 Helpers and Functions to make WCSAxes work in SunPy
4 """
5 import warnings
6
7 import matplotlib.pyplot as plt
8
9 import astropy.units as u
10
11 try:
12 import wcsaxes
13 HAVE_WCSAXES = True
14
15 except ImportError:
16 HAVE_WCSAXES = False
17 warnings.warn("SunPy plotting is improved by installing the WCSAxes module: http://wcsaxes.readthedocs.org/en/latest/index.html")
18
19 FORCE_NO_WCSAXES = False
20
21 __all__ = ['HAVE_WCSAXES', 'is_wcsaxes', 'FORCE_NO_WCSAXES']
22
23 def is_wcsaxes(axes):
24 """
25 Test a matplotlib Axes object to see if it is an instance of WCSAxes
26
27 Parameters
28 ----------
29 axes : matplotlib Axes Object
30 Axes to test
31
32 Returns
33 -------
34 result : bool
35 Result of the test
36 """
37
38 if HAVE_WCSAXES and not FORCE_NO_WCSAXES:
39 return isinstance(axes, wcsaxes.WCSAxes)
40 else:
41 return False
42
43
44 def gca_wcs(wcs, fig=None):
45 """
46 Get the current axes, and return a WCSAxes if possible
47 """
48
49 if not fig:
50 fig = plt.gcf()
51
52 if not len(fig.get_axes()):
53 if HAVE_WCSAXES and not FORCE_NO_WCSAXES:
54 ax = plt.gca(projection=wcs)
55 else:
56 ax = plt.gca()
57
58 else:
59 ax = plt.gca()
60
61 return ax
62
63 def get_world_transform(axes):
64 if is_wcsaxes(axes):
65 transform = axes.get_transform('world')
66 else:
67 transform = axes.transData
68
69 return transform
70
71 def default_wcs_grid(axes):
72 """
73 Apply some default wcsaxes grid formatting
74 """
75 if not isinstance(axes, wcsaxes.WCSAxes):
76 raise TypeError("This axes is not a WCSAxes")
77
78 x = axes.coords[0]
79 y = axes.coords[1]
80
81 x.set_ticks(color='white')
82 y.set_ticks(color='white')
83
84 x.set_ticks_position('bl')
85 y.set_ticks_position('bl')
86
87 x.set_major_formatter('s.s')
88 y.set_major_formatter('s.s')
89
90 axes.coords.grid(color='white', alpha=0.6)
91
92 def wcsaxes_heliographic_overlay(axes):
93 """
94 Draw a heliographic overlay using wcsaxes
95 """
96 overlay = axes.get_coords_overlay('heliographicstonyhurst')
97
98 lon = overlay[0]
99 lat = overlay[1]
100
101 lon.coord_wrap = 180
102 lon.set_major_formatter('dd')
103
104 lon.set_axislabel('Solar Longitude')
105 lat.set_axislabel('Solar Latitude')
106
107 lon.set_ticks_position('tr')
108 lat.set_ticks_position('tr')
109
110 lon.set_ticks(spacing=10. * u.deg, color='white')
111 lat.set_ticks(spacing=10. * u.deg, color='white')
112
113 overlay.grid(color='white', alpha=0.5)
114
115 return overlay
116
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sunpy/visualization/wcsaxes_compat.py b/sunpy/visualization/wcsaxes_compat.py
--- a/sunpy/visualization/wcsaxes_compat.py
+++ b/sunpy/visualization/wcsaxes_compat.py
@@ -84,6 +84,11 @@
x.set_ticks_position('bl')
y.set_ticks_position('bl')
+ if x.coord_type != 'longitude':
+ x.set_coord_type('longitude', coord_wrap=180.)
+ if y.coord_type != 'latitude':
+ y.set_coord_type('latitude')
+
x.set_major_formatter('s.s')
y.set_major_formatter('s.s')
| {"golden_diff": "diff --git a/sunpy/visualization/wcsaxes_compat.py b/sunpy/visualization/wcsaxes_compat.py\n--- a/sunpy/visualization/wcsaxes_compat.py\n+++ b/sunpy/visualization/wcsaxes_compat.py\n@@ -84,6 +84,11 @@\n x.set_ticks_position('bl')\n y.set_ticks_position('bl')\n \n+ if x.coord_type != 'longitude':\n+ x.set_coord_type('longitude', coord_wrap=180.)\n+ if y.coord_type != 'latitude':\n+ y.set_coord_type('latitude')\n+\n x.set_major_formatter('s.s')\n y.set_major_formatter('s.s')\n", "issue": "EIT data fails with wcsaxes\nThe wcs information in the EIT header is not being identified as celestial axes by wcslib (inside astropy.wcs). This means that wcs is not detecting them as angular axes properly so therefore the set to arcsecond formatting is failing.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nHelpers and Functions to make WCSAxes work in SunPy\n\"\"\"\nimport warnings\n\nimport matplotlib.pyplot as plt\n\nimport astropy.units as u\n\ntry:\n import wcsaxes\n HAVE_WCSAXES = True\n\nexcept ImportError:\n HAVE_WCSAXES = False\n warnings.warn(\"SunPy plotting is improved by installing the WCSAxes module: http://wcsaxes.readthedocs.org/en/latest/index.html\")\n\nFORCE_NO_WCSAXES = False\n\n__all__ = ['HAVE_WCSAXES', 'is_wcsaxes', 'FORCE_NO_WCSAXES']\n\ndef is_wcsaxes(axes):\n \"\"\"\n Test a matplotlib Axes object to see if it is an instance of WCSAxes\n\n Parameters\n ----------\n axes : matplotlib Axes Object\n Axes to test\n\n Returns\n -------\n result : bool\n Result of the test\n \"\"\"\n\n if HAVE_WCSAXES and not FORCE_NO_WCSAXES:\n return isinstance(axes, wcsaxes.WCSAxes)\n else:\n return False\n\n\ndef gca_wcs(wcs, fig=None):\n \"\"\"\n Get the current axes, and return a WCSAxes if possible\n \"\"\"\n\n if not fig:\n fig = plt.gcf()\n\n if not len(fig.get_axes()):\n if HAVE_WCSAXES and not FORCE_NO_WCSAXES:\n ax = plt.gca(projection=wcs)\n else:\n ax = plt.gca()\n\n else:\n ax = plt.gca()\n\n return ax\n\ndef get_world_transform(axes):\n if is_wcsaxes(axes):\n transform = axes.get_transform('world')\n else:\n transform = axes.transData\n\n return transform\n\ndef default_wcs_grid(axes):\n \"\"\"\n Apply some default wcsaxes grid formatting\n \"\"\"\n if not isinstance(axes, wcsaxes.WCSAxes):\n raise TypeError(\"This axes is not a WCSAxes\")\n\n x = axes.coords[0]\n y = axes.coords[1]\n\n x.set_ticks(color='white')\n y.set_ticks(color='white')\n\n x.set_ticks_position('bl')\n y.set_ticks_position('bl')\n\n x.set_major_formatter('s.s')\n y.set_major_formatter('s.s')\n\n axes.coords.grid(color='white', alpha=0.6)\n\ndef wcsaxes_heliographic_overlay(axes):\n \"\"\"\n Draw a heliographic overlay using wcsaxes\n \"\"\"\n overlay = axes.get_coords_overlay('heliographicstonyhurst')\n\n lon = overlay[0]\n lat = overlay[1]\n\n lon.coord_wrap = 180\n lon.set_major_formatter('dd')\n\n lon.set_axislabel('Solar Longitude')\n lat.set_axislabel('Solar Latitude')\n\n lon.set_ticks_position('tr')\n lat.set_ticks_position('tr')\n\n lon.set_ticks(spacing=10. * u.deg, color='white')\n lat.set_ticks(spacing=10. 
* u.deg, color='white')\n\n overlay.grid(color='white', alpha=0.5)\n\n return overlay\n", "path": "sunpy/visualization/wcsaxes_compat.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nHelpers and Functions to make WCSAxes work in SunPy\n\"\"\"\nimport warnings\n\nimport matplotlib.pyplot as plt\n\nimport astropy.units as u\n\ntry:\n import wcsaxes\n HAVE_WCSAXES = True\n\nexcept ImportError:\n HAVE_WCSAXES = False\n warnings.warn(\"SunPy plotting is improved by installing the WCSAxes module: http://wcsaxes.readthedocs.org/en/latest/index.html\")\n\nFORCE_NO_WCSAXES = False\n\n__all__ = ['HAVE_WCSAXES', 'is_wcsaxes', 'FORCE_NO_WCSAXES']\n\ndef is_wcsaxes(axes):\n \"\"\"\n Test a matplotlib Axes object to see if it is an instance of WCSAxes\n\n Parameters\n ----------\n axes : matplotlib Axes Object\n Axes to test\n\n Returns\n -------\n result : bool\n Result of the test\n \"\"\"\n\n if HAVE_WCSAXES and not FORCE_NO_WCSAXES:\n return isinstance(axes, wcsaxes.WCSAxes)\n else:\n return False\n\n\ndef gca_wcs(wcs, fig=None):\n \"\"\"\n Get the current axes, and return a WCSAxes if possible\n \"\"\"\n\n if not fig:\n fig = plt.gcf()\n\n if not len(fig.get_axes()):\n if HAVE_WCSAXES and not FORCE_NO_WCSAXES:\n ax = plt.gca(projection=wcs)\n else:\n ax = plt.gca()\n\n else:\n ax = plt.gca()\n\n return ax\n\ndef get_world_transform(axes):\n if is_wcsaxes(axes):\n transform = axes.get_transform('world')\n else:\n transform = axes.transData\n\n return transform\n\ndef default_wcs_grid(axes):\n \"\"\"\n Apply some default wcsaxes grid formatting\n \"\"\"\n if not isinstance(axes, wcsaxes.WCSAxes):\n raise TypeError(\"This axes is not a WCSAxes\")\n\n x = axes.coords[0]\n y = axes.coords[1]\n\n x.set_ticks(color='white')\n y.set_ticks(color='white')\n\n x.set_ticks_position('bl')\n y.set_ticks_position('bl')\n\n if x.coord_type != 'longitude':\n x.set_coord_type('longitude', coord_wrap=180.)\n if y.coord_type != 'latitude':\n y.set_coord_type('latitude')\n\n x.set_major_formatter('s.s')\n y.set_major_formatter('s.s')\n\n axes.coords.grid(color='white', alpha=0.6)\n\ndef wcsaxes_heliographic_overlay(axes):\n \"\"\"\n Draw a heliographic overlay using wcsaxes\n \"\"\"\n overlay = axes.get_coords_overlay('heliographicstonyhurst')\n\n lon = overlay[0]\n lat = overlay[1]\n\n lon.coord_wrap = 180\n lon.set_major_formatter('dd')\n\n lon.set_axislabel('Solar Longitude')\n lat.set_axislabel('Solar Latitude')\n\n lon.set_ticks_position('tr')\n lat.set_ticks_position('tr')\n\n lon.set_ticks(spacing=10. * u.deg, color='white')\n lat.set_ticks(spacing=10. * u.deg, color='white')\n\n overlay.grid(color='white', alpha=0.5)\n\n return overlay\n", "path": "sunpy/visualization/wcsaxes_compat.py"}]} | 1,238 | 146 |
gh_patches_debug_29580 | rasdani/github-patches | git_diff | mit-ll-responsible-ai__hydra-zen-175 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Inconsistent static analysis via pyright
```python
x = [1, 2, 3]
make_config(a=[1, 2, 3])
make_config(a=x) # pyright marks this as invalid based on our annotations
```
This seems like a pyright issue, but we might consider revising the annotations to accommodate it.
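The variance rules behind the report can be reproduced in isolation, independent of hydra-zen, with plain `typing` (a minimal sketch):

```python
# List is mutable and therefore invariant: List[int] is not assignable to
# List[Union[int, str]]. The read-only Sequence is covariant and accepts it.
from typing import List, Sequence, Union

def takes_list(a: List[Union[int, str]]) -> None: ...
def takes_seq(a: Sequence[Union[int, str]]) -> None: ...

x = [1, 2, 3]      # inferred as List[int]
takes_list(x)      # pyright error (invariance)
takes_seq(x)       # accepted (covariance)
```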
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/hydra_zen/typing/_implementations.py`
Content:
```
1 # Copyright (c) 2021 Massachusetts Institute of Technology
2 # SPDX-License-Identifier: MIT
3
4 from dataclasses import Field
5 from enum import Enum
6 from pathlib import Path
7 from typing import (
8 Any,
9 Callable,
10 Counter,
11 Deque,
12 Dict,
13 FrozenSet,
14 Generic,
15 List,
16 NewType,
17 Set,
18 Tuple,
19 TypeVar,
20 Union,
21 )
22
23 from omegaconf import DictConfig, ListConfig
24 from typing_extensions import Protocol, runtime_checkable
25
26 __all__ = [
27 "Just",
28 "Builds",
29 "PartialBuilds",
30 "Partial",
31 "Importable",
32 "SupportedPrimitive",
33 ]
34
35
36 _T = TypeVar("_T", covariant=True)
37
38
39 class Partial(Generic[_T]):
40 func: Callable[..., _T]
41 args: Tuple[Any, ...]
42 keywords: Dict[str, Any]
43
44 def __init__(
45 self, func: Callable[..., _T], *args: Any, **kwargs: Any
46 ) -> None: # pragma: no cover
47 ...
48
49 def __call__(self, *args: Any, **kwargs: Any) -> _T: # pragma: no cover
50 ...
51
52
53 InterpStr = NewType("InterpStr", str)
54
55 Importable = TypeVar("Importable")
56
57
58 class _DataClass(Protocol): # pragma: no cover
59 # doesn't provide __init__, __getattribute__, etc.
60 __dataclass_fields__: Dict[str, Field]
61
62
63 class DataClass(_DataClass, Protocol): # pragma: no cover
64 def __init__(self, *args, **kwargs) -> None:
65 ...
66
67 def __getattribute__(self, name: str) -> Any:
68 ...
69
70 def __setattr__(self, name: str, value: Any) -> None:
71 ...
72
73
74 @runtime_checkable
75 class Builds(DataClass, Protocol[_T]): # pragma: no cover
76
77 _target_: str
78
79
80 @runtime_checkable
81 class Just(Builds, Protocol[_T]): # pragma: no cover
82 path: str # interpolated string for importing obj
83 _target_: str = "hydra_zen.funcs.get_obj"
84
85
86 @runtime_checkable
87 class PartialBuilds(Builds, Protocol[_T]): # pragma: no cover
88 _target_: str = "hydra_zen.funcs.zen_processing"
89 _zen_target: str
90 _zen_partial: bool = True
91
92
93 @runtime_checkable
94 class HasTarget(Protocol): # pragma: no cover
95 _target_: str
96
97
98 @runtime_checkable
99 class HasPartialTarget(Protocol): # pragma: no cover
100 _zen_partial: bool = True
101
102
103 _HydraPrimitive = Union[
104 bool,
105 None,
106 int,
107 float,
108 str,
109 ]
110
111 _SupportedPrimitive = Union[
112 _HydraPrimitive,
113 ListConfig,
114 DictConfig,
115 type,
116 Callable,
117 Enum,
118 _DataClass,
119 complex,
120 Path,
121 range,
122 ]
123
124 SupportedPrimitive = Union[
125 _SupportedPrimitive,
126 Dict[_HydraPrimitive, "SupportedPrimitive"],
127 Counter[_HydraPrimitive],
128 Set["SupportedPrimitive"],
129 FrozenSet["SupportedPrimitive"],
130 Deque["SupportedPrimitive"],
131 List["SupportedPrimitive"],
132 Tuple["SupportedPrimitive", ...],
133 ]
134
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/hydra_zen/typing/_implementations.py b/src/hydra_zen/typing/_implementations.py
--- a/src/hydra_zen/typing/_implementations.py
+++ b/src/hydra_zen/typing/_implementations.py
@@ -7,21 +7,19 @@
from typing import (
Any,
Callable,
- Counter,
- Deque,
Dict,
FrozenSet,
Generic,
- List,
+ Mapping,
NewType,
- Set,
+ Sequence,
Tuple,
TypeVar,
Union,
)
from omegaconf import DictConfig, ListConfig
-from typing_extensions import Protocol, runtime_checkable
+from typing_extensions import Protocol, TypedDict, runtime_checkable
__all__ = [
"Just",
@@ -33,6 +31,10 @@
]
+class EmptyDict(TypedDict):
+ pass
+
+
_T = TypeVar("_T", covariant=True)
@@ -119,15 +121,21 @@
complex,
Path,
range,
+ set,
+ EmptyDict, # not covered by Mapping[..., ...]
]
SupportedPrimitive = Union[
_SupportedPrimitive,
- Dict[_HydraPrimitive, "SupportedPrimitive"],
- Counter[_HydraPrimitive],
- Set["SupportedPrimitive"],
FrozenSet["SupportedPrimitive"],
- Deque["SupportedPrimitive"],
- List["SupportedPrimitive"],
- Tuple["SupportedPrimitive", ...],
+ # Even thought this is redundant with Sequence, it seems to
+ # be needed for pyright to do proper checking of tuple contents
+ Tuple["SupportedPrimitive"],
+ # Mutable generic containers need to be invariant, so
+ # we have to settle for Sequence/Mapping. While this
+ # is overly permissive in terms of sequence-type, it
+ # at least affords quality checking of sequence content
+ Sequence["SupportedPrimitive"],
+ # Mapping is covariant only in value
+ Mapping[Any, "SupportedPrimitive"],
]
| {"golden_diff": "diff --git a/src/hydra_zen/typing/_implementations.py b/src/hydra_zen/typing/_implementations.py\n--- a/src/hydra_zen/typing/_implementations.py\n+++ b/src/hydra_zen/typing/_implementations.py\n@@ -7,21 +7,19 @@\n from typing import (\n Any,\n Callable,\n- Counter,\n- Deque,\n Dict,\n FrozenSet,\n Generic,\n- List,\n+ Mapping,\n NewType,\n- Set,\n+ Sequence,\n Tuple,\n TypeVar,\n Union,\n )\n \n from omegaconf import DictConfig, ListConfig\n-from typing_extensions import Protocol, runtime_checkable\n+from typing_extensions import Protocol, TypedDict, runtime_checkable\n \n __all__ = [\n \"Just\",\n@@ -33,6 +31,10 @@\n ]\n \n \n+class EmptyDict(TypedDict):\n+ pass\n+\n+\n _T = TypeVar(\"_T\", covariant=True)\n \n \n@@ -119,15 +121,21 @@\n complex,\n Path,\n range,\n+ set,\n+ EmptyDict, # not covered by Mapping[..., ...]\n ]\n \n SupportedPrimitive = Union[\n _SupportedPrimitive,\n- Dict[_HydraPrimitive, \"SupportedPrimitive\"],\n- Counter[_HydraPrimitive],\n- Set[\"SupportedPrimitive\"],\n FrozenSet[\"SupportedPrimitive\"],\n- Deque[\"SupportedPrimitive\"],\n- List[\"SupportedPrimitive\"],\n- Tuple[\"SupportedPrimitive\", ...],\n+ # Even thought this is redundant with Sequence, it seems to\n+ # be needed for pyright to do proper checking of tuple contents\n+ Tuple[\"SupportedPrimitive\"],\n+ # Mutable generic containers need to be invariant, so\n+ # we have to settle for Sequence/Mapping. While this\n+ # is overly permissive in terms of sequence-type, it\n+ # at least affords quality checking of sequence content\n+ Sequence[\"SupportedPrimitive\"],\n+ # Mapping is covariant only in value\n+ Mapping[Any, \"SupportedPrimitive\"],\n ]\n", "issue": "Inconsistent static analysis via pyright\n```python\r\nx = [1, 2, 3]\r\nmake_config(a=[1, 2, 3])\r\nmake_config(a=x) # pyright marks this as invalid based on our annotations\r\n```\r\n\r\nThis seems like a pyright issue, but we might consider revising annotations to accommodate\n", "before_files": [{"content": "# Copyright (c) 2021 Massachusetts Institute of Technology\n# SPDX-License-Identifier: MIT\n\nfrom dataclasses import Field\nfrom enum import Enum\nfrom pathlib import Path\nfrom typing import (\n Any,\n Callable,\n Counter,\n Deque,\n Dict,\n FrozenSet,\n Generic,\n List,\n NewType,\n Set,\n Tuple,\n TypeVar,\n Union,\n)\n\nfrom omegaconf import DictConfig, ListConfig\nfrom typing_extensions import Protocol, runtime_checkable\n\n__all__ = [\n \"Just\",\n \"Builds\",\n \"PartialBuilds\",\n \"Partial\",\n \"Importable\",\n \"SupportedPrimitive\",\n]\n\n\n_T = TypeVar(\"_T\", covariant=True)\n\n\nclass Partial(Generic[_T]):\n func: Callable[..., _T]\n args: Tuple[Any, ...]\n keywords: Dict[str, Any]\n\n def __init__(\n self, func: Callable[..., _T], *args: Any, **kwargs: Any\n ) -> None: # pragma: no cover\n ...\n\n def __call__(self, *args: Any, **kwargs: Any) -> _T: # pragma: no cover\n ...\n\n\nInterpStr = NewType(\"InterpStr\", str)\n\nImportable = TypeVar(\"Importable\")\n\n\nclass _DataClass(Protocol): # pragma: no cover\n # doesn't provide __init__, __getattribute__, etc.\n __dataclass_fields__: Dict[str, Field]\n\n\nclass DataClass(_DataClass, Protocol): # pragma: no cover\n def __init__(self, *args, **kwargs) -> None:\n ...\n\n def __getattribute__(self, name: str) -> Any:\n ...\n\n def __setattr__(self, name: str, value: Any) -> None:\n ...\n\n\n@runtime_checkable\nclass Builds(DataClass, Protocol[_T]): # pragma: no cover\n\n _target_: str\n\n\n@runtime_checkable\nclass Just(Builds, Protocol[_T]): # pragma: no 
cover\n path: str # interpolated string for importing obj\n _target_: str = \"hydra_zen.funcs.get_obj\"\n\n\n@runtime_checkable\nclass PartialBuilds(Builds, Protocol[_T]): # pragma: no cover\n _target_: str = \"hydra_zen.funcs.zen_processing\"\n _zen_target: str\n _zen_partial: bool = True\n\n\n@runtime_checkable\nclass HasTarget(Protocol): # pragma: no cover\n _target_: str\n\n\n@runtime_checkable\nclass HasPartialTarget(Protocol): # pragma: no cover\n _zen_partial: bool = True\n\n\n_HydraPrimitive = Union[\n bool,\n None,\n int,\n float,\n str,\n]\n\n_SupportedPrimitive = Union[\n _HydraPrimitive,\n ListConfig,\n DictConfig,\n type,\n Callable,\n Enum,\n _DataClass,\n complex,\n Path,\n range,\n]\n\nSupportedPrimitive = Union[\n _SupportedPrimitive,\n Dict[_HydraPrimitive, \"SupportedPrimitive\"],\n Counter[_HydraPrimitive],\n Set[\"SupportedPrimitive\"],\n FrozenSet[\"SupportedPrimitive\"],\n Deque[\"SupportedPrimitive\"],\n List[\"SupportedPrimitive\"],\n Tuple[\"SupportedPrimitive\", ...],\n]\n", "path": "src/hydra_zen/typing/_implementations.py"}], "after_files": [{"content": "# Copyright (c) 2021 Massachusetts Institute of Technology\n# SPDX-License-Identifier: MIT\n\nfrom dataclasses import Field\nfrom enum import Enum\nfrom pathlib import Path\nfrom typing import (\n Any,\n Callable,\n Dict,\n FrozenSet,\n Generic,\n Mapping,\n NewType,\n Sequence,\n Tuple,\n TypeVar,\n Union,\n)\n\nfrom omegaconf import DictConfig, ListConfig\nfrom typing_extensions import Protocol, TypedDict, runtime_checkable\n\n__all__ = [\n \"Just\",\n \"Builds\",\n \"PartialBuilds\",\n \"Partial\",\n \"Importable\",\n \"SupportedPrimitive\",\n]\n\n\nclass EmptyDict(TypedDict):\n pass\n\n\n_T = TypeVar(\"_T\", covariant=True)\n\n\nclass Partial(Generic[_T]):\n func: Callable[..., _T]\n args: Tuple[Any, ...]\n keywords: Dict[str, Any]\n\n def __init__(\n self, func: Callable[..., _T], *args: Any, **kwargs: Any\n ) -> None: # pragma: no cover\n ...\n\n def __call__(self, *args: Any, **kwargs: Any) -> _T: # pragma: no cover\n ...\n\n\nInterpStr = NewType(\"InterpStr\", str)\n\nImportable = TypeVar(\"Importable\")\n\n\nclass _DataClass(Protocol): # pragma: no cover\n # doesn't provide __init__, __getattribute__, etc.\n __dataclass_fields__: Dict[str, Field]\n\n\nclass DataClass(_DataClass, Protocol): # pragma: no cover\n def __init__(self, *args, **kwargs) -> None:\n ...\n\n def __getattribute__(self, name: str) -> Any:\n ...\n\n def __setattr__(self, name: str, value: Any) -> None:\n ...\n\n\n@runtime_checkable\nclass Builds(DataClass, Protocol[_T]): # pragma: no cover\n\n _target_: str\n\n\n@runtime_checkable\nclass Just(Builds, Protocol[_T]): # pragma: no cover\n path: str # interpolated string for importing obj\n _target_: str = \"hydra_zen.funcs.get_obj\"\n\n\n@runtime_checkable\nclass PartialBuilds(Builds, Protocol[_T]): # pragma: no cover\n _target_: str = \"hydra_zen.funcs.zen_processing\"\n _zen_target: str\n _zen_partial: bool = True\n\n\n@runtime_checkable\nclass HasTarget(Protocol): # pragma: no cover\n _target_: str\n\n\n@runtime_checkable\nclass HasPartialTarget(Protocol): # pragma: no cover\n _zen_partial: bool = True\n\n\n_HydraPrimitive = Union[\n bool,\n None,\n int,\n float,\n str,\n]\n\n_SupportedPrimitive = Union[\n _HydraPrimitive,\n ListConfig,\n DictConfig,\n type,\n Callable,\n Enum,\n _DataClass,\n complex,\n Path,\n range,\n set,\n EmptyDict, # not covered by Mapping[..., ...]\n]\n\nSupportedPrimitive = Union[\n _SupportedPrimitive,\n FrozenSet[\"SupportedPrimitive\"],\n # Even thought 
this is redundant with Sequence, it seems to\n # be needed for pyright to do proper checking of tuple contents\n Tuple[\"SupportedPrimitive\"],\n # Mutable generic containers need to be invariant, so\n # we have to settle for Sequence/Mapping. While this\n # is overly permissive in terms of sequence-type, it\n # at least affords quality checking of sequence content\n Sequence[\"SupportedPrimitive\"],\n # Mapping is covariant only in value\n Mapping[Any, \"SupportedPrimitive\"],\n]\n", "path": "src/hydra_zen/typing/_implementations.py"}]} | 1,364 | 453 |
gh_patches_debug_44429 | rasdani/github-patches | git_diff | pytorch__ignite-408 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove `activation` argument from AveragePrecision and ROC_AUC
I propose to remove the `activation` argument from `AveragePrecision` and `ROC_AUC`:
https://github.com/pytorch/ignite/blob/862ab0073e461a32bf5b53fa015f88c143ae9079/ignite/contrib/metrics/roc_auc.py#L23-L28
to be coherent with `Accuracy` and friends and the usage of `output_transform` to apply the binarization/activation:
https://github.com/pytorch/ignite/blob/862ab0073e461a32bf5b53fa015f88c143ae9079/ignite/metrics/accuracy.py#L81-L93
--- END ISSUE ---
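For context, the `output_transform`-based pattern the issue advocates would look roughly like this (a minimal sketch; the transform function name and sigmoid choice are illustrative, not part of the proposal):

```python
import torch

from ignite.contrib.metrics import ROC_AUC


def activated_output_transform(output):
    # The activation moves here, out of the metric's constructor.
    y_pred, y = output
    return torch.sigmoid(y_pred), y


# The metric then receives already-activated predictions,
# matching how `Accuracy` and friends are configured.
roc_auc = ROC_AUC(output_transform=activated_output_transform)
```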
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ignite/contrib/metrics/roc_auc.py`
Content:
```
1 from functools import partial
2 from ignite.metrics import EpochMetric
3
4
5 def roc_auc_compute_fn(y_preds, y_targets, activation=None):
6 try:
7 from sklearn.metrics import roc_auc_score
8 except ImportError:
9 raise RuntimeError("This contrib module requires sklearn to be installed.")
10
11 y_true = y_targets.numpy()
12 if activation is not None:
13 y_preds = activation(y_preds)
14 y_pred = y_preds.numpy()
15 return roc_auc_score(y_true, y_pred)
16
17
18 class ROC_AUC(EpochMetric):
19 """Computes Area Under the Receiver Operating Characteristic Curve (ROC AUC)
20 accumulating predictions and the ground-truth during an epoch and applying
21 `sklearn.metrics.roc_auc_score <http://scikit-learn.org/stable/modules/generated/
22 sklearn.metrics.roc_auc_score.html#sklearn.metrics.roc_auc_score>`_ .
23
24 Args:
25 activation (callable, optional): optional function to apply on prediction tensors,
26 e.g. `activation=torch.sigmoid` to transform logits.
27 output_transform (callable, optional): a callable that is used to transform the
28 :class:`~ignite.engine.Engine`'s `process_function`'s output into the
29 form expected by the metric. This can be useful if, for example, you have a multi-output model and
30 you want to compute the metric with respect to one of the outputs.
31
32 """
33 def __init__(self, activation=None, output_transform=lambda x: x):
34 super(ROC_AUC, self).__init__(partial(roc_auc_compute_fn, activation=activation),
35 output_transform=output_transform)
36
```
Path: `ignite/contrib/metrics/average_precision.py`
Content:
```
1 from functools import partial
2 from ignite.metrics import EpochMetric
3
4
5 def average_precision_compute_fn(y_preds, y_targets, activation=None):
6 try:
7 from sklearn.metrics import average_precision_score
8 except ImportError:
9 raise RuntimeError("This contrib module requires sklearn to be installed.")
10
11 y_true = y_targets.numpy()
12 if activation is not None:
13 y_preds = activation(y_preds)
14 y_pred = y_preds.numpy()
15 return average_precision_score(y_true, y_pred)
16
17
18 class AveragePrecision(EpochMetric):
19 """Computes Average Precision accumulating predictions and the ground-truth during an epoch
20 and applying `sklearn.metrics.average_precision_score <http://scikit-learn.org/stable/modules/generated/
21 sklearn.metrics.average_precision_score.html#sklearn.metrics.average_precision_score>`_ .
22
23 Args:
24 activation (callable, optional): optional function to apply on prediction tensors,
25 e.g. `activation=torch.sigmoid` to transform logits.
26 output_transform (callable, optional): a callable that is used to transform the
27 :class:`~ignite.engine.Engine`'s `process_function`'s output into the
28 form expected by the metric. This can be useful if, for example, you have a multi-output model and
29 you want to compute the metric with respect to one of the outputs.
30
31 """
32 def __init__(self, activation=None, output_transform=lambda x: x):
33 super(AveragePrecision, self).__init__(partial(average_precision_compute_fn, activation=activation),
34 output_transform=output_transform)
35
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ignite/contrib/metrics/average_precision.py b/ignite/contrib/metrics/average_precision.py
--- a/ignite/contrib/metrics/average_precision.py
+++ b/ignite/contrib/metrics/average_precision.py
@@ -1,16 +1,13 @@
-from functools import partial
from ignite.metrics import EpochMetric
-def average_precision_compute_fn(y_preds, y_targets, activation=None):
+def average_precision_compute_fn(y_preds, y_targets):
try:
from sklearn.metrics import average_precision_score
except ImportError:
raise RuntimeError("This contrib module requires sklearn to be installed.")
y_true = y_targets.numpy()
- if activation is not None:
- y_preds = activation(y_preds)
y_pred = y_preds.numpy()
return average_precision_score(y_true, y_pred)
@@ -21,14 +18,23 @@
sklearn.metrics.average_precision_score.html#sklearn.metrics.average_precision_score>`_ .
Args:
- activation (callable, optional): optional function to apply on prediction tensors,
- e.g. `activation=torch.sigmoid` to transform logits.
output_transform (callable, optional): a callable that is used to transform the
:class:`~ignite.engine.Engine`'s `process_function`'s output into the
form expected by the metric. This can be useful if, for example, you have a multi-output model and
you want to compute the metric with respect to one of the outputs.
+ AveragePrecision expects y to be comprised of 0's and 1's. y_pred must either be probability estimates or
+ confidence values. To apply an activation to y_pred, use output_transform as shown below:
+
+ .. code-block:: python
+
+ def activated_output_transform(output):
+ y_pred, y = output
+ y_pred = torch.softmax(y_pred)
+ return y_pred, y
+
+ avg_precision = AveragePrecision(activated_output_transform)
+
"""
def __init__(self, activation=None, output_transform=lambda x: x):
- super(AveragePrecision, self).__init__(partial(average_precision_compute_fn, activation=activation),
- output_transform=output_transform)
+ super(AveragePrecision, self).__init__(average_precision_compute_fn, output_transform=output_transform)
diff --git a/ignite/contrib/metrics/roc_auc.py b/ignite/contrib/metrics/roc_auc.py
--- a/ignite/contrib/metrics/roc_auc.py
+++ b/ignite/contrib/metrics/roc_auc.py
@@ -1,16 +1,13 @@
-from functools import partial
from ignite.metrics import EpochMetric
-def roc_auc_compute_fn(y_preds, y_targets, activation=None):
+def roc_auc_compute_fn(y_preds, y_targets):
try:
from sklearn.metrics import roc_auc_score
except ImportError:
raise RuntimeError("This contrib module requires sklearn to be installed.")
y_true = y_targets.numpy()
- if activation is not None:
- y_preds = activation(y_preds)
y_pred = y_preds.numpy()
return roc_auc_score(y_true, y_pred)
@@ -22,14 +19,23 @@
sklearn.metrics.roc_auc_score.html#sklearn.metrics.roc_auc_score>`_ .
Args:
- activation (callable, optional): optional function to apply on prediction tensors,
- e.g. `activation=torch.sigmoid` to transform logits.
output_transform (callable, optional): a callable that is used to transform the
:class:`~ignite.engine.Engine`'s `process_function`'s output into the
form expected by the metric. This can be useful if, for example, you have a multi-output model and
you want to compute the metric with respect to one of the outputs.
+ ROC_AUC expects y to be comprised of 0's and 1's. y_pred must either be probability estimates or confidence
+ values. To apply an activation to y_pred, use output_transform as shown below:
+
+ .. code-block:: python
+
+ def activated_output_transform(output):
+ y_pred, y = output
+ y_pred = torch.sigmoid(y_pred)
+ return y_pred, y
+
+ roc_auc = ROC_AUC(activated_output_transform)
+
"""
- def __init__(self, activation=None, output_transform=lambda x: x):
- super(ROC_AUC, self).__init__(partial(roc_auc_compute_fn, activation=activation),
- output_transform=output_transform)
+ def __init__(self, output_transform=lambda x: x):
+ super(ROC_AUC, self).__init__(roc_auc_compute_fn, output_transform=output_transform)
| {"golden_diff": "diff --git a/ignite/contrib/metrics/average_precision.py b/ignite/contrib/metrics/average_precision.py\n--- a/ignite/contrib/metrics/average_precision.py\n+++ b/ignite/contrib/metrics/average_precision.py\n@@ -1,16 +1,13 @@\n-from functools import partial\n from ignite.metrics import EpochMetric\n \n \n-def average_precision_compute_fn(y_preds, y_targets, activation=None):\n+def average_precision_compute_fn(y_preds, y_targets):\n try:\n from sklearn.metrics import average_precision_score\n except ImportError:\n raise RuntimeError(\"This contrib module requires sklearn to be installed.\")\n \n y_true = y_targets.numpy()\n- if activation is not None:\n- y_preds = activation(y_preds)\n y_pred = y_preds.numpy()\n return average_precision_score(y_true, y_pred)\n \n@@ -21,14 +18,23 @@\n sklearn.metrics.average_precision_score.html#sklearn.metrics.average_precision_score>`_ .\n \n Args:\n- activation (callable, optional): optional function to apply on prediction tensors,\n- e.g. `activation=torch.sigmoid` to transform logits.\n output_transform (callable, optional): a callable that is used to transform the\n :class:`~ignite.engine.Engine`'s `process_function`'s output into the\n form expected by the metric. This can be useful if, for example, you have a multi-output model and\n you want to compute the metric with respect to one of the outputs.\n \n+ AveragePrecision expects y to be comprised of 0's and 1's. y_pred must either be probability estimates or\n+ confidence values. To apply an activation to y_pred, use output_transform as shown below:\n+\n+ .. code-block:: python\n+\n+ def activated_output_transform(output):\n+ y_pred, y = output\n+ y_pred = torch.softmax(y_pred)\n+ return y_pred, y\n+\n+ avg_precision = AveragePrecision(activated_output_transform)\n+\n \"\"\"\n def __init__(self, activation=None, output_transform=lambda x: x):\n- super(AveragePrecision, self).__init__(partial(average_precision_compute_fn, activation=activation),\n- output_transform=output_transform)\n+ super(AveragePrecision, self).__init__(average_precision_compute_fn, output_transform=output_transform)\ndiff --git a/ignite/contrib/metrics/roc_auc.py b/ignite/contrib/metrics/roc_auc.py\n--- a/ignite/contrib/metrics/roc_auc.py\n+++ b/ignite/contrib/metrics/roc_auc.py\n@@ -1,16 +1,13 @@\n-from functools import partial\n from ignite.metrics import EpochMetric\n \n \n-def roc_auc_compute_fn(y_preds, y_targets, activation=None):\n+def roc_auc_compute_fn(y_preds, y_targets):\n try:\n from sklearn.metrics import roc_auc_score\n except ImportError:\n raise RuntimeError(\"This contrib module requires sklearn to be installed.\")\n \n y_true = y_targets.numpy()\n- if activation is not None:\n- y_preds = activation(y_preds)\n y_pred = y_preds.numpy()\n return roc_auc_score(y_true, y_pred)\n \n@@ -22,14 +19,23 @@\n sklearn.metrics.roc_auc_score.html#sklearn.metrics.roc_auc_score>`_ .\n \n Args:\n- activation (callable, optional): optional function to apply on prediction tensors,\n- e.g. `activation=torch.sigmoid` to transform logits.\n output_transform (callable, optional): a callable that is used to transform the\n :class:`~ignite.engine.Engine`'s `process_function`'s output into the\n form expected by the metric. This can be useful if, for example, you have a multi-output model and\n you want to compute the metric with respect to one of the outputs.\n \n+ ROC_AUC expects y to be comprised of 0's and 1's. y_pred must either be probability estimates or confidence\n+ values. 
To apply an activation to y_pred, use output_transform as shown below:\n+\n+ .. code-block:: python\n+\n+ def activated_output_transform(output):\n+ y_pred, y = output\n+ y_pred = torch.sigmoid(y_pred)\n+ return y_pred, y\n+\n+ roc_auc = ROC_AUC(activated_output_transform)\n+\n \"\"\"\n- def __init__(self, activation=None, output_transform=lambda x: x):\n- super(ROC_AUC, self).__init__(partial(roc_auc_compute_fn, activation=activation),\n- output_transform=output_transform)\n+ def __init__(self, output_transform=lambda x: x):\n+ super(ROC_AUC, self).__init__(roc_auc_compute_fn, output_transform=output_transform)\n", "issue": "Remove `activation` argument from AveragePrecision and ROC_AUC\nI propose to remove `activation` argument from `AveragePrecision` and `ROC_AUC`:\r\nhttps://github.com/pytorch/ignite/blob/862ab0073e461a32bf5b53fa015f88c143ae9079/ignite/contrib/metrics/roc_auc.py#L23-L28\r\nto be coherent with `Accuracy` et friends and the usage of `output_transform` to apply the binarization/activation :\r\nhttps://github.com/pytorch/ignite/blob/862ab0073e461a32bf5b53fa015f88c143ae9079/ignite/metrics/accuracy.py#L81-L93\r\n\r\n\n", "before_files": [{"content": "from functools import partial\nfrom ignite.metrics import EpochMetric\n\n\ndef roc_auc_compute_fn(y_preds, y_targets, activation=None):\n try:\n from sklearn.metrics import roc_auc_score\n except ImportError:\n raise RuntimeError(\"This contrib module requires sklearn to be installed.\")\n\n y_true = y_targets.numpy()\n if activation is not None:\n y_preds = activation(y_preds)\n y_pred = y_preds.numpy()\n return roc_auc_score(y_true, y_pred)\n\n\nclass ROC_AUC(EpochMetric):\n \"\"\"Computes Area Under the Receiver Operating Characteristic Curve (ROC AUC)\n accumulating predictions and the ground-truth during an epoch and applying\n `sklearn.metrics.roc_auc_score <http://scikit-learn.org/stable/modules/generated/\n sklearn.metrics.roc_auc_score.html#sklearn.metrics.roc_auc_score>`_ .\n\n Args:\n activation (callable, optional): optional function to apply on prediction tensors,\n e.g. `activation=torch.sigmoid` to transform logits.\n output_transform (callable, optional): a callable that is used to transform the\n :class:`~ignite.engine.Engine`'s `process_function`'s output into the\n form expected by the metric. 
This can be useful if, for example, you have a multi-output model and\n you want to compute the metric with respect to one of the outputs.\n\n \"\"\"\n def __init__(self, activation=None, output_transform=lambda x: x):\n super(ROC_AUC, self).__init__(partial(roc_auc_compute_fn, activation=activation),\n output_transform=output_transform)\n", "path": "ignite/contrib/metrics/roc_auc.py"}, {"content": "from functools import partial\nfrom ignite.metrics import EpochMetric\n\n\ndef average_precision_compute_fn(y_preds, y_targets, activation=None):\n try:\n from sklearn.metrics import average_precision_score\n except ImportError:\n raise RuntimeError(\"This contrib module requires sklearn to be installed.\")\n\n y_true = y_targets.numpy()\n if activation is not None:\n y_preds = activation(y_preds)\n y_pred = y_preds.numpy()\n return average_precision_score(y_true, y_pred)\n\n\nclass AveragePrecision(EpochMetric):\n \"\"\"Computes Average Precision accumulating predictions and the ground-truth during an epoch\n and applying `sklearn.metrics.average_precision_score <http://scikit-learn.org/stable/modules/generated/\n sklearn.metrics.average_precision_score.html#sklearn.metrics.average_precision_score>`_ .\n\n Args:\n activation (callable, optional): optional function to apply on prediction tensors,\n e.g. `activation=torch.sigmoid` to transform logits.\n output_transform (callable, optional): a callable that is used to transform the\n :class:`~ignite.engine.Engine`'s `process_function`'s output into the\n form expected by the metric. This can be useful if, for example, you have a multi-output model and\n you want to compute the metric with respect to one of the outputs.\n\n \"\"\"\n def __init__(self, activation=None, output_transform=lambda x: x):\n super(AveragePrecision, self).__init__(partial(average_precision_compute_fn, activation=activation),\n output_transform=output_transform)\n", "path": "ignite/contrib/metrics/average_precision.py"}], "after_files": [{"content": "from ignite.metrics import EpochMetric\n\n\ndef roc_auc_compute_fn(y_preds, y_targets):\n try:\n from sklearn.metrics import roc_auc_score\n except ImportError:\n raise RuntimeError(\"This contrib module requires sklearn to be installed.\")\n\n y_true = y_targets.numpy()\n y_pred = y_preds.numpy()\n return roc_auc_score(y_true, y_pred)\n\n\nclass ROC_AUC(EpochMetric):\n \"\"\"Computes Area Under the Receiver Operating Characteristic Curve (ROC AUC)\n accumulating predictions and the ground-truth during an epoch and applying\n `sklearn.metrics.roc_auc_score <http://scikit-learn.org/stable/modules/generated/\n sklearn.metrics.roc_auc_score.html#sklearn.metrics.roc_auc_score>`_ .\n\n Args:\n output_transform (callable, optional): a callable that is used to transform the\n :class:`~ignite.engine.Engine`'s `process_function`'s output into the\n form expected by the metric. This can be useful if, for example, you have a multi-output model and\n you want to compute the metric with respect to one of the outputs.\n\n ROC_AUC expects y to be comprised of 0's and 1's. y_pred must either be probability estimates or confidence\n values. To apply an activation to y_pred, use output_transform as shown below:\n\n .. 
code-block:: python\n\n def activated_output_transform(output):\n y_pred, y = output\n y_pred = torch.sigmoid(y_pred)\n return y_pred, y\n\n roc_auc = ROC_AUC(activated_output_transform)\n\n \"\"\"\n def __init__(self, output_transform=lambda x: x):\n super(ROC_AUC, self).__init__(roc_auc_compute_fn, output_transform=output_transform)\n", "path": "ignite/contrib/metrics/roc_auc.py"}, {"content": "from ignite.metrics import EpochMetric\n\n\ndef average_precision_compute_fn(y_preds, y_targets):\n try:\n from sklearn.metrics import average_precision_score\n except ImportError:\n raise RuntimeError(\"This contrib module requires sklearn to be installed.\")\n\n y_true = y_targets.numpy()\n y_pred = y_preds.numpy()\n return average_precision_score(y_true, y_pred)\n\n\nclass AveragePrecision(EpochMetric):\n \"\"\"Computes Average Precision accumulating predictions and the ground-truth during an epoch\n and applying `sklearn.metrics.average_precision_score <http://scikit-learn.org/stable/modules/generated/\n sklearn.metrics.average_precision_score.html#sklearn.metrics.average_precision_score>`_ .\n\n Args:\n output_transform (callable, optional): a callable that is used to transform the\n :class:`~ignite.engine.Engine`'s `process_function`'s output into the\n form expected by the metric. This can be useful if, for example, you have a multi-output model and\n you want to compute the metric with respect to one of the outputs.\n\n AveragePrecision expects y to be comprised of 0's and 1's. y_pred must either be probability estimates or\n confidence values. To apply an activation to y_pred, use output_transform as shown below:\n\n .. code-block:: python\n\n def activated_output_transform(output):\n y_pred, y = output\n y_pred = torch.softmax(y_pred)\n return y_pred, y\n\n avg_precision = AveragePrecision(activated_output_transform)\n\n \"\"\"\n def __init__(self, activation=None, output_transform=lambda x: x):\n super(AveragePrecision, self).__init__(average_precision_compute_fn, output_transform=output_transform)\n", "path": "ignite/contrib/metrics/average_precision.py"}]} | 1,254 | 1,012 |
gh_patches_debug_21131 | rasdani/github-patches | git_diff | pypi__warehouse-2023 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Search by topic broken?
Browsing packages, then choosing the Internet / WWW/HTTP / Browsers topics gives no result:
https://pypi.org/search/?q=&o=&c=Topic+%3A%3A+Internet+%3A%3A+WWW%2FHTTP+%3A%3A+Browsers
There should be at least the [mechanoid package](https://pypi.org/project/mechanoid/)
Using Firefox 50.1.0 on Ubuntu 16.04
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `warehouse/cli/search/reindex.py`
Content:
```
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13 import binascii
14 import os
15
16 import click
17
18 from elasticsearch.helpers import parallel_bulk
19 from sqlalchemy.orm import lazyload, joinedload, load_only
20
21 from warehouse.cli.search import search
22 from warehouse.db import Session
23 from warehouse.packaging.models import Release, Project
24 from warehouse.packaging.search import Project as ProjectDocType
25 from warehouse.search import get_index
26 from warehouse.utils.db import windowed_query
27
28
29 def _project_docs(db):
30 releases = (
31 db.query(Release)
32 .options(load_only(
33 "summary", "description", "author",
34 "author_email", "maintainer", "maintainer_email",
35 "home_page", "download_url", "keywords", "platform",
36 "created"))
37 .options(lazyload("*"),
38 (joinedload(Release.project)
39 .load_only("normalized_name", "name")
40 .joinedload(Project.releases)
41 .load_only("version", "is_prerelease")),
42 joinedload(Release._classifiers).load_only("classifier"))
43 .distinct(Release.name)
44 .order_by(Release.name, Release._pypi_ordering.desc())
45 )
46 for release in windowed_query(releases, Release.name, 1000):
47 p = ProjectDocType.from_db(release)
48 p.full_clean()
49 yield p.to_dict(include_meta=True)
50
51
52 @search.command()
53 @click.pass_obj
54 def reindex(config, **kwargs):
55 """
56 Recreate the Search Index.
57 """
58 client = config.registry["elasticsearch.client"]
59 db = Session(bind=config.registry["sqlalchemy.engine"])
60 number_of_replicas = config.registry.get("elasticsearch.replicas", 0)
61 refresh_interval = config.registry.get("elasticsearch.interval", "1s")
62
63 # We use a randomly named index so that we can do a zero downtime reindex.
64 # Essentially we'll use a randomly named index which we will use until all
65 # of the data has been reindexed, at which point we'll point an alias at
66 # our randomly named index, and then delete the old randomly named index.
67
68 # Create the new index and associate all of our doc types with it.
69 index_base = config.registry["elasticsearch.index"]
70 random_token = binascii.hexlify(os.urandom(5)).decode("ascii")
71 new_index_name = "{}-{}".format(index_base, random_token)
72 doc_types = config.registry.get("search.doc_types", set())
73
74 # Create the new index with zero replicas and index refreshes disabled
75 # while we are bulk indexing.
76 new_index = get_index(
77 new_index_name,
78 doc_types,
79 using=client,
80 shards=config.registry.get("elasticsearch.shards", 1),
81 replicas=0,
82 interval="-1",
83 )
84
85 # From this point on, if any error occurs, we want to be able to delete our
86 # in progress index.
87 try:
88 db.execute("SET statement_timeout = '600s'")
89
90 for _ in parallel_bulk(client, _project_docs(db)):
91 pass
92 except:
93 new_index.delete()
94 raise
95 finally:
96 db.rollback()
97 db.close()
98
99 # Now that we've finished indexing all of our data we can optimize it and
100 # update the replicas and refresh intervals.
101 client.indices.forcemerge(index=new_index_name)
102 client.indices.put_settings(
103 index=new_index_name,
104 body={
105 "index": {
106 "number_of_replicas": number_of_replicas,
107 "refresh_interval": refresh_interval,
108 }
109 }
110 )
111
112 # Point the alias at our new randomly named index and delete the old index.
113 if client.indices.exists_alias(name=index_base):
114 to_delete = set()
115 actions = []
116 for name in client.indices.get_alias(name=index_base):
117 to_delete.add(name)
118 actions.append({"remove": {"index": name, "alias": index_base}})
119 actions.append({"add": {"index": new_index_name, "alias": index_base}})
120 client.indices.update_aliases({"actions": actions})
121 client.indices.delete(",".join(to_delete))
122 else:
123 client.indices.put_alias(name=index_base, index=new_index_name)
124
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/warehouse/cli/search/reindex.py b/warehouse/cli/search/reindex.py
--- a/warehouse/cli/search/reindex.py
+++ b/warehouse/cli/search/reindex.py
@@ -70,6 +70,7 @@
random_token = binascii.hexlify(os.urandom(5)).decode("ascii")
new_index_name = "{}-{}".format(index_base, random_token)
doc_types = config.registry.get("search.doc_types", set())
+ shards = config.registry.get("elasticsearch.shards", 1)
# Create the new index with zero replicas and index refreshes disabled
# while we are bulk indexing.
@@ -77,10 +78,11 @@
new_index_name,
doc_types,
using=client,
- shards=config.registry.get("elasticsearch.shards", 1),
+ shards=shards,
replicas=0,
interval="-1",
)
+ new_index.create(wait_for_active_shards=shards)
# From this point on, if any error occurs, we want to be able to delete our
# in progress index.
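The decisive addition in this patch is creating the new index explicitly and blocking until its primary shards are active before bulk indexing begins. A minimal sketch of the same guarantee against a bare Elasticsearch client (the index name and shard count are placeholder values, not taken from the repository):

```python
from elasticsearch import Elasticsearch

client = Elasticsearch()
shards = 1

# Create the index up front and block until every primary shard is
# allocated, so the parallel_bulk writers never race index creation.
client.indices.create(
    index="warehouse-0123456789",  # placeholder for the randomly named index
    body={"settings": {"number_of_shards": shards, "number_of_replicas": 0}},
    wait_for_active_shards=shards,
)
```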
| {"golden_diff": "diff --git a/warehouse/cli/search/reindex.py b/warehouse/cli/search/reindex.py\n--- a/warehouse/cli/search/reindex.py\n+++ b/warehouse/cli/search/reindex.py\n@@ -70,6 +70,7 @@\n random_token = binascii.hexlify(os.urandom(5)).decode(\"ascii\")\n new_index_name = \"{}-{}\".format(index_base, random_token)\n doc_types = config.registry.get(\"search.doc_types\", set())\n+ shards = config.registry.get(\"elasticsearch.shards\", 1)\n \n # Create the new index with zero replicas and index refreshes disabled\n # while we are bulk indexing.\n@@ -77,10 +78,11 @@\n new_index_name,\n doc_types,\n using=client,\n- shards=config.registry.get(\"elasticsearch.shards\", 1),\n+ shards=shards,\n replicas=0,\n interval=\"-1\",\n )\n+ new_index.create(wait_for_active_shards=shards)\n \n # From this point on, if any error occurs, we want to be able to delete our\n # in progress index.\n", "issue": "Search by topic broken ?\nBrowsing packages, then choosing the Internet / WWW9HTTP / Browser topics gives no result:\r\n\r\nhttps://pypi.org/search/?q=&o=&c=Topic+%3A%3A+Internet+%3A%3A+WWW%2FHTTP+%3A%3A+Browsers\r\n\r\nThere should be at least the [mechanoid package](https://pypi.org/project/mechanoid/)\r\n\r\nUsing firefox 50.1:0 on Ubuntu 16.04\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport binascii\nimport os\n\nimport click\n\nfrom elasticsearch.helpers import parallel_bulk\nfrom sqlalchemy.orm import lazyload, joinedload, load_only\n\nfrom warehouse.cli.search import search\nfrom warehouse.db import Session\nfrom warehouse.packaging.models import Release, Project\nfrom warehouse.packaging.search import Project as ProjectDocType\nfrom warehouse.search import get_index\nfrom warehouse.utils.db import windowed_query\n\n\ndef _project_docs(db):\n releases = (\n db.query(Release)\n .options(load_only(\n \"summary\", \"description\", \"author\",\n \"author_email\", \"maintainer\", \"maintainer_email\",\n \"home_page\", \"download_url\", \"keywords\", \"platform\",\n \"created\"))\n .options(lazyload(\"*\"),\n (joinedload(Release.project)\n .load_only(\"normalized_name\", \"name\")\n .joinedload(Project.releases)\n .load_only(\"version\", \"is_prerelease\")),\n joinedload(Release._classifiers).load_only(\"classifier\"))\n .distinct(Release.name)\n .order_by(Release.name, Release._pypi_ordering.desc())\n )\n for release in windowed_query(releases, Release.name, 1000):\n p = ProjectDocType.from_db(release)\n p.full_clean()\n yield p.to_dict(include_meta=True)\n\n\[email protected]()\[email protected]_obj\ndef reindex(config, **kwargs):\n \"\"\"\n Recreate the Search Index.\n \"\"\"\n client = config.registry[\"elasticsearch.client\"]\n db = Session(bind=config.registry[\"sqlalchemy.engine\"])\n number_of_replicas = config.registry.get(\"elasticsearch.replicas\", 0)\n refresh_interval = config.registry.get(\"elasticsearch.interval\", \"1s\")\n\n # We use a randomly named index so that we can do a zero downtime reindex.\n # Essentially we'll use a randomly named 
index which we will use until all\n # of the data has been reindexed, at which point we'll point an alias at\n # our randomly named index, and then delete the old randomly named index.\n\n # Create the new index and associate all of our doc types with it.\n index_base = config.registry[\"elasticsearch.index\"]\n random_token = binascii.hexlify(os.urandom(5)).decode(\"ascii\")\n new_index_name = \"{}-{}\".format(index_base, random_token)\n doc_types = config.registry.get(\"search.doc_types\", set())\n\n # Create the new index with zero replicas and index refreshes disabled\n # while we are bulk indexing.\n new_index = get_index(\n new_index_name,\n doc_types,\n using=client,\n shards=config.registry.get(\"elasticsearch.shards\", 1),\n replicas=0,\n interval=\"-1\",\n )\n\n # From this point on, if any error occurs, we want to be able to delete our\n # in progress index.\n try:\n db.execute(\"SET statement_timeout = '600s'\")\n\n for _ in parallel_bulk(client, _project_docs(db)):\n pass\n except:\n new_index.delete()\n raise\n finally:\n db.rollback()\n db.close()\n\n # Now that we've finished indexing all of our data we can optimize it and\n # update the replicas and refresh intervals.\n client.indices.forcemerge(index=new_index_name)\n client.indices.put_settings(\n index=new_index_name,\n body={\n \"index\": {\n \"number_of_replicas\": number_of_replicas,\n \"refresh_interval\": refresh_interval,\n }\n }\n )\n\n # Point the alias at our new randomly named index and delete the old index.\n if client.indices.exists_alias(name=index_base):\n to_delete = set()\n actions = []\n for name in client.indices.get_alias(name=index_base):\n to_delete.add(name)\n actions.append({\"remove\": {\"index\": name, \"alias\": index_base}})\n actions.append({\"add\": {\"index\": new_index_name, \"alias\": index_base}})\n client.indices.update_aliases({\"actions\": actions})\n client.indices.delete(\",\".join(to_delete))\n else:\n client.indices.put_alias(name=index_base, index=new_index_name)\n", "path": "warehouse/cli/search/reindex.py"}], "after_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport binascii\nimport os\n\nimport click\n\nfrom elasticsearch.helpers import parallel_bulk\nfrom sqlalchemy.orm import lazyload, joinedload, load_only\n\nfrom warehouse.cli.search import search\nfrom warehouse.db import Session\nfrom warehouse.packaging.models import Release, Project\nfrom warehouse.packaging.search import Project as ProjectDocType\nfrom warehouse.search import get_index\nfrom warehouse.utils.db import windowed_query\n\n\ndef _project_docs(db):\n releases = (\n db.query(Release)\n .options(load_only(\n \"summary\", \"description\", \"author\",\n \"author_email\", \"maintainer\", \"maintainer_email\",\n \"home_page\", \"download_url\", \"keywords\", \"platform\",\n \"created\"))\n .options(lazyload(\"*\"),\n (joinedload(Release.project)\n .load_only(\"normalized_name\", \"name\")\n .joinedload(Project.releases)\n .load_only(\"version\", \"is_prerelease\")),\n 
joinedload(Release._classifiers).load_only(\"classifier\"))\n .distinct(Release.name)\n .order_by(Release.name, Release._pypi_ordering.desc())\n )\n for release in windowed_query(releases, Release.name, 1000):\n p = ProjectDocType.from_db(release)\n p.full_clean()\n yield p.to_dict(include_meta=True)\n\n\[email protected]()\[email protected]_obj\ndef reindex(config, **kwargs):\n \"\"\"\n Recreate the Search Index.\n \"\"\"\n client = config.registry[\"elasticsearch.client\"]\n db = Session(bind=config.registry[\"sqlalchemy.engine\"])\n number_of_replicas = config.registry.get(\"elasticsearch.replicas\", 0)\n refresh_interval = config.registry.get(\"elasticsearch.interval\", \"1s\")\n\n # We use a randomly named index so that we can do a zero downtime reindex.\n # Essentially we'll use a randomly named index which we will use until all\n # of the data has been reindexed, at which point we'll point an alias at\n # our randomly named index, and then delete the old randomly named index.\n\n # Create the new index and associate all of our doc types with it.\n index_base = config.registry[\"elasticsearch.index\"]\n random_token = binascii.hexlify(os.urandom(5)).decode(\"ascii\")\n new_index_name = \"{}-{}\".format(index_base, random_token)\n doc_types = config.registry.get(\"search.doc_types\", set())\n shards = config.registry.get(\"elasticsearch.shards\", 1)\n\n # Create the new index with zero replicas and index refreshes disabled\n # while we are bulk indexing.\n new_index = get_index(\n new_index_name,\n doc_types,\n using=client,\n shards=shards,\n replicas=0,\n interval=\"-1\",\n )\n new_index.create(wait_for_active_shards=shards)\n\n # From this point on, if any error occurs, we want to be able to delete our\n # in progress index.\n try:\n db.execute(\"SET statement_timeout = '600s'\")\n\n for _ in parallel_bulk(client, _project_docs(db)):\n pass\n except:\n new_index.delete()\n raise\n finally:\n db.rollback()\n db.close()\n\n # Now that we've finished indexing all of our data we can optimize it and\n # update the replicas and refresh intervals.\n client.indices.forcemerge(index=new_index_name)\n client.indices.put_settings(\n index=new_index_name,\n body={\n \"index\": {\n \"number_of_replicas\": number_of_replicas,\n \"refresh_interval\": refresh_interval,\n }\n }\n )\n\n # Point the alias at our new randomly named index and delete the old index.\n if client.indices.exists_alias(name=index_base):\n to_delete = set()\n actions = []\n for name in client.indices.get_alias(name=index_base):\n to_delete.add(name)\n actions.append({\"remove\": {\"index\": name, \"alias\": index_base}})\n actions.append({\"add\": {\"index\": new_index_name, \"alias\": index_base}})\n client.indices.update_aliases({\"actions\": actions})\n client.indices.delete(\",\".join(to_delete))\n else:\n client.indices.put_alias(name=index_base, index=new_index_name)\n", "path": "warehouse/cli/search/reindex.py"}]} | 1,637 | 243 |
gh_patches_debug_29581 | rasdani/github-patches | git_diff | svthalia__concrexit-2709 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Food API: AttributeError: 'Event' object has no attribute 'member_registration'
Sentry Issue: [CONCREXIT-KG](https://sentry.io/organizations/thalia/issues/3768057031/?referrer=github_integration)
```
AttributeError: 'Event' object has no attribute 'member_registration'
(11 additional frame(s) were not displayed)
...
File "rest_framework/serializers.py", line 253, in data
self._data = self.to_representation(self.instance)
File "rest_framework/serializers.py", line 522, in to_representation
ret[field.field_name] = field.to_representation(attribute)
File "rest_framework/serializers.py", line 522, in to_representation
ret[field.field_name] = field.to_representation(attribute)
File "rest_framework/fields.py", line 1838, in to_representation
return method(value)
File "events/api/v2/serializers/event.py", line 83, in _registration_status
if self.context["request"].member and len(instance.member_registration) > 0:
```
--- END ISSUE ---
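The traceback points at `instance.member_registration`, an attribute that only exists when the queryset attaches it. In Django that is done with `Prefetch(..., to_attr=...)`, which materialises the filtered related rows as a plain list on each prefetched object. A minimal sketch of the mechanism (hypothetical `Event`/`EventRegistration` models and `member` variable; the real project prefetches through a nested `event__eventregistration_set` path, simplified here):

```python
from django.db.models import Prefetch

# Hypothetical models for illustration only.
events = Event.objects.prefetch_related(
    Prefetch(
        "eventregistration_set",  # reverse FK accessor to prefetch
        queryset=EventRegistration.objects.filter(member=member),
        to_attr="member_registration",  # attached as a plain list attribute
    )
)
for event in events:
    # The attribute now exists on every prefetched instance (possibly empty),
    # so len(event.member_registration) cannot raise AttributeError.
    print(len(event.member_registration))
```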
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/pizzas/api/v2/views.py`
Content:
```
1 from oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope
2 from rest_framework import filters as framework_filters
3 from rest_framework import status
4 from rest_framework.generics import (
5 CreateAPIView,
6 DestroyAPIView,
7 ListAPIView,
8 RetrieveAPIView,
9 UpdateAPIView,
10 get_object_or_404,
11 )
12 from rest_framework.response import Response
13
14 from payments.exceptions import PaymentError
15 from payments.services import delete_payment
16 from pizzas.api.v2 import filters
17 from pizzas.api.v2.serializers import (
18 FoodOrderCreateSerializer,
19 FoodOrderSerializer,
20 FoodOrderUpdateSerializer,
21 ProductSerializer,
22 )
23 from pizzas.api.v2.serializers.food_event import FoodEventSerializer
24 from pizzas.models import FoodEvent, FoodOrder, Product
25 from thaliawebsite.api.v2.permissions import IsAuthenticatedOrTokenHasScopeForMethod
26
27
28 class FoodEventListView(ListAPIView):
29 """Returns an overview of all food events."""
30
31 serializer_class = FoodEventSerializer
32 queryset = FoodEvent.objects.all()
33 filter_backends = (
34 framework_filters.OrderingFilter,
35 filters.FoodEventDateFilterBackend,
36 )
37 ordering_fields = ("start", "end")
38 permission_classes = [
39 IsAuthenticatedOrTokenHasScope,
40 ]
41 required_scopes = ["food:read"]
42
43
44 class FoodEventDetailView(RetrieveAPIView):
45 """Returns one single food event."""
46
47 serializer_class = FoodEventSerializer
48 queryset = FoodEvent.objects.all()
49 permission_classes = [
50 IsAuthenticatedOrTokenHasScope,
51 ]
52 required_scopes = ["food:read"]
53
54
55 class FoodEventProductsListView(ListAPIView):
56 """Returns an overview of all products."""
57
58 serializer_class = ProductSerializer
59 queryset = Product.available_products.all()
60 filter_backends = (framework_filters.SearchFilter,)
61 search_fields = ("name",)
62 permission_classes = [
63 IsAuthenticatedOrTokenHasScope,
64 ]
65 required_scopes = ["food:read"]
66
67
68 class FoodEventOrderDetailView(
69 RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView
70 ):
71 """Returns details of a food order."""
72
73 permission_classes = [
74 IsAuthenticatedOrTokenHasScopeForMethod,
75 ]
76 required_scopes_per_method = {
77 "GET": ["food:read"],
78 "POST": ["food:order"],
79 "PUT": ["food:order"],
80 "PATCH": ["food:order"],
81 "DELETE": ["food:order"],
82 }
83
84 def get_serializer_class(self):
85 if self.request.method.lower() == "get":
86 return FoodOrderSerializer
87 if self.request.method.lower() == "post":
88 return FoodOrderCreateSerializer
89 return FoodOrderUpdateSerializer
90
91 def get_queryset(self):
92 return FoodOrder.objects.filter(food_event=self.food_event)
93
94 def get_object(self):
95 queryset = self.filter_queryset(self.get_queryset())
96 obj = get_object_or_404(queryset, member=self.request.member)
97
98 # May raise a permission denied
99 self.check_object_permissions(self.request, obj)
100
101 return obj
102
103 def dispatch(self, request, *args, **kwargs):
104 self.food_event = get_object_or_404(FoodEvent, pk=self.kwargs.get("pk"))
105 try:
106 return super().dispatch(request, *args, **kwargs)
107 except PaymentError as e:
108 return Response(
109 str(e),
110 status=status.HTTP_403_FORBIDDEN,
111 )
112
113 def update(self, request, *args, **kwargs):
114 instance = self.get_object()
115
116 if instance.payment:
117 delete_payment(instance, member=request.member, ignore_change_window=True)
118
119 super().update(request, *args, **kwargs)
120
121 return Response(
122 FoodOrderSerializer(instance, context=self.get_serializer_context()).data
123 )
124
125 def create(self, request, *args, **kwargs):
126 serializer = self.get_serializer(data=request.data)
127 serializer.is_valid(raise_exception=True)
128 self.perform_create(serializer)
129 return Response(
130 FoodOrderSerializer(
131 serializer.instance, context=self.get_serializer_context()
132 ).data,
133 status=status.HTTP_201_CREATED,
134 )
135
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/website/pizzas/api/v2/views.py b/website/pizzas/api/v2/views.py
--- a/website/pizzas/api/v2/views.py
+++ b/website/pizzas/api/v2/views.py
@@ -1,3 +1,5 @@
+from django.db.models import Prefetch
+
from oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope
from rest_framework import filters as framework_filters
from rest_framework import status
@@ -11,6 +13,7 @@
)
from rest_framework.response import Response
+from events.models.event_registration import EventRegistration
from payments.exceptions import PaymentError
from payments.services import delete_payment
from pizzas.api.v2 import filters
@@ -45,12 +48,25 @@
"""Returns one single food event."""
serializer_class = FoodEventSerializer
- queryset = FoodEvent.objects.all()
permission_classes = [
IsAuthenticatedOrTokenHasScope,
]
required_scopes = ["food:read"]
+ def get_queryset(self):
+ events = FoodEvent.objects.all()
+ if self.request.member:
+ events = events.prefetch_related(
+ Prefetch(
+ "event__eventregistration_set",
+ to_attr="member_registration",
+ queryset=EventRegistration.objects.filter(
+ member=self.request.member
+ ).select_properties("queue_position"),
+ )
+ )
+ return events
+
class FoodEventProductsListView(ListAPIView):
"""Returns an overview of all products."""
| {"golden_diff": "diff --git a/website/pizzas/api/v2/views.py b/website/pizzas/api/v2/views.py\n--- a/website/pizzas/api/v2/views.py\n+++ b/website/pizzas/api/v2/views.py\n@@ -1,3 +1,5 @@\n+from django.db.models import Prefetch\n+\n from oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope\n from rest_framework import filters as framework_filters\n from rest_framework import status\n@@ -11,6 +13,7 @@\n )\n from rest_framework.response import Response\n \n+from events.models.event_registration import EventRegistration\n from payments.exceptions import PaymentError\n from payments.services import delete_payment\n from pizzas.api.v2 import filters\n@@ -45,12 +48,25 @@\n \"\"\"Returns one single food event.\"\"\"\n \n serializer_class = FoodEventSerializer\n- queryset = FoodEvent.objects.all()\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n required_scopes = [\"food:read\"]\n \n+ def get_queryset(self):\n+ events = FoodEvent.objects.all()\n+ if self.request.member:\n+ events = events.prefetch_related(\n+ Prefetch(\n+ \"event__eventregistration_set\",\n+ to_attr=\"member_registration\",\n+ queryset=EventRegistration.objects.filter(\n+ member=self.request.member\n+ ).select_properties(\"queue_position\"),\n+ )\n+ )\n+ return events\n+\n \n class FoodEventProductsListView(ListAPIView):\n \"\"\"Returns an overview of all products.\"\"\"\n", "issue": "Food API: AttributeError: 'Event' object has no attribute 'member_registration'\nSentry Issue: [CONCREXIT-KG](https://sentry.io/organizations/thalia/issues/3768057031/?referrer=github_integration)\n\n```\nAttributeError: 'Event' object has no attribute 'member_registration'\n(11 additional frame(s) were not displayed)\n...\n File \"rest_framework/serializers.py\", line 253, in data\n self._data = self.to_representation(self.instance)\n File \"rest_framework/serializers.py\", line 522, in to_representation\n ret[field.field_name] = field.to_representation(attribute)\n File \"rest_framework/serializers.py\", line 522, in to_representation\n ret[field.field_name] = field.to_representation(attribute)\n File \"rest_framework/fields.py\", line 1838, in to_representation\n return method(value)\n File \"events/api/v2/serializers/event.py\", line 83, in _registration_status\n if self.context[\"request\"].member and len(instance.member_registration) > 0:\n```\n", "before_files": [{"content": "from oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope\nfrom rest_framework import filters as framework_filters\nfrom rest_framework import status\nfrom rest_framework.generics import (\n CreateAPIView,\n DestroyAPIView,\n ListAPIView,\n RetrieveAPIView,\n UpdateAPIView,\n get_object_or_404,\n)\nfrom rest_framework.response import Response\n\nfrom payments.exceptions import PaymentError\nfrom payments.services import delete_payment\nfrom pizzas.api.v2 import filters\nfrom pizzas.api.v2.serializers import (\n FoodOrderCreateSerializer,\n FoodOrderSerializer,\n FoodOrderUpdateSerializer,\n ProductSerializer,\n)\nfrom pizzas.api.v2.serializers.food_event import FoodEventSerializer\nfrom pizzas.models import FoodEvent, FoodOrder, Product\nfrom thaliawebsite.api.v2.permissions import IsAuthenticatedOrTokenHasScopeForMethod\n\n\nclass FoodEventListView(ListAPIView):\n \"\"\"Returns an overview of all food events.\"\"\"\n\n serializer_class = FoodEventSerializer\n queryset = FoodEvent.objects.all()\n filter_backends = (\n framework_filters.OrderingFilter,\n filters.FoodEventDateFilterBackend,\n )\n 
ordering_fields = (\"start\", \"end\")\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n required_scopes = [\"food:read\"]\n\n\nclass FoodEventDetailView(RetrieveAPIView):\n \"\"\"Returns one single food event.\"\"\"\n\n serializer_class = FoodEventSerializer\n queryset = FoodEvent.objects.all()\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n required_scopes = [\"food:read\"]\n\n\nclass FoodEventProductsListView(ListAPIView):\n \"\"\"Returns an overview of all products.\"\"\"\n\n serializer_class = ProductSerializer\n queryset = Product.available_products.all()\n filter_backends = (framework_filters.SearchFilter,)\n search_fields = (\"name\",)\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n required_scopes = [\"food:read\"]\n\n\nclass FoodEventOrderDetailView(\n RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView\n):\n \"\"\"Returns details of a food order.\"\"\"\n\n permission_classes = [\n IsAuthenticatedOrTokenHasScopeForMethod,\n ]\n required_scopes_per_method = {\n \"GET\": [\"food:read\"],\n \"POST\": [\"food:order\"],\n \"PUT\": [\"food:order\"],\n \"PATCH\": [\"food:order\"],\n \"DELETE\": [\"food:order\"],\n }\n\n def get_serializer_class(self):\n if self.request.method.lower() == \"get\":\n return FoodOrderSerializer\n if self.request.method.lower() == \"post\":\n return FoodOrderCreateSerializer\n return FoodOrderUpdateSerializer\n\n def get_queryset(self):\n return FoodOrder.objects.filter(food_event=self.food_event)\n\n def get_object(self):\n queryset = self.filter_queryset(self.get_queryset())\n obj = get_object_or_404(queryset, member=self.request.member)\n\n # May raise a permission denied\n self.check_object_permissions(self.request, obj)\n\n return obj\n\n def dispatch(self, request, *args, **kwargs):\n self.food_event = get_object_or_404(FoodEvent, pk=self.kwargs.get(\"pk\"))\n try:\n return super().dispatch(request, *args, **kwargs)\n except PaymentError as e:\n return Response(\n str(e),\n status=status.HTTP_403_FORBIDDEN,\n )\n\n def update(self, request, *args, **kwargs):\n instance = self.get_object()\n\n if instance.payment:\n delete_payment(instance, member=request.member, ignore_change_window=True)\n\n super().update(request, *args, **kwargs)\n\n return Response(\n FoodOrderSerializer(instance, context=self.get_serializer_context()).data\n )\n\n def create(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n self.perform_create(serializer)\n return Response(\n FoodOrderSerializer(\n serializer.instance, context=self.get_serializer_context()\n ).data,\n status=status.HTTP_201_CREATED,\n )\n", "path": "website/pizzas/api/v2/views.py"}], "after_files": [{"content": "from django.db.models import Prefetch\n\nfrom oauth2_provider.contrib.rest_framework import IsAuthenticatedOrTokenHasScope\nfrom rest_framework import filters as framework_filters\nfrom rest_framework import status\nfrom rest_framework.generics import (\n CreateAPIView,\n DestroyAPIView,\n ListAPIView,\n RetrieveAPIView,\n UpdateAPIView,\n get_object_or_404,\n)\nfrom rest_framework.response import Response\n\nfrom events.models.event_registration import EventRegistration\nfrom payments.exceptions import PaymentError\nfrom payments.services import delete_payment\nfrom pizzas.api.v2 import filters\nfrom pizzas.api.v2.serializers import (\n FoodOrderCreateSerializer,\n FoodOrderSerializer,\n FoodOrderUpdateSerializer,\n ProductSerializer,\n)\nfrom 
pizzas.api.v2.serializers.food_event import FoodEventSerializer\nfrom pizzas.models import FoodEvent, FoodOrder, Product\nfrom thaliawebsite.api.v2.permissions import IsAuthenticatedOrTokenHasScopeForMethod\n\n\nclass FoodEventListView(ListAPIView):\n \"\"\"Returns an overview of all food events.\"\"\"\n\n serializer_class = FoodEventSerializer\n queryset = FoodEvent.objects.all()\n filter_backends = (\n framework_filters.OrderingFilter,\n filters.FoodEventDateFilterBackend,\n )\n ordering_fields = (\"start\", \"end\")\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n required_scopes = [\"food:read\"]\n\n\nclass FoodEventDetailView(RetrieveAPIView):\n \"\"\"Returns one single food event.\"\"\"\n\n serializer_class = FoodEventSerializer\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n required_scopes = [\"food:read\"]\n\n def get_queryset(self):\n events = FoodEvent.objects.all()\n if self.request.member:\n events = events.prefetch_related(\n Prefetch(\n \"event__eventregistration_set\",\n to_attr=\"member_registration\",\n queryset=EventRegistration.objects.filter(\n member=self.request.member\n ).select_properties(\"queue_position\"),\n )\n )\n return events\n\n\nclass FoodEventProductsListView(ListAPIView):\n \"\"\"Returns an overview of all products.\"\"\"\n\n serializer_class = ProductSerializer\n queryset = Product.available_products.all()\n filter_backends = (framework_filters.SearchFilter,)\n search_fields = (\"name\",)\n permission_classes = [\n IsAuthenticatedOrTokenHasScope,\n ]\n required_scopes = [\"food:read\"]\n\n\nclass FoodEventOrderDetailView(\n RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView\n):\n \"\"\"Returns details of a food order.\"\"\"\n\n permission_classes = [\n IsAuthenticatedOrTokenHasScopeForMethod,\n ]\n required_scopes_per_method = {\n \"GET\": [\"food:read\"],\n \"POST\": [\"food:order\"],\n \"PUT\": [\"food:order\"],\n \"PATCH\": [\"food:order\"],\n \"DELETE\": [\"food:order\"],\n }\n\n def get_serializer_class(self):\n if self.request.method.lower() == \"get\":\n return FoodOrderSerializer\n if self.request.method.lower() == \"post\":\n return FoodOrderCreateSerializer\n return FoodOrderUpdateSerializer\n\n def get_queryset(self):\n return FoodOrder.objects.filter(food_event=self.food_event)\n\n def get_object(self):\n queryset = self.filter_queryset(self.get_queryset())\n obj = get_object_or_404(queryset, member=self.request.member)\n\n # May raise a permission denied\n self.check_object_permissions(self.request, obj)\n\n return obj\n\n def dispatch(self, request, *args, **kwargs):\n self.food_event = get_object_or_404(FoodEvent, pk=self.kwargs.get(\"pk\"))\n try:\n return super().dispatch(request, *args, **kwargs)\n except PaymentError as e:\n return Response(\n str(e),\n status=status.HTTP_403_FORBIDDEN,\n )\n\n def update(self, request, *args, **kwargs):\n instance = self.get_object()\n\n if instance.payment:\n delete_payment(instance, member=request.member, ignore_change_window=True)\n\n super().update(request, *args, **kwargs)\n\n return Response(\n FoodOrderSerializer(instance, context=self.get_serializer_context()).data\n )\n\n def create(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n self.perform_create(serializer)\n return Response(\n FoodOrderSerializer(\n serializer.instance, context=self.get_serializer_context()\n ).data,\n status=status.HTTP_201_CREATED,\n )\n", "path": "website/pizzas/api/v2/views.py"}]} | 1,675 
| 325 |
gh_patches_debug_19964 | rasdani/github-patches | git_diff | matrix-org__synapse-6151 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Uploading a too large file: wrong error code
When uploading a too-large file, Synapse responds with HTTP 413 (the expected status code) but with errcode M_UNKNOWN. According to the [spec](https://matrix.org/docs/spec/client_server/latest#post-matrix-media-r0-upload), it should be "M_TOO_LARGE"
Received responseCode: 413
Received responseBody:
`{"errcode":"M_UNKNOWN","error":"Upload request body is too large"}`
--- END ISSUE ---
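For reference, Synapse's error type already carries an `errcode` field, so the fix is a one-argument change at the raise site. A minimal sketch of the corrected check and the body a client would then see (the size limit is an illustrative value, not Synapse's default):

```python
from synapse.api.errors import Codes, SynapseError

MAX_UPLOAD_SIZE = 50 * 1024 * 1024  # illustrative limit


def check_content_length(content_length: int) -> None:
    if content_length > MAX_UPLOAD_SIZE:
        raise SynapseError(
            msg="Upload request body is too large",
            code=413,
            errcode=Codes.TOO_LARGE,  # serialised as "M_TOO_LARGE"
        )

# The response body then becomes:
#   {"errcode": "M_TOO_LARGE", "error": "Upload request body is too large"}
```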
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `synapse/rest/media/v1/upload_resource.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Copyright 2014-2016 OpenMarket Ltd
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import logging
17
18 from twisted.web.server import NOT_DONE_YET
19
20 from synapse.api.errors import SynapseError
21 from synapse.http.server import (
22 DirectServeResource,
23 respond_with_json,
24 wrap_json_request_handler,
25 )
26 from synapse.http.servlet import parse_string
27
28 logger = logging.getLogger(__name__)
29
30
31 class UploadResource(DirectServeResource):
32 isLeaf = True
33
34 def __init__(self, hs, media_repo):
35 super().__init__()
36
37 self.media_repo = media_repo
38 self.filepaths = media_repo.filepaths
39 self.store = hs.get_datastore()
40 self.clock = hs.get_clock()
41 self.server_name = hs.hostname
42 self.auth = hs.get_auth()
43 self.max_upload_size = hs.config.max_upload_size
44 self.clock = hs.get_clock()
45
46 def render_OPTIONS(self, request):
47 respond_with_json(request, 200, {}, send_cors=True)
48 return NOT_DONE_YET
49
50 @wrap_json_request_handler
51 async def _async_render_POST(self, request):
52 requester = await self.auth.get_user_by_req(request)
53 # TODO: The checks here are a bit late. The content will have
54 # already been uploaded to a tmp file at this point
55 content_length = request.getHeader(b"Content-Length").decode("ascii")
56 if content_length is None:
57 raise SynapseError(msg="Request must specify a Content-Length", code=400)
58 if int(content_length) > self.max_upload_size:
59 raise SynapseError(msg="Upload request body is too large", code=413)
60
61 upload_name = parse_string(request, b"filename", encoding=None)
62 if upload_name:
63 try:
64 upload_name = upload_name.decode("utf8")
65 except UnicodeDecodeError:
66 raise SynapseError(
67 msg="Invalid UTF-8 filename parameter: %r" % (upload_name), code=400
68 )
69
70 headers = request.requestHeaders
71
72 if headers.hasHeader(b"Content-Type"):
73 media_type = headers.getRawHeaders(b"Content-Type")[0].decode("ascii")
74 else:
75 raise SynapseError(msg="Upload request missing 'Content-Type'", code=400)
76
77 # if headers.hasHeader(b"Content-Disposition"):
78 # disposition = headers.getRawHeaders(b"Content-Disposition")[0]
79 # TODO(markjh): parse content-dispostion
80
81 content_uri = await self.media_repo.create_content(
82 media_type, upload_name, request.content, content_length, requester.user
83 )
84
85 logger.info("Uploaded content with URI %r", content_uri)
86
87 respond_with_json(request, 200, {"content_uri": content_uri}, send_cors=True)
88
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -17,7 +17,7 @@
from twisted.web.server import NOT_DONE_YET
-from synapse.api.errors import SynapseError
+from synapse.api.errors import Codes, SynapseError
from synapse.http.server import (
DirectServeResource,
respond_with_json,
@@ -56,7 +56,11 @@
if content_length is None:
raise SynapseError(msg="Request must specify a Content-Length", code=400)
if int(content_length) > self.max_upload_size:
- raise SynapseError(msg="Upload request body is too large", code=413)
+ raise SynapseError(
+ msg="Upload request body is too large",
+ code=413,
+ errcode=Codes.TOO_LARGE,
+ )
upload_name = parse_string(request, b"filename", encoding=None)
if upload_name:
| {"golden_diff": "diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py\n--- a/synapse/rest/media/v1/upload_resource.py\n+++ b/synapse/rest/media/v1/upload_resource.py\n@@ -17,7 +17,7 @@\n \n from twisted.web.server import NOT_DONE_YET\n \n-from synapse.api.errors import SynapseError\n+from synapse.api.errors import Codes, SynapseError\n from synapse.http.server import (\n DirectServeResource,\n respond_with_json,\n@@ -56,7 +56,11 @@\n if content_length is None:\n raise SynapseError(msg=\"Request must specify a Content-Length\", code=400)\n if int(content_length) > self.max_upload_size:\n- raise SynapseError(msg=\"Upload request body is too large\", code=413)\n+ raise SynapseError(\n+ msg=\"Upload request body is too large\",\n+ code=413,\n+ errcode=Codes.TOO_LARGE,\n+ )\n \n upload_name = parse_string(request, b\"filename\", encoding=None)\n if upload_name:\n", "issue": "Uploading a too large file: wrong error code\nWhen uploading a too large file, synapse responds with a 413 (OK) but with errcode M_UNKNOWN. According to the [spec](https://matrix.org/docs/spec/client_server/latest#post-matrix-media-r0-upload), it should be \"M_TOO_LARGE\"\r\n\r\nReceived responseCode: 413\r\nReceived responseBody:\r\n`{\"errcode\":\"M_UNKNOWN\",\"error\":\"Upload request body is too large\"}`\r\n\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright 2014-2016 OpenMarket Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\n\nfrom twisted.web.server import NOT_DONE_YET\n\nfrom synapse.api.errors import SynapseError\nfrom synapse.http.server import (\n DirectServeResource,\n respond_with_json,\n wrap_json_request_handler,\n)\nfrom synapse.http.servlet import parse_string\n\nlogger = logging.getLogger(__name__)\n\n\nclass UploadResource(DirectServeResource):\n isLeaf = True\n\n def __init__(self, hs, media_repo):\n super().__init__()\n\n self.media_repo = media_repo\n self.filepaths = media_repo.filepaths\n self.store = hs.get_datastore()\n self.clock = hs.get_clock()\n self.server_name = hs.hostname\n self.auth = hs.get_auth()\n self.max_upload_size = hs.config.max_upload_size\n self.clock = hs.get_clock()\n\n def render_OPTIONS(self, request):\n respond_with_json(request, 200, {}, send_cors=True)\n return NOT_DONE_YET\n\n @wrap_json_request_handler\n async def _async_render_POST(self, request):\n requester = await self.auth.get_user_by_req(request)\n # TODO: The checks here are a bit late. 
The content will have\n # already been uploaded to a tmp file at this point\n content_length = request.getHeader(b\"Content-Length\").decode(\"ascii\")\n if content_length is None:\n raise SynapseError(msg=\"Request must specify a Content-Length\", code=400)\n if int(content_length) > self.max_upload_size:\n raise SynapseError(msg=\"Upload request body is too large\", code=413)\n\n upload_name = parse_string(request, b\"filename\", encoding=None)\n if upload_name:\n try:\n upload_name = upload_name.decode(\"utf8\")\n except UnicodeDecodeError:\n raise SynapseError(\n msg=\"Invalid UTF-8 filename parameter: %r\" % (upload_name), code=400\n )\n\n headers = request.requestHeaders\n\n if headers.hasHeader(b\"Content-Type\"):\n media_type = headers.getRawHeaders(b\"Content-Type\")[0].decode(\"ascii\")\n else:\n raise SynapseError(msg=\"Upload request missing 'Content-Type'\", code=400)\n\n # if headers.hasHeader(b\"Content-Disposition\"):\n # disposition = headers.getRawHeaders(b\"Content-Disposition\")[0]\n # TODO(markjh): parse content-dispostion\n\n content_uri = await self.media_repo.create_content(\n media_type, upload_name, request.content, content_length, requester.user\n )\n\n logger.info(\"Uploaded content with URI %r\", content_uri)\n\n respond_with_json(request, 200, {\"content_uri\": content_uri}, send_cors=True)\n", "path": "synapse/rest/media/v1/upload_resource.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright 2014-2016 OpenMarket Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\n\nfrom twisted.web.server import NOT_DONE_YET\n\nfrom synapse.api.errors import Codes, SynapseError\nfrom synapse.http.server import (\n DirectServeResource,\n respond_with_json,\n wrap_json_request_handler,\n)\nfrom synapse.http.servlet import parse_string\n\nlogger = logging.getLogger(__name__)\n\n\nclass UploadResource(DirectServeResource):\n isLeaf = True\n\n def __init__(self, hs, media_repo):\n super().__init__()\n\n self.media_repo = media_repo\n self.filepaths = media_repo.filepaths\n self.store = hs.get_datastore()\n self.clock = hs.get_clock()\n self.server_name = hs.hostname\n self.auth = hs.get_auth()\n self.max_upload_size = hs.config.max_upload_size\n self.clock = hs.get_clock()\n\n def render_OPTIONS(self, request):\n respond_with_json(request, 200, {}, send_cors=True)\n return NOT_DONE_YET\n\n @wrap_json_request_handler\n async def _async_render_POST(self, request):\n requester = await self.auth.get_user_by_req(request)\n # TODO: The checks here are a bit late. 
The content will have\n # already been uploaded to a tmp file at this point\n content_length = request.getHeader(b\"Content-Length\").decode(\"ascii\")\n if content_length is None:\n raise SynapseError(msg=\"Request must specify a Content-Length\", code=400)\n if int(content_length) > self.max_upload_size:\n raise SynapseError(\n msg=\"Upload request body is too large\",\n code=413,\n errcode=Codes.TOO_LARGE,\n )\n\n upload_name = parse_string(request, b\"filename\", encoding=None)\n if upload_name:\n try:\n upload_name = upload_name.decode(\"utf8\")\n except UnicodeDecodeError:\n raise SynapseError(\n msg=\"Invalid UTF-8 filename parameter: %r\" % (upload_name), code=400\n )\n\n headers = request.requestHeaders\n\n if headers.hasHeader(b\"Content-Type\"):\n media_type = headers.getRawHeaders(b\"Content-Type\")[0].decode(\"ascii\")\n else:\n raise SynapseError(msg=\"Upload request missing 'Content-Type'\", code=400)\n\n # if headers.hasHeader(b\"Content-Disposition\"):\n # disposition = headers.getRawHeaders(b\"Content-Disposition\")[0]\n # TODO(markjh): parse content-dispostion\n\n content_uri = await self.media_repo.create_content(\n media_type, upload_name, request.content, content_length, requester.user\n )\n\n logger.info(\"Uploaded content with URI %r\", content_uri)\n\n respond_with_json(request, 200, {\"content_uri\": content_uri}, send_cors=True)\n", "path": "synapse/rest/media/v1/upload_resource.py"}]} | 1,258 | 248 |
gh_patches_debug_5760 | rasdani/github-patches | git_diff | NVIDIA__NVFlare-363 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Users are not warned when running poc command
--- END ISSUE ---
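As a rough illustration of the kind of change being asked for, a warning could be printed alongside the existing success message; the wording and call site below are assumptions, not the confirmed fix.

```python
# Hedged sketch: surface a loud warning once the poc folder has been written.
def print_production_warning() -> None:
    print(
        "\n\nWARNING:\n"
        "******* Files generated by this poc command are NOT intended "
        "for production environments."
    )


# Hypothetical usage, e.g. at the end of main():
# print("Successfully creating poc folder. Please read poc/Readme.rst for user guide.")
# print_production_warning()
```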
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nvflare/lighter/poc.py`
Content:
```
1 # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import argparse
16 import os
17 import pathlib
18 import shutil
19
20
21 def clone_client(num_clients: int):
22 current_path = os.getcwd()
23 poc_folder = os.path.join(current_path, "poc")
24 src_folder = os.path.join(poc_folder, "client")
25 for index in range(1, num_clients + 1):
26 dst_folder = os.path.join(poc_folder, f"site-{index}")
27 shutil.copytree(src_folder, dst_folder)
28 start_sh = open(os.path.join(dst_folder, "startup", "start.sh"), "rt")
29 content = start_sh.read()
30 start_sh.close()
31 content = content.replace("NNN", f"{index}")
32 with open(os.path.join(dst_folder, "startup", "start.sh"), "wt") as f:
33 f.write(content)
34 shutil.rmtree(src_folder)
35
36
37 def main():
38 parser = argparse.ArgumentParser()
39 parser.add_argument("-n", "--num_clients", type=int, default=1, help="number of client folders to create")
40
41 args = parser.parse_args()
42
43 file_dir_path = pathlib.Path(__file__).parent.absolute()
44 poc_zip_path = file_dir_path.parent / "poc.zip"
45 poc_folder_path = file_dir_path.parent / "poc"
46 answer = input("This will delete poc folder in current directory and create a new one. Is it OK to proceed? (y/N) ")
47 if answer.strip().upper() == "Y":
48 dest_poc_folder = os.path.join(os.getcwd(), "poc")
49 shutil.rmtree(dest_poc_folder, ignore_errors=True)
50 try:
51 shutil.unpack_archive(poc_zip_path)
52 except shutil.ReadError:
53 print(f"poc.zip not found at {poc_zip_path}, try to use template poc folder")
54 try:
55 shutil.copytree(poc_folder_path, dest_poc_folder)
56 except BaseException:
57 print(f"Unable to copy poc folder from {poc_folder_path}. Exit")
58 exit(1)
59 for root, dirs, files in os.walk(dest_poc_folder):
60 for file in files:
61 if file.endswith(".sh"):
62 os.chmod(os.path.join(root, file), 0o755)
63 clone_client(args.num_clients)
64 print("Successfully creating poc folder. Please read poc/Readme.rst for user guide.")
65
66
67 if __name__ == "__main__":
68 main()
69
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nvflare/lighter/poc.py b/nvflare/lighter/poc.py
--- a/nvflare/lighter/poc.py
+++ b/nvflare/lighter/poc.py
@@ -62,6 +62,7 @@
os.chmod(os.path.join(root, file), 0o755)
clone_client(args.num_clients)
print("Successfully creating poc folder. Please read poc/Readme.rst for user guide.")
+ print("\n\nWARNING:\n******* Files generated by this poc command are NOT intended for production environments.")
if __name__ == "__main__":
| {"golden_diff": "diff --git a/nvflare/lighter/poc.py b/nvflare/lighter/poc.py\n--- a/nvflare/lighter/poc.py\n+++ b/nvflare/lighter/poc.py\n@@ -62,6 +62,7 @@\n os.chmod(os.path.join(root, file), 0o755)\n clone_client(args.num_clients)\n print(\"Successfully creating poc folder. Please read poc/Readme.rst for user guide.\")\n+ print(\"\\n\\nWARNING:\\n******* Files generated by this poc command are NOT intended for production environments.\")\n \n \n if __name__ == \"__main__\":\n", "issue": "Users are not warned when running poc command\n\n", "before_files": [{"content": "# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nimport pathlib\nimport shutil\n\n\ndef clone_client(num_clients: int):\n current_path = os.getcwd()\n poc_folder = os.path.join(current_path, \"poc\")\n src_folder = os.path.join(poc_folder, \"client\")\n for index in range(1, num_clients + 1):\n dst_folder = os.path.join(poc_folder, f\"site-{index}\")\n shutil.copytree(src_folder, dst_folder)\n start_sh = open(os.path.join(dst_folder, \"startup\", \"start.sh\"), \"rt\")\n content = start_sh.read()\n start_sh.close()\n content = content.replace(\"NNN\", f\"{index}\")\n with open(os.path.join(dst_folder, \"startup\", \"start.sh\"), \"wt\") as f:\n f.write(content)\n shutil.rmtree(src_folder)\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-n\", \"--num_clients\", type=int, default=1, help=\"number of client folders to create\")\n\n args = parser.parse_args()\n\n file_dir_path = pathlib.Path(__file__).parent.absolute()\n poc_zip_path = file_dir_path.parent / \"poc.zip\"\n poc_folder_path = file_dir_path.parent / \"poc\"\n answer = input(\"This will delete poc folder in current directory and create a new one. Is it OK to proceed? (y/N) \")\n if answer.strip().upper() == \"Y\":\n dest_poc_folder = os.path.join(os.getcwd(), \"poc\")\n shutil.rmtree(dest_poc_folder, ignore_errors=True)\n try:\n shutil.unpack_archive(poc_zip_path)\n except shutil.ReadError:\n print(f\"poc.zip not found at {poc_zip_path}, try to use template poc folder\")\n try:\n shutil.copytree(poc_folder_path, dest_poc_folder)\n except BaseException:\n print(f\"Unable to copy poc folder from {poc_folder_path}. Exit\")\n exit(1)\n for root, dirs, files in os.walk(dest_poc_folder):\n for file in files:\n if file.endswith(\".sh\"):\n os.chmod(os.path.join(root, file), 0o755)\n clone_client(args.num_clients)\n print(\"Successfully creating poc folder. Please read poc/Readme.rst for user guide.\")\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "nvflare/lighter/poc.py"}], "after_files": [{"content": "# Copyright (c) 2021-2022, NVIDIA CORPORATION. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nimport pathlib\nimport shutil\n\n\ndef clone_client(num_clients: int):\n current_path = os.getcwd()\n poc_folder = os.path.join(current_path, \"poc\")\n src_folder = os.path.join(poc_folder, \"client\")\n for index in range(1, num_clients + 1):\n dst_folder = os.path.join(poc_folder, f\"site-{index}\")\n shutil.copytree(src_folder, dst_folder)\n start_sh = open(os.path.join(dst_folder, \"startup\", \"start.sh\"), \"rt\")\n content = start_sh.read()\n start_sh.close()\n content = content.replace(\"NNN\", f\"{index}\")\n with open(os.path.join(dst_folder, \"startup\", \"start.sh\"), \"wt\") as f:\n f.write(content)\n shutil.rmtree(src_folder)\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-n\", \"--num_clients\", type=int, default=1, help=\"number of client folders to create\")\n\n args = parser.parse_args()\n\n file_dir_path = pathlib.Path(__file__).parent.absolute()\n poc_zip_path = file_dir_path.parent / \"poc.zip\"\n poc_folder_path = file_dir_path.parent / \"poc\"\n answer = input(\"This will delete poc folder in current directory and create a new one. Is it OK to proceed? (y/N) \")\n if answer.strip().upper() == \"Y\":\n dest_poc_folder = os.path.join(os.getcwd(), \"poc\")\n shutil.rmtree(dest_poc_folder, ignore_errors=True)\n try:\n shutil.unpack_archive(poc_zip_path)\n except shutil.ReadError:\n print(f\"poc.zip not found at {poc_zip_path}, try to use template poc folder\")\n try:\n shutil.copytree(poc_folder_path, dest_poc_folder)\n except BaseException:\n print(f\"Unable to copy poc folder from {poc_folder_path}. Exit\")\n exit(1)\n for root, dirs, files in os.walk(dest_poc_folder):\n for file in files:\n if file.endswith(\".sh\"):\n os.chmod(os.path.join(root, file), 0o755)\n clone_client(args.num_clients)\n print(\"Successfully creating poc folder. Please read poc/Readme.rst for user guide.\")\n print(\"\\n\\nWARNING:\\n******* Files generated by this poc command are NOT intended for production environments.\")\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "nvflare/lighter/poc.py"}]} | 1,053 | 134 |
gh_patches_debug_14571 | rasdani/github-patches | git_diff | wagtail__wagtail-1811 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
wagtailsearch.views.frontend needs updating to use non-deprecated search method
Currently using Page.search, which is deprecated in Wagtail 1.2 - should be updated to use the QuerySet-based mechanism instead.
https://github.com/torchbox/wagtail/blob/master/wagtail/wagtailsearch/views/frontend.py#L41
--- END ISSUE ---
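For reference, the QuerySet-based replacement filters a `Page` queryset and calls `.search()` on it. The sketch below is illustrative; the `live()` filter and the `fields=` keyword follow the Wagtail 1.x queryset API and should be treated as assumptions rather than the verified fix.

```python
# Hedged sketch of the non-deprecated, QuerySet-based search path.
from wagtail.wagtailcore.models import Page


def search_pages(query_string, root_path, show_unpublished=False,
                 search_title_only=False, extra_filters=None):
    pages = Page.objects.filter(path__startswith=root_path)
    if not show_unpublished:
        pages = pages.live()  # published pages only
    if extra_filters:
        pages = pages.filter(**extra_filters)
    if search_title_only:
        return pages.search(query_string, fields=["title"])
    return pages.search(query_string)
```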
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wagtail/wagtailsearch/views/frontend.py`
Content:
```
1 from django.conf import settings
2 from django.shortcuts import render
3 from django.http import JsonResponse
4 from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
5
6 from wagtail.wagtailcore import models
7 from wagtail.wagtailsearch.models import Query
8
9
10 def search(
11 request,
12 template=None,
13 template_ajax=None,
14 results_per_page=10,
15 use_json=False,
16 json_attrs=['title', 'url'],
17 show_unpublished=False,
18 search_title_only=False,
19 extra_filters={},
20 path=None):
21
22 # Get default templates
23 if template is None:
24 if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE'):
25 template = settings.WAGTAILSEARCH_RESULTS_TEMPLATE
26 else:
27 template = 'wagtailsearch/search_results.html'
28
29 if template_ajax is None:
30 if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX'):
31 template_ajax = settings.WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX
32 else:
33 template_ajax = template
34
35 # Get query string and page from GET paramters
36 query_string = request.GET.get('q', '')
37 page = request.GET.get('page', request.GET.get('p', 1))
38
39 # Search
40 if query_string != '':
41 search_results = models.Page.search(
42 query_string,
43 show_unpublished=show_unpublished,
44 search_title_only=search_title_only,
45 extra_filters=extra_filters,
46 path=path if path else request.site.root_page.path
47 )
48
49 # Get query object
50 query = Query.get(query_string)
51
52 # Add hit
53 query.add_hit()
54
55 # Pagination
56 paginator = Paginator(search_results, results_per_page)
57 try:
58 search_results = paginator.page(page)
59 except PageNotAnInteger:
60 search_results = paginator.page(1)
61 except EmptyPage:
62 search_results = paginator.page(paginator.num_pages)
63 else:
64 query = None
65 search_results = None
66
67 if use_json:
68 # Return a json response
69 if search_results:
70 search_results_json = []
71 for result in search_results:
72 result_specific = result.specific
73
74 search_results_json.append(dict(
75 (attr, getattr(result_specific, attr))
76 for attr in json_attrs
77 if hasattr(result_specific, attr)
78 ))
79
80 return JsonResponse(search_results_json, safe=False)
81 else:
82 return JsonResponse([], safe=False)
83 else: # Render a template
84 if request.is_ajax() and template_ajax:
85 template = template_ajax
86
87 return render(request, template, dict(
88 query_string=query_string,
89 search_results=search_results,
90 is_ajax=request.is_ajax(),
91 query=query
92 ))
93
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/wagtail/wagtailsearch/views/frontend.py b/wagtail/wagtailsearch/views/frontend.py
--- a/wagtail/wagtailsearch/views/frontend.py
+++ b/wagtail/wagtailsearch/views/frontend.py
@@ -38,13 +38,18 @@
# Search
if query_string != '':
- search_results = models.Page.search(
- query_string,
- show_unpublished=show_unpublished,
- search_title_only=search_title_only,
- extra_filters=extra_filters,
- path=path if path else request.site.root_page.path
- )
+ pages = models.Page.objects.filter(path__startswith=(path or request.site.root_page.path))
+
+ if not show_unpublished:
+ pages = pages.live()
+
+ if extra_filters:
+ pages = pages.filter(**extra_filters)
+
+ if search_title_only:
+ search_results = pages.search(query_string, fields=['title'])
+ else:
+ search_results = pages.search(query_string)
# Get query object
query = Query.get(query_string)
 | {"golden_diff": "diff --git a/wagtail/wagtailsearch/views/frontend.py b/wagtail/wagtailsearch/views/frontend.py\n--- a/wagtail/wagtailsearch/views/frontend.py\n+++ b/wagtail/wagtailsearch/views/frontend.py\n@@ -38,13 +38,18 @@\n \n     # Search\n     if query_string != '':\n-        search_results = models.Page.search(\n-            query_string,\n-            show_unpublished=show_unpublished,\n-            search_title_only=search_title_only,\n-            extra_filters=extra_filters,\n-            path=path if path else request.site.root_page.path\n-        )\n+        pages = models.Page.objects.filter(path__startswith=(path or request.site.root_page.path))\n+\n+        if not show_unpublished:\n+            pages = pages.live()\n+\n+        if extra_filters:\n+            pages = pages.filter(**extra_filters)\n+\n+        if search_title_only:\n+            search_results = pages.search(query_string, fields=['title'])\n+        else:\n+            search_results = pages.search(query_string)\n \n         # Get query object\n         query = Query.get(query_string)\n", "issue": "wagtailsearch.views.frontend needs updating to use non-deprecated search method\nCurrently using Page.search, which is deprecated in Wagtail 1.2 - should be updated to use the QuerySet-based mechanism instead.\n\nhttps://github.com/torchbox/wagtail/blob/master/wagtail/wagtailsearch/views/frontend.py#L41\n\n", "before_files": [{"content": "from django.conf import settings\nfrom django.shortcuts import render\nfrom django.http import JsonResponse\nfrom django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n\nfrom wagtail.wagtailcore import models\nfrom wagtail.wagtailsearch.models import Query\n\n\ndef search(\n    request,\n    template=None,\n    template_ajax=None,\n    results_per_page=10,\n    use_json=False,\n    json_attrs=['title', 'url'],\n    show_unpublished=False,\n    search_title_only=False,\n    extra_filters={},\n    path=None):\n\n    # Get default templates\n    if template is None:\n        if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE'):\n            template = settings.WAGTAILSEARCH_RESULTS_TEMPLATE\n        else:\n            template = 'wagtailsearch/search_results.html'\n\n    if template_ajax is None:\n        if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX'):\n            template_ajax = settings.WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX\n        else:\n            template_ajax = template\n\n    # Get query string and page from GET paramters\n    query_string = request.GET.get('q', '')\n    page = request.GET.get('page', request.GET.get('p', 1))\n\n    # Search\n    if query_string != '':\n        search_results = models.Page.search(\n            query_string,\n            show_unpublished=show_unpublished,\n            search_title_only=search_title_only,\n            extra_filters=extra_filters,\n            path=path if path else request.site.root_page.path\n        )\n\n        # Get query object\n        query = Query.get(query_string)\n\n        # Add hit\n        query.add_hit()\n\n        # Pagination\n        paginator = Paginator(search_results, results_per_page)\n        try:\n            search_results = paginator.page(page)\n        except PageNotAnInteger:\n            search_results = paginator.page(1)\n        except EmptyPage:\n            search_results = paginator.page(paginator.num_pages)\n    else:\n        query = None\n        search_results = None\n\n    if use_json:\n        # Return a json response\n        if search_results:\n            search_results_json = []\n            for result in search_results:\n                result_specific = result.specific\n\n                search_results_json.append(dict(\n                    (attr, getattr(result_specific, attr))\n                    for attr in json_attrs\n                    if hasattr(result_specific, attr)\n                ))\n\n            return JsonResponse(search_results_json, safe=False)\n        else:\n            return JsonResponse([], safe=False)\n    else: # Render a template\n        if request.is_ajax() and template_ajax:\n            template = template_ajax\n\n        return render(request, template, dict(\n            query_string=query_string,\n            search_results=search_results,\n            is_ajax=request.is_ajax(),\n            query=query\n        ))\n", "path": "wagtail/wagtailsearch/views/frontend.py"}], "after_files": [{"content": "from django.conf import settings\nfrom django.shortcuts import render\nfrom django.http import JsonResponse\nfrom django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n\nfrom wagtail.wagtailcore import models\nfrom wagtail.wagtailsearch.models import Query\n\n\ndef search(\n    request,\n    template=None,\n    template_ajax=None,\n    results_per_page=10,\n    use_json=False,\n    json_attrs=['title', 'url'],\n    show_unpublished=False,\n    search_title_only=False,\n    extra_filters={},\n    path=None):\n\n    # Get default templates\n    if template is None:\n        if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE'):\n            template = settings.WAGTAILSEARCH_RESULTS_TEMPLATE\n        else:\n            template = 'wagtailsearch/search_results.html'\n\n    if template_ajax is None:\n        if hasattr(settings, 'WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX'):\n            template_ajax = settings.WAGTAILSEARCH_RESULTS_TEMPLATE_AJAX\n        else:\n            template_ajax = template\n\n    # Get query string and page from GET paramters\n    query_string = request.GET.get('q', '')\n    page = request.GET.get('page', request.GET.get('p', 1))\n\n    # Search\n    if query_string != '':\n        pages = models.Page.objects.filter(path__startswith=(path or request.site.root_page.path))\n\n        if not show_unpublished:\n            pages = pages.live()\n\n        if extra_filters:\n            pages = pages.filter(**extra_filters)\n\n        if search_title_only:\n            search_results = pages.search(query_string, fields=['title'])\n        else:\n            search_results = pages.search(query_string)\n\n        # Get query object\n        query = Query.get(query_string)\n\n        # Add hit\n        query.add_hit()\n\n        # Pagination\n        paginator = Paginator(search_results, results_per_page)\n        try:\n            search_results = paginator.page(page)\n        except PageNotAnInteger:\n            search_results = paginator.page(1)\n        except EmptyPage:\n            search_results = paginator.page(paginator.num_pages)\n    else:\n        query = None\n        search_results = None\n\n    if use_json:\n        # Return a json response\n        if search_results:\n            search_results_json = []\n            for result in search_results:\n                result_specific = result.specific\n\n                search_results_json.append(dict(\n                    (attr, getattr(result_specific, attr))\n                    for attr in json_attrs\n                    if hasattr(result_specific, attr)\n                ))\n\n            return JsonResponse(search_results_json, safe=False)\n        else:\n            return JsonResponse([], safe=False)\n    else: # Render a template\n        if request.is_ajax() and template_ajax:\n            template = template_ajax\n\n        return render(request, template, dict(\n            query_string=query_string,\n            search_results=search_results,\n            is_ajax=request.is_ajax(),\n            query=query\n        ))\n", "path": "wagtail/wagtailsearch/views/frontend.py"}]} | 1,084 | 238 |
gh_patches_debug_61667 | rasdani/github-patches | git_diff | pallets__click-2714 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Docs wrongly links PRs and Issues to flask

Environment:
- Python version: N/A
- Click version: N/A
Docs wrongly links PRs and Issues to flask

Environment:
- Python version: N/A
- Click version: N/A
--- END ISSUE ---
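The report boils down to two URL templates in the Sphinx configuration pointing at the wrong repository. A corrected fragment might look like the following; it is a sketch of the intended configuration, not a verified patch.

```python
# Hedged sketch: extlinks shortcuts repointed from pallets/flask to
# pallets/click so :issue:`N` and :pr:`N` roles resolve to the right repo.
extlinks = {
    "issue": ("https://github.com/pallets/click/issues/%s", "#%s"),
    "pr": ("https://github.com/pallets/click/pull/%s", "#%s"),
}
```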
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/conf.py`
Content:
```
1 from pallets_sphinx_themes import get_version
2 from pallets_sphinx_themes import ProjectLink
3
4 # Project --------------------------------------------------------------
5
6 project = "Click"
7 copyright = "2014 Pallets"
8 author = "Pallets"
9 release, version = get_version("Click")
10
11 # General --------------------------------------------------------------
12
13 default_role = "code"
14 extensions = [
15 "sphinx.ext.autodoc",
16 "sphinx.ext.extlinks",
17 "sphinx.ext.intersphinx",
18 "sphinx_tabs.tabs",
19 "sphinxcontrib.log_cabinet",
20 "pallets_sphinx_themes",
21 ]
22 autodoc_member_order = "bysource"
23 autodoc_typehints = "description"
24 autodoc_preserve_defaults = True
25 extlinks = {
26 "issue": ("https://github.com/pallets/flask/issues/%s", "#%s"),
27 "pr": ("https://github.com/pallets/flask/pull/%s", "#%s"),
28 }
29 intersphinx_mapping = {
30 "python": ("https://docs.python.org/3/", None),
31 }
32
33 # HTML -----------------------------------------------------------------
34
35 html_theme = "click"
36 html_theme_options = {"index_sidebar_logo": False}
37 html_context = {
38 "project_links": [
39 ProjectLink("Donate", "https://palletsprojects.com/donate"),
40 ProjectLink("PyPI Releases", "https://pypi.org/project/click/"),
41 ProjectLink("Source Code", "https://github.com/pallets/click/"),
42 ProjectLink("Issue Tracker", "https://github.com/pallets/click/issues/"),
43 ProjectLink("Chat", "https://discord.gg/pallets"),
44 ]
45 }
46 html_sidebars = {
47 "index": ["project.html", "localtoc.html", "searchbox.html", "ethicalads.html"],
48 "**": ["localtoc.html", "relations.html", "searchbox.html", "ethicalads.html"],
49 }
50 singlehtml_sidebars = {"index": ["project.html", "localtoc.html", "ethicalads.html"]}
51 html_static_path = ["_static"]
52 html_favicon = "_static/click-icon.png"
53 html_logo = "_static/click-logo-sidebar.png"
54 html_title = f"Click Documentation ({version})"
55 html_show_sourcelink = False
56
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -23,8 +23,8 @@
autodoc_typehints = "description"
autodoc_preserve_defaults = True
extlinks = {
- "issue": ("https://github.com/pallets/flask/issues/%s", "#%s"),
- "pr": ("https://github.com/pallets/flask/pull/%s", "#%s"),
+ "issue": ("https://github.com/pallets/click/issues/%s", "#%s"),
+ "pr": ("https://github.com/pallets/click/pull/%s", "#%s"),
}
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
 | {"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -23,8 +23,8 @@\n autodoc_typehints = \"description\"\n autodoc_preserve_defaults = True\n extlinks = {\n-    \"issue\": (\"https://github.com/pallets/flask/issues/%s\", \"#%s\"),\n-    \"pr\": (\"https://github.com/pallets/flask/pull/%s\", \"#%s\"),\n+    \"issue\": (\"https://github.com/pallets/click/issues/%s\", \"#%s\"),\n+    \"pr\": (\"https://github.com/pallets/click/pull/%s\", \"#%s\"),\n }\n intersphinx_mapping = {\n     \"python\": (\"https://docs.python.org/3/\", None),\n", "issue": "Docs wrongly links PRs and Issues to flask\n\r\n\r\nEnvironment:\r\n\r\n- Python version: N/A\r\n- Click version: N/A\r\n\nDocs wrongly links PRs and Issues to flask\n\r\n\r\nEnvironment:\r\n\r\n- Python version: N/A\r\n- Click version: N/A\r\n\n", "before_files": [{"content": "from pallets_sphinx_themes import get_version\nfrom pallets_sphinx_themes import ProjectLink\n\n# Project --------------------------------------------------------------\n\nproject = \"Click\"\ncopyright = \"2014 Pallets\"\nauthor = \"Pallets\"\nrelease, version = get_version(\"Click\")\n\n# General --------------------------------------------------------------\n\ndefault_role = \"code\"\nextensions = [\n    \"sphinx.ext.autodoc\",\n    \"sphinx.ext.extlinks\",\n    \"sphinx.ext.intersphinx\",\n    \"sphinx_tabs.tabs\",\n    \"sphinxcontrib.log_cabinet\",\n    \"pallets_sphinx_themes\",\n]\nautodoc_member_order = \"bysource\"\nautodoc_typehints = \"description\"\nautodoc_preserve_defaults = True\nextlinks = {\n    \"issue\": (\"https://github.com/pallets/flask/issues/%s\", \"#%s\"),\n    \"pr\": (\"https://github.com/pallets/flask/pull/%s\", \"#%s\"),\n}\nintersphinx_mapping = {\n    \"python\": (\"https://docs.python.org/3/\", None),\n}\n\n# HTML -----------------------------------------------------------------\n\nhtml_theme = \"click\"\nhtml_theme_options = {\"index_sidebar_logo\": False}\nhtml_context = {\n    \"project_links\": [\n        ProjectLink(\"Donate\", \"https://palletsprojects.com/donate\"),\n        ProjectLink(\"PyPI Releases\", \"https://pypi.org/project/click/\"),\n        ProjectLink(\"Source Code\", \"https://github.com/pallets/click/\"),\n        ProjectLink(\"Issue Tracker\", \"https://github.com/pallets/click/issues/\"),\n        ProjectLink(\"Chat\", \"https://discord.gg/pallets\"),\n    ]\n}\nhtml_sidebars = {\n    \"index\": [\"project.html\", \"localtoc.html\", \"searchbox.html\", \"ethicalads.html\"],\n    \"**\": [\"localtoc.html\", \"relations.html\", \"searchbox.html\", \"ethicalads.html\"],\n}\nsinglehtml_sidebars = {\"index\": [\"project.html\", \"localtoc.html\", \"ethicalads.html\"]}\nhtml_static_path = [\"_static\"]\nhtml_favicon = \"_static/click-icon.png\"\nhtml_logo = \"_static/click-logo-sidebar.png\"\nhtml_title = f\"Click Documentation ({version})\"\nhtml_show_sourcelink = False\n", "path": "docs/conf.py"}], "after_files": [{"content": "from pallets_sphinx_themes import get_version\nfrom pallets_sphinx_themes import ProjectLink\n\n# Project --------------------------------------------------------------\n\nproject = \"Click\"\ncopyright = \"2014 Pallets\"\nauthor = \"Pallets\"\nrelease, version = get_version(\"Click\")\n\n# General --------------------------------------------------------------\n\ndefault_role = \"code\"\nextensions = [\n    \"sphinx.ext.autodoc\",\n    \"sphinx.ext.extlinks\",\n    \"sphinx.ext.intersphinx\",\n    \"sphinx_tabs.tabs\",\n    \"sphinxcontrib.log_cabinet\",\n    \"pallets_sphinx_themes\",\n]\nautodoc_member_order = \"bysource\"\nautodoc_typehints = \"description\"\nautodoc_preserve_defaults = True\nextlinks = {\n    \"issue\": (\"https://github.com/pallets/click/issues/%s\", \"#%s\"),\n    \"pr\": (\"https://github.com/pallets/click/pull/%s\", \"#%s\"),\n}\nintersphinx_mapping = {\n    \"python\": (\"https://docs.python.org/3/\", None),\n}\n\n# HTML -----------------------------------------------------------------\n\nhtml_theme = \"click\"\nhtml_theme_options = {\"index_sidebar_logo\": False}\nhtml_context = {\n    \"project_links\": [\n        ProjectLink(\"Donate\", \"https://palletsprojects.com/donate\"),\n        ProjectLink(\"PyPI Releases\", \"https://pypi.org/project/click/\"),\n        ProjectLink(\"Source Code\", \"https://github.com/pallets/click/\"),\n        ProjectLink(\"Issue Tracker\", \"https://github.com/pallets/click/issues/\"),\n        ProjectLink(\"Chat\", \"https://discord.gg/pallets\"),\n    ]\n}\nhtml_sidebars = {\n    \"index\": [\"project.html\", \"localtoc.html\", \"searchbox.html\", \"ethicalads.html\"],\n    \"**\": [\"localtoc.html\", \"relations.html\", \"searchbox.html\", \"ethicalads.html\"],\n}\nsinglehtml_sidebars = {\"index\": [\"project.html\", \"localtoc.html\", \"ethicalads.html\"]}\nhtml_static_path = [\"_static\"]\nhtml_favicon = \"_static/click-icon.png\"\nhtml_logo = \"_static/click-logo-sidebar.png\"\nhtml_title = f\"Click Documentation ({version})\"\nhtml_show_sourcelink = False\n", "path": "docs/conf.py"}]} | 998 | 173 |
gh_patches_debug_1188 | rasdani/github-patches | git_diff | spack__spack-18268 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Installation issue: dbus (missing libsm dependency)
<!-- Thanks for taking the time to report this build failure. To proceed with the report please:
1. Title the issue "Installation issue: <name-of-the-package>".
2. Provide the information required below.
We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively! -->
I am trying to install visit, and I am hitting an error when it tries to install dbus. This appears
to be due to dbus depending on libSM (and through that libuuid), but not declaring that dependency in Spack. So in my build of visit, the libuuid dependency is picked up and set to use the spack installed libuuid via some other package visit depends on, but dbus ends up using the system installed libSM, and there is a mismatch between the two. But the dbus
package should not be linking against system libSM.
### Steps to reproduce the issue
I am trying to install visit, and I am hitting an error when it tries to install dbus. This appears
to be
spack install [email protected]%[email protected] ^[email protected]
eventually aborts with
CCLD dbus-run-session
/lib/../lib64/libSM.so: undefined reference to `uuid_unparse_lower@UUID_1.0'
/lib/../lib64/libSM.so: undefined reference to `uuid_generate@UUID_1.0'
collect2: error: ld returned 1 exit status
Error appears due to the attempt to link the system /lib64/libSM.so
### Information on your system
spack debug report
* **Spack:** 0.14.2
* **Python:** 2.7.16
* **Platform:** linux-rhel7-broadwell
### Additional information
[spack-build-env.txt](https://github.com/spack/spack/files/5125717/spack-build-env.txt)
[spack-build-out.txt](https://github.com/spack/spack/files/5125718/spack-build-out.txt)
No maintainers for dbus
### General information
<!-- These boxes can be checked by replacing [ ] with [x] or by clicking them after submitting the issue. -->
- [x ] I have run `spack debug report` and reported the version of Spack/Python/Platform
- [x] I have run `spack maintainers <name-of-the-package>` and @mentioned any maintainers
- [x ] I have uploaded the build log and environment files
- [ x] I have searched the issues of this repo and believe this is not a duplicate
--- END ISSUE ---
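To make the linkage concrete: if the recipe declared libSM as a first-class dependency, Spack would wire dbus against its own libSM/libuuid pair instead of the system copies. The fragment below is a sketch; the `libsm` package name is assumed from Spack's builtin repository and the recipe is abbreviated for illustration.

```python
# Hedged sketch of the dependency declaration; abbreviated recipe.
from spack import *


class Dbus(Package):
    """Abbreviated for illustration; see the full recipe below."""

    homepage = "http://dbus.freedesktop.org/"
    url = "http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz"

    depends_on("pkgconfig", type="build")
    depends_on("expat")
    depends_on("glib")
    depends_on("libsm")  # the missing declaration; brings a consistent libuuid
```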
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `var/spack/repos/builtin/packages/dbus/package.py`
Content:
```
1 # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
2 # Spack Project Developers. See the top-level COPYRIGHT file for details.
3 #
4 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
5
6 from spack import *
7
8
9 class Dbus(Package):
10 """D-Bus is a message bus system, a simple way for applications to
11 talk to one another. D-Bus supplies both a system daemon (for
12 events such new hardware device printer queue ) and a
13 per-user-login-session daemon (for general IPC needs among user
14 applications). Also, the message bus is built on top of a
15 general one-to-one message passing framework, which can be used
16 by any two applications to communicate directly (without going
17 through the message bus daemon)."""
18
19 homepage = "http://dbus.freedesktop.org/"
20 url = "http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz"
21
22 version('1.12.8', sha256='e2dc99e7338303393b6663a98320aba6a63421bcdaaf571c8022f815e5896eb3')
23 version('1.11.2', sha256='5abc4c57686fa82669ad0039830788f9b03fdc4fff487f0ccf6c9d56ba2645c9')
24 version('1.9.0', sha256='38ebc695b5cbbd239e0f149aa5d5395f0051a0fec1b74f21ff2921b22a31c171')
25 version('1.8.8', sha256='dfab263649a979d0fff64a30cac374891a8e9940350e41f3bbd7679af32bd1fd')
26 version('1.8.6', sha256='eded83ca007b719f32761e60fd8b9ffd0f5796a4caf455b01b5a5ef740ebd23f')
27 version('1.8.4', sha256='3ef63dc8d0111042071ee7f7bafa0650c6ce2d7be957ef0b7ec269495a651ff8')
28 version('1.8.2', sha256='5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08')
29
30 depends_on('pkgconfig', type='build')
31 depends_on('expat')
32 depends_on('glib')
33
34 def install(self, spec, prefix):
35 configure(
36 "--prefix=%s" % prefix,
37 "--disable-systemd",
38 "--disable-launchd")
39 make()
40 make("install")
41
42 # dbus needs a machine id generated after install
43 dbus_uuidgen = Executable(join_path(prefix.bin, 'dbus-uuidgen'))
44 dbus_uuidgen('--ensure')
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py
--- a/var/spack/repos/builtin/packages/dbus/package.py
+++ b/var/spack/repos/builtin/packages/dbus/package.py
@@ -30,6 +30,7 @@
depends_on('pkgconfig', type='build')
depends_on('expat')
depends_on('glib')
+ depends_on('libsm')
def install(self, spec, prefix):
configure(
 | {"golden_diff": "diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py\n--- a/var/spack/repos/builtin/packages/dbus/package.py\n+++ b/var/spack/repos/builtin/packages/dbus/package.py\n@@ -30,6 +30,7 @@\n     depends_on('pkgconfig', type='build')\n     depends_on('expat')\n     depends_on('glib')\n+    depends_on('libsm')\n \n     def install(self, spec, prefix):\n         configure(\n", "issue": "Installation issue: dbus (missing libsm dependency)\n\r\n<!-- Thanks for taking the time to report this build failure. To proceed with the report please:\r\n\r\n1. Title the issue \"Installation issue: <name-of-the-package>\".\r\n2. Provide the information required below.\r\n\r\nWe encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively! -->\r\n\r\nI am trying to install visit, and I am hitting an error when it tries to install dbus. This appears\r\nto be due to dbus depending on libSM (and through that libuuid), but not declaring that dependency in Spack. So in my build of visit, the libuuid dependency is picked up and set to use the spack installed libuuid via some other package visit depends on, but dbus ends up using the system installed libSM, and there is a mismatch between the two. But the dbus\r\npackage should not be linking against system libSM.\r\n\r\n### Steps to reproduce the issue\r\nI am trying to install visit, and I am hitting an error when it tries to install dbus. This appears\r\nto be \r\nspack install [email protected]%[email protected] ^[email protected]\r\neventually aborts with\r\n  CCLD     dbus-run-session\r\n/lib/../lib64/libSM.so: undefined reference to `uuid_unparse_lower@UUID_1.0'\r\n/lib/../lib64/libSM.so: undefined reference to `uuid_generate@UUID_1.0'\r\ncollect2: error: ld returned 1 exit status\r\n\r\nError appears due to the attempt to link the system /lib64/libSM.so\r\n\r\n### Information on your system\r\nspack debug report\r\n* **Spack:** 0.14.2\r\n* **Python:** 2.7.16\r\n* **Platform:** linux-rhel7-broadwell\r\n\r\n### Additional information\r\n[spack-build-env.txt](https://github.com/spack/spack/files/5125717/spack-build-env.txt)\r\n[spack-build-out.txt](https://github.com/spack/spack/files/5125718/spack-build-out.txt)\r\n\r\nNo maintainers for dbus\r\n\r\n### General information\r\n\r\n<!-- These boxes can be checked by replacing [ ] with [x] or by clicking them after submitting the issue. -->\r\n- [x ] I have run `spack debug report` and reported the version of Spack/Python/Platform\r\n- [x] I have run `spack maintainers <name-of-the-package>` and @mentioned any maintainers\r\n- [x ] I have uploaded the build log and environment files\r\n- [ x] I have searched the issues of this repo and believe this is not a duplicate\r\n\n", "before_files": [{"content": "# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nfrom spack import *\n\n\nclass Dbus(Package):\n    \"\"\"D-Bus is a message bus system, a simple way for applications to\n       talk to one another. D-Bus supplies both a system daemon (for\n       events such new hardware device printer queue ) and a\n       per-user-login-session daemon (for general IPC needs among user\n       applications). Also, the message bus is built on top of a\n       general one-to-one message passing framework, which can be used\n       by any two applications to communicate directly (without going\n       through the message bus daemon).\"\"\"\n\n    homepage = \"http://dbus.freedesktop.org/\"\n    url = \"http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz\"\n\n    version('1.12.8', sha256='e2dc99e7338303393b6663a98320aba6a63421bcdaaf571c8022f815e5896eb3')\n    version('1.11.2', sha256='5abc4c57686fa82669ad0039830788f9b03fdc4fff487f0ccf6c9d56ba2645c9')\n    version('1.9.0', sha256='38ebc695b5cbbd239e0f149aa5d5395f0051a0fec1b74f21ff2921b22a31c171')\n    version('1.8.8', sha256='dfab263649a979d0fff64a30cac374891a8e9940350e41f3bbd7679af32bd1fd')\n    version('1.8.6', sha256='eded83ca007b719f32761e60fd8b9ffd0f5796a4caf455b01b5a5ef740ebd23f')\n    version('1.8.4', sha256='3ef63dc8d0111042071ee7f7bafa0650c6ce2d7be957ef0b7ec269495a651ff8')\n    version('1.8.2', sha256='5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08')\n\n    depends_on('pkgconfig', type='build')\n    depends_on('expat')\n    depends_on('glib')\n\n    def install(self, spec, prefix):\n        configure(\n            \"--prefix=%s\" % prefix,\n            \"--disable-systemd\",\n            \"--disable-launchd\")\n        make()\n        make(\"install\")\n\n        # dbus needs a machine id generated after install\n        dbus_uuidgen = Executable(join_path(prefix.bin, 'dbus-uuidgen'))\n        dbus_uuidgen('--ensure')\n", "path": "var/spack/repos/builtin/packages/dbus/package.py"}], "after_files": [{"content": "# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nfrom spack import *\n\n\nclass Dbus(Package):\n    \"\"\"D-Bus is a message bus system, a simple way for applications to\n       talk to one another. D-Bus supplies both a system daemon (for\n       events such new hardware device printer queue ) and a\n       per-user-login-session daemon (for general IPC needs among user\n       applications). Also, the message bus is built on top of a\n       general one-to-one message passing framework, which can be used\n       by any two applications to communicate directly (without going\n       through the message bus daemon).\"\"\"\n\n    homepage = \"http://dbus.freedesktop.org/\"\n    url = \"http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz\"\n\n    version('1.12.8', sha256='e2dc99e7338303393b6663a98320aba6a63421bcdaaf571c8022f815e5896eb3')\n    version('1.11.2', sha256='5abc4c57686fa82669ad0039830788f9b03fdc4fff487f0ccf6c9d56ba2645c9')\n    version('1.9.0', sha256='38ebc695b5cbbd239e0f149aa5d5395f0051a0fec1b74f21ff2921b22a31c171')\n    version('1.8.8', sha256='dfab263649a979d0fff64a30cac374891a8e9940350e41f3bbd7679af32bd1fd')\n    version('1.8.6', sha256='eded83ca007b719f32761e60fd8b9ffd0f5796a4caf455b01b5a5ef740ebd23f')\n    version('1.8.4', sha256='3ef63dc8d0111042071ee7f7bafa0650c6ce2d7be957ef0b7ec269495a651ff8')\n    version('1.8.2', sha256='5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08')\n\n    depends_on('pkgconfig', type='build')\n    depends_on('expat')\n    depends_on('glib')\n    depends_on('libsm')\n\n    def install(self, spec, prefix):\n        configure(\n            \"--prefix=%s\" % prefix,\n            \"--disable-systemd\",\n            \"--disable-launchd\")\n        make()\n        make(\"install\")\n\n        # dbus needs a machine id generated after install\n        dbus_uuidgen = Executable(join_path(prefix.bin, 'dbus-uuidgen'))\n        dbus_uuidgen('--ensure')\n", "path": "var/spack/repos/builtin/packages/dbus/package.py"}]} | 1,756 | 114 |
gh_patches_debug_23335 | rasdani/github-patches | git_diff | Cog-Creators__Red-DiscordBot-2919 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Mod] Bot doesn't record name changes correctly
# Other bugs
#### What were you trying to do?
Check past names using `[p]names` command
#### What were you expecting to happen?
Get past names
#### What actually happened?
I didn't get any names, because we're checking for it in wrong event (`on_member_update` instead of `on_user_update`).
#### How can we reproduce this issue?
1. Load mod cog
2. Change username
3. Use `[p]names` on yourself.
--- END ISSUE ---
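One orienting detail: in discord.py, username changes arrive through the user-level `on_user_update` event, while `on_member_update` only fires for guild-level changes such as nicknames and roles. A sketch of a listener on the correct event is below; the `self.settings` wiring is elided and assumed to match the cog's existing Config setup, so treat this as illustrative rather than the confirmed patch.

```python
# Hedged sketch: record username history from the User-level event.
import discord
from redbot.core import commands


class NameTracker(commands.Cog):
    """Illustrative fragment; settings/config wiring omitted."""

    @commands.Cog.listener()
    async def on_user_update(self, before: discord.User, after: discord.User):
        if before.name != after.name:
            async with self.settings.user(before).past_names() as name_list:
                if after.name in name_list:
                    name_list.remove(after.name)  # keep order, avoid duplicates
                name_list.append(after.name)
                while len(name_list) > 20:  # cap history at 20 entries
                    name_list.pop(0)
```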
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `redbot/cogs/mod/events.py`
Content:
```
1 import logging
2 from datetime import datetime
3 from collections import defaultdict, deque
4
5 import discord
6 from redbot.core import i18n, modlog, commands
7 from redbot.core.utils.mod import is_mod_or_superior
8 from .abc import MixinMeta
9
10 _ = i18n.Translator("Mod", __file__)
11 log = logging.getLogger("red.mod")
12
13
14 class Events(MixinMeta):
15 """
16 This is a mixin for the core mod cog
17 Has a bunch of things split off to here.
18 """
19
20 async def check_duplicates(self, message):
21 guild = message.guild
22 author = message.author
23
24 guild_cache = self.cache.get(guild.id, None)
25 if guild_cache is None:
26 repeats = await self.settings.guild(guild).delete_repeats()
27 if repeats == -1:
28 return False
29 guild_cache = self.cache[guild.id] = defaultdict(lambda: deque(maxlen=repeats))
30
31 if not message.content:
32 return False
33
34 guild_cache[author].append(message.content)
35 msgs = guild_cache[author]
36 if len(msgs) == msgs.maxlen and len(set(msgs)) == 1:
37 try:
38 await message.delete()
39 return True
40 except discord.HTTPException:
41 pass
42 return False
43
44 async def check_mention_spam(self, message):
45 guild = message.guild
46 author = message.author
47
48 max_mentions = await self.settings.guild(guild).ban_mention_spam()
49 if max_mentions:
50 mentions = set(message.mentions)
51 if len(mentions) >= max_mentions:
52 try:
53 await guild.ban(author, reason=_("Mention spam (Autoban)"))
54 except discord.HTTPException:
55 log.info(
56 "Failed to ban member for mention spam in server {}.".format(guild.id)
57 )
58 else:
59 try:
60 await modlog.create_case(
61 self.bot,
62 guild,
63 message.created_at,
64 "ban",
65 author,
66 guild.me,
67 _("Mention spam (Autoban)"),
68 until=None,
69 channel=None,
70 )
71 except RuntimeError as e:
72 print(e)
73 return False
74 return True
75 return False
76
77 @commands.Cog.listener()
78 async def on_message(self, message):
79 author = message.author
80 if message.guild is None or self.bot.user == author:
81 return
82 valid_user = isinstance(author, discord.Member) and not author.bot
83 if not valid_user:
84 return
85
86 # Bots and mods or superior are ignored from the filter
87 mod_or_superior = await is_mod_or_superior(self.bot, obj=author)
88 if mod_or_superior:
89 return
90 # As are anyone configured to be
91 if await self.bot.is_automod_immune(message):
92 return
93 deleted = await self.check_duplicates(message)
94 if not deleted:
95 await self.check_mention_spam(message)
96
97 @commands.Cog.listener()
98 async def on_member_update(self, before: discord.Member, after: discord.Member):
99 if before.name != after.name:
100 async with self.settings.user(before).past_names() as name_list:
101 while None in name_list: # clean out null entries from a bug
102 name_list.remove(None)
103 if after.name in name_list:
104 # Ensure order is maintained without duplicates occuring
105 name_list.remove(after.name)
106 name_list.append(after.name)
107 while len(name_list) > 20:
108 name_list.pop(0)
109
110 if before.nick != after.nick and after.nick is not None:
111 async with self.settings.member(before).past_nicks() as nick_list:
112 while None in nick_list: # clean out null entries from a bug
113 nick_list.remove(None)
114 if after.nick in nick_list:
115 nick_list.remove(after.nick)
116 nick_list.append(after.nick)
117 while len(nick_list) > 20:
118 nick_list.pop(0)
119
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/redbot/cogs/mod/events.py b/redbot/cogs/mod/events.py
--- a/redbot/cogs/mod/events.py
+++ b/redbot/cogs/mod/events.py
@@ -95,7 +95,7 @@
await self.check_mention_spam(message)
@commands.Cog.listener()
- async def on_member_update(self, before: discord.Member, after: discord.Member):
+ async def on_user_update(self, before: discord.User, after: discord.User):
if before.name != after.name:
async with self.settings.user(before).past_names() as name_list:
while None in name_list: # clean out null entries from a bug
@@ -107,6 +107,8 @@
while len(name_list) > 20:
name_list.pop(0)
+ @commands.Cog.listener()
+ async def on_member_update(self, before: discord.Member, after: discord.Member):
if before.nick != after.nick and after.nick is not None:
async with self.settings.member(before).past_nicks() as nick_list:
while None in nick_list: # clean out null entries from a bug
| {"golden_diff": "diff --git a/redbot/cogs/mod/events.py b/redbot/cogs/mod/events.py\n--- a/redbot/cogs/mod/events.py\n+++ b/redbot/cogs/mod/events.py\n@@ -95,7 +95,7 @@\n await self.check_mention_spam(message)\n \n @commands.Cog.listener()\n- async def on_member_update(self, before: discord.Member, after: discord.Member):\n+ async def on_user_update(self, before: discord.User, after: discord.User):\n if before.name != after.name:\n async with self.settings.user(before).past_names() as name_list:\n while None in name_list: # clean out null entries from a bug\n@@ -107,6 +107,8 @@\n while len(name_list) > 20:\n name_list.pop(0)\n \n+ @commands.Cog.listener()\n+ async def on_member_update(self, before: discord.Member, after: discord.Member):\n if before.nick != after.nick and after.nick is not None:\n async with self.settings.member(before).past_nicks() as nick_list:\n while None in nick_list: # clean out null entries from a bug\n", "issue": "[Mod] Bot doesn't record name changes correctly\n# Other bugs\r\n\r\n#### What were you trying to do?\r\n\r\nCheck past names using `[p]names` command\r\n\r\n#### What were you expecting to happen?\r\n\r\nGet past names\r\n\r\n#### What actually happened?\r\n\r\nI didn't get any names, because we're checking for it in wrong event (`on_member_update` instead of `on_user_update`).\r\n\r\n#### How can we reproduce this issue?\r\n\r\n1. Load mod cog\r\n2. Change username\r\n3. Use `[p]names` on yourself.\r\n\n", "before_files": [{"content": "import logging\nfrom datetime import datetime\nfrom collections import defaultdict, deque\n\nimport discord\nfrom redbot.core import i18n, modlog, commands\nfrom redbot.core.utils.mod import is_mod_or_superior\nfrom .abc import MixinMeta\n\n_ = i18n.Translator(\"Mod\", __file__)\nlog = logging.getLogger(\"red.mod\")\n\n\nclass Events(MixinMeta):\n \"\"\"\n This is a mixin for the core mod cog\n Has a bunch of things split off to here.\n \"\"\"\n\n async def check_duplicates(self, message):\n guild = message.guild\n author = message.author\n\n guild_cache = self.cache.get(guild.id, None)\n if guild_cache is None:\n repeats = await self.settings.guild(guild).delete_repeats()\n if repeats == -1:\n return False\n guild_cache = self.cache[guild.id] = defaultdict(lambda: deque(maxlen=repeats))\n\n if not message.content:\n return False\n\n guild_cache[author].append(message.content)\n msgs = guild_cache[author]\n if len(msgs) == msgs.maxlen and len(set(msgs)) == 1:\n try:\n await message.delete()\n return True\n except discord.HTTPException:\n pass\n return False\n\n async def check_mention_spam(self, message):\n guild = message.guild\n author = message.author\n\n max_mentions = await self.settings.guild(guild).ban_mention_spam()\n if max_mentions:\n mentions = set(message.mentions)\n if len(mentions) >= max_mentions:\n try:\n await guild.ban(author, reason=_(\"Mention spam (Autoban)\"))\n except discord.HTTPException:\n log.info(\n \"Failed to ban member for mention spam in server {}.\".format(guild.id)\n )\n else:\n try:\n await modlog.create_case(\n self.bot,\n guild,\n message.created_at,\n \"ban\",\n author,\n guild.me,\n _(\"Mention spam (Autoban)\"),\n until=None,\n channel=None,\n )\n except RuntimeError as e:\n print(e)\n return False\n return True\n return False\n\n @commands.Cog.listener()\n async def on_message(self, message):\n author = message.author\n if message.guild is None or self.bot.user == author:\n return\n valid_user = isinstance(author, discord.Member) and not author.bot\n if not valid_user:\n 
return\n\n # Bots and mods or superior are ignored from the filter\n mod_or_superior = await is_mod_or_superior(self.bot, obj=author)\n if mod_or_superior:\n return\n # As are anyone configured to be\n if await self.bot.is_automod_immune(message):\n return\n deleted = await self.check_duplicates(message)\n if not deleted:\n await self.check_mention_spam(message)\n\n @commands.Cog.listener()\n async def on_member_update(self, before: discord.Member, after: discord.Member):\n if before.name != after.name:\n async with self.settings.user(before).past_names() as name_list:\n while None in name_list: # clean out null entries from a bug\n name_list.remove(None)\n if after.name in name_list:\n # Ensure order is maintained without duplicates occuring\n name_list.remove(after.name)\n name_list.append(after.name)\n while len(name_list) > 20:\n name_list.pop(0)\n\n if before.nick != after.nick and after.nick is not None:\n async with self.settings.member(before).past_nicks() as nick_list:\n while None in nick_list: # clean out null entries from a bug\n nick_list.remove(None)\n if after.nick in nick_list:\n nick_list.remove(after.nick)\n nick_list.append(after.nick)\n while len(nick_list) > 20:\n nick_list.pop(0)\n", "path": "redbot/cogs/mod/events.py"}], "after_files": [{"content": "import logging\nfrom datetime import datetime\nfrom collections import defaultdict, deque\n\nimport discord\nfrom redbot.core import i18n, modlog, commands\nfrom redbot.core.utils.mod import is_mod_or_superior\nfrom .abc import MixinMeta\n\n_ = i18n.Translator(\"Mod\", __file__)\nlog = logging.getLogger(\"red.mod\")\n\n\nclass Events(MixinMeta):\n \"\"\"\n This is a mixin for the core mod cog\n Has a bunch of things split off to here.\n \"\"\"\n\n async def check_duplicates(self, message):\n guild = message.guild\n author = message.author\n\n guild_cache = self.cache.get(guild.id, None)\n if guild_cache is None:\n repeats = await self.settings.guild(guild).delete_repeats()\n if repeats == -1:\n return False\n guild_cache = self.cache[guild.id] = defaultdict(lambda: deque(maxlen=repeats))\n\n if not message.content:\n return False\n\n guild_cache[author].append(message.content)\n msgs = guild_cache[author]\n if len(msgs) == msgs.maxlen and len(set(msgs)) == 1:\n try:\n await message.delete()\n return True\n except discord.HTTPException:\n pass\n return False\n\n async def check_mention_spam(self, message):\n guild = message.guild\n author = message.author\n\n max_mentions = await self.settings.guild(guild).ban_mention_spam()\n if max_mentions:\n mentions = set(message.mentions)\n if len(mentions) >= max_mentions:\n try:\n await guild.ban(author, reason=_(\"Mention spam (Autoban)\"))\n except discord.HTTPException:\n log.info(\n \"Failed to ban member for mention spam in server {}.\".format(guild.id)\n )\n else:\n try:\n await modlog.create_case(\n self.bot,\n guild,\n message.created_at,\n \"ban\",\n author,\n guild.me,\n _(\"Mention spam (Autoban)\"),\n until=None,\n channel=None,\n )\n except RuntimeError as e:\n print(e)\n return False\n return True\n return False\n\n @commands.Cog.listener()\n async def on_message(self, message):\n author = message.author\n if message.guild is None or self.bot.user == author:\n return\n valid_user = isinstance(author, discord.Member) and not author.bot\n if not valid_user:\n return\n\n # Bots and mods or superior are ignored from the filter\n mod_or_superior = await is_mod_or_superior(self.bot, obj=author)\n if mod_or_superior:\n return\n # As are anyone configured to be\n if await 
self.bot.is_automod_immune(message):\n return\n deleted = await self.check_duplicates(message)\n if not deleted:\n await self.check_mention_spam(message)\n\n @commands.Cog.listener()\n async def on_user_update(self, before: discord.User, after: discord.User):\n if before.name != after.name:\n async with self.settings.user(before).past_names() as name_list:\n while None in name_list: # clean out null entries from a bug\n name_list.remove(None)\n if after.name in name_list:\n # Ensure order is maintained without duplicates occuring\n name_list.remove(after.name)\n name_list.append(after.name)\n while len(name_list) > 20:\n name_list.pop(0)\n\n @commands.Cog.listener()\n async def on_member_update(self, before: discord.Member, after: discord.Member):\n if before.nick != after.nick and after.nick is not None:\n async with self.settings.member(before).past_nicks() as nick_list:\n while None in nick_list: # clean out null entries from a bug\n nick_list.remove(None)\n if after.nick in nick_list:\n nick_list.remove(after.nick)\n nick_list.append(after.nick)\n while len(nick_list) > 20:\n nick_list.pop(0)\n", "path": "redbot/cogs/mod/events.py"}]} | 1,470 | 255 |
gh_patches_debug_38252 | rasdani/github-patches | git_diff | doccano__doccano-1261 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
No way to restrict text classification labels to exactly one label to assign
Most classification tasks require exactly one label for each instance. This is also true for most text classification tasks: for example, in sentiment classification with the possible labels negative, neutral, and positive, each instance should receive exactly one of the three labels, and assigning e.g. both neutral and positive would make no sense.
Yet the text classification task in doccano still does not allow restricting assignment to a single label: annotators are free to assign as many labels as they want, including all of them!
This limits the use of doccano for text classification tasks rather severely. The option to allow for any number of labels (0 to all of them) would still be good to have for multilabel classification tasks (e.g. assigning topics), but that is a much rarer annotation task in general.
--- END ISSUE ---
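One plausible way to enforce a single label per document in a DRF view is to replace rather than reject: delete whatever annotation already exists before creating the new one. The sketch below borrows names from this codebase (`self.project`, `single_class_classification`) but is illustrative, not the actual fix:

```python
from rest_framework import generics


class AnnotationList(generics.ListCreateAPIView):
    # queryset/serializer wiring omitted; the queryset is assumed to be
    # scoped to the current document (and, when the project is not
    # collaborative, to the requesting user).

    def create(self, request, *args, **kwargs):
        if self.project.single_class_classification:
            # Replace the existing label instead of raising a
            # ValidationError, so annotators can simply re-label.
            self.get_queryset().delete()
        request.data['document'] = self.kwargs['doc_id']
        return super().create(request, *args, **kwargs)
```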
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/api/views/annotation.py`
Content:
```
1 from django.shortcuts import get_object_or_404
2 from rest_framework import generics, status
3 from rest_framework.exceptions import ValidationError
4 from rest_framework.permissions import IsAuthenticated
5 from rest_framework.response import Response
6 from rest_framework.views import APIView
7
8 from ..models import Document, Project
9 from ..permissions import (IsAnnotationApprover, IsInProjectOrAdmin,
10 IsOwnAnnotation, IsProjectAdmin)
11 from ..serializers import ApproverSerializer
12
13
14 class AnnotationList(generics.ListCreateAPIView):
15 pagination_class = None
16 permission_classes = [IsAuthenticated & IsInProjectOrAdmin]
17 swagger_schema = None
18
19 def get_serializer_class(self):
20 project = get_object_or_404(Project, pk=self.kwargs['project_id'])
21 self.serializer_class = project.get_annotation_serializer()
22 return self.serializer_class
23
24 def get_queryset(self):
25 project = get_object_or_404(Project, pk=self.kwargs['project_id'])
26 model = project.get_annotation_class()
27 queryset = model.objects.filter(document=self.kwargs['doc_id'])
28 if not project.collaborative_annotation:
29 queryset = queryset.filter(user=self.request.user)
30 return queryset
31
32 def create(self, request, *args, **kwargs):
33 self.check_single_class_classification(self.kwargs['project_id'], self.kwargs['doc_id'], request.user)
34 request.data['document'] = self.kwargs['doc_id']
35 return super().create(request, args, kwargs)
36
37 def perform_create(self, serializer):
38 serializer.save(document_id=self.kwargs['doc_id'], user=self.request.user)
39
40 def delete(self, request, *args, **kwargs):
41 queryset = self.get_queryset()
42 queryset.all().delete()
43 return Response(status=status.HTTP_204_NO_CONTENT)
44
45 @staticmethod
46 def check_single_class_classification(project_id, doc_id, user):
47 project = get_object_or_404(Project, pk=project_id)
48 if not project.single_class_classification:
49 return
50
51 model = project.get_annotation_class()
52 annotations = model.objects.filter(document_id=doc_id)
53 if not project.collaborative_annotation:
54 annotations = annotations.filter(user=user)
55
56 if annotations.exists():
57 raise ValidationError('requested to create duplicate annotation for single-class-classification project')
58
59
60 class AnnotationDetail(generics.RetrieveUpdateDestroyAPIView):
61 lookup_url_kwarg = 'annotation_id'
62 swagger_schema = None
63
64 def get_permissions(self):
65 project = get_object_or_404(Project, pk=self.kwargs['project_id'])
66 if project.collaborative_annotation:
67 self.permission_classes = [IsAuthenticated & IsInProjectOrAdmin]
68 else:
69 self.permission_classes = [IsAuthenticated & IsInProjectOrAdmin & IsOwnAnnotation]
70 return super().get_permissions()
71
72 def get_serializer_class(self):
73 project = get_object_or_404(Project, pk=self.kwargs['project_id'])
74 self.serializer_class = project.get_annotation_serializer()
75 return self.serializer_class
76
77 def get_queryset(self):
78 project = get_object_or_404(Project, pk=self.kwargs['project_id'])
79 model = project.get_annotation_class()
80 self.queryset = model.objects.all()
81 return self.queryset
82
83
84 class ApproveLabelsAPI(APIView):
85 permission_classes = [IsAuthenticated & (IsAnnotationApprover | IsProjectAdmin)]
86
87 def post(self, request, *args, **kwargs):
88 approved = self.request.data.get('approved', True)
89 document = get_object_or_404(Document, pk=self.kwargs['doc_id'])
90 document.annotations_approved_by = self.request.user if approved else None
91 document.save()
92 return Response(ApproverSerializer(document).data)
93
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/app/api/views/annotation.py b/app/api/views/annotation.py
--- a/app/api/views/annotation.py
+++ b/app/api/views/annotation.py
@@ -1,6 +1,5 @@
from django.shortcuts import get_object_or_404
from rest_framework import generics, status
-from rest_framework.exceptions import ValidationError
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
@@ -16,21 +15,24 @@
permission_classes = [IsAuthenticated & IsInProjectOrAdmin]
swagger_schema = None
+ @property
+ def project(self):
+ return get_object_or_404(Project, pk=self.kwargs['project_id'])
+
def get_serializer_class(self):
- project = get_object_or_404(Project, pk=self.kwargs['project_id'])
- self.serializer_class = project.get_annotation_serializer()
+ self.serializer_class = self.project.get_annotation_serializer()
return self.serializer_class
def get_queryset(self):
- project = get_object_or_404(Project, pk=self.kwargs['project_id'])
- model = project.get_annotation_class()
+ model = self.project.get_annotation_class()
queryset = model.objects.filter(document=self.kwargs['doc_id'])
- if not project.collaborative_annotation:
+ if not self.project.collaborative_annotation:
queryset = queryset.filter(user=self.request.user)
return queryset
def create(self, request, *args, **kwargs):
- self.check_single_class_classification(self.kwargs['project_id'], self.kwargs['doc_id'], request.user)
+ if self.project.single_class_classification:
+ self.get_queryset().delete()
request.data['document'] = self.kwargs['doc_id']
return super().create(request, args, kwargs)
@@ -42,20 +44,6 @@
queryset.all().delete()
return Response(status=status.HTTP_204_NO_CONTENT)
- @staticmethod
- def check_single_class_classification(project_id, doc_id, user):
- project = get_object_or_404(Project, pk=project_id)
- if not project.single_class_classification:
- return
-
- model = project.get_annotation_class()
- annotations = model.objects.filter(document_id=doc_id)
- if not project.collaborative_annotation:
- annotations = annotations.filter(user=user)
-
- if annotations.exists():
- raise ValidationError('requested to create duplicate annotation for single-class-classification project')
-
class AnnotationDetail(generics.RetrieveUpdateDestroyAPIView):
lookup_url_kwarg = 'annotation_id'
| {"golden_diff": "diff --git a/app/api/views/annotation.py b/app/api/views/annotation.py\n--- a/app/api/views/annotation.py\n+++ b/app/api/views/annotation.py\n@@ -1,6 +1,5 @@\n from django.shortcuts import get_object_or_404\n from rest_framework import generics, status\n-from rest_framework.exceptions import ValidationError\n from rest_framework.permissions import IsAuthenticated\n from rest_framework.response import Response\n from rest_framework.views import APIView\n@@ -16,21 +15,24 @@\n permission_classes = [IsAuthenticated & IsInProjectOrAdmin]\n swagger_schema = None\n \n+ @property\n+ def project(self):\n+ return get_object_or_404(Project, pk=self.kwargs['project_id'])\n+\n def get_serializer_class(self):\n- project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n- self.serializer_class = project.get_annotation_serializer()\n+ self.serializer_class = self.project.get_annotation_serializer()\n return self.serializer_class\n \n def get_queryset(self):\n- project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n- model = project.get_annotation_class()\n+ model = self.project.get_annotation_class()\n queryset = model.objects.filter(document=self.kwargs['doc_id'])\n- if not project.collaborative_annotation:\n+ if not self.project.collaborative_annotation:\n queryset = queryset.filter(user=self.request.user)\n return queryset\n \n def create(self, request, *args, **kwargs):\n- self.check_single_class_classification(self.kwargs['project_id'], self.kwargs['doc_id'], request.user)\n+ if self.project.single_class_classification:\n+ self.get_queryset().delete()\n request.data['document'] = self.kwargs['doc_id']\n return super().create(request, args, kwargs)\n \n@@ -42,20 +44,6 @@\n queryset.all().delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n \n- @staticmethod\n- def check_single_class_classification(project_id, doc_id, user):\n- project = get_object_or_404(Project, pk=project_id)\n- if not project.single_class_classification:\n- return\n-\n- model = project.get_annotation_class()\n- annotations = model.objects.filter(document_id=doc_id)\n- if not project.collaborative_annotation:\n- annotations = annotations.filter(user=user)\n-\n- if annotations.exists():\n- raise ValidationError('requested to create duplicate annotation for single-class-classification project')\n-\n \n class AnnotationDetail(generics.RetrieveUpdateDestroyAPIView):\n lookup_url_kwarg = 'annotation_id'\n", "issue": "No way to restrict text classification labels to exactly one label to assign\nMost classification tasks require exactly one label for each instance. This is also true for most text classification tasks, for example with sentiment classificaiton, and the possible labels negative, neutral, positive, each instance should receive one of the three labels, assigning e.g. both neutral and positive would make not sense.\r\n\r\nYet the text classification task in doccano still does not allow to restrict assignment to a single label, annotators are free to assign as many labels as they want, including all of them!\r\n\r\nThis limits the use of doccano for text classification tasks rather severely. The option to allow for any number of labels (0 to all of them) would still be good to have for multilabel classification tasks (e.g. assigning topics), but that is a much rarer annotation task in general. 
\n", "before_files": [{"content": "from django.shortcuts import get_object_or_404\nfrom rest_framework import generics, status\nfrom rest_framework.exceptions import ValidationError\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom ..models import Document, Project\nfrom ..permissions import (IsAnnotationApprover, IsInProjectOrAdmin,\n IsOwnAnnotation, IsProjectAdmin)\nfrom ..serializers import ApproverSerializer\n\n\nclass AnnotationList(generics.ListCreateAPIView):\n pagination_class = None\n permission_classes = [IsAuthenticated & IsInProjectOrAdmin]\n swagger_schema = None\n\n def get_serializer_class(self):\n project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n self.serializer_class = project.get_annotation_serializer()\n return self.serializer_class\n\n def get_queryset(self):\n project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n model = project.get_annotation_class()\n queryset = model.objects.filter(document=self.kwargs['doc_id'])\n if not project.collaborative_annotation:\n queryset = queryset.filter(user=self.request.user)\n return queryset\n\n def create(self, request, *args, **kwargs):\n self.check_single_class_classification(self.kwargs['project_id'], self.kwargs['doc_id'], request.user)\n request.data['document'] = self.kwargs['doc_id']\n return super().create(request, args, kwargs)\n\n def perform_create(self, serializer):\n serializer.save(document_id=self.kwargs['doc_id'], user=self.request.user)\n\n def delete(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n queryset.all().delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n @staticmethod\n def check_single_class_classification(project_id, doc_id, user):\n project = get_object_or_404(Project, pk=project_id)\n if not project.single_class_classification:\n return\n\n model = project.get_annotation_class()\n annotations = model.objects.filter(document_id=doc_id)\n if not project.collaborative_annotation:\n annotations = annotations.filter(user=user)\n\n if annotations.exists():\n raise ValidationError('requested to create duplicate annotation for single-class-classification project')\n\n\nclass AnnotationDetail(generics.RetrieveUpdateDestroyAPIView):\n lookup_url_kwarg = 'annotation_id'\n swagger_schema = None\n\n def get_permissions(self):\n project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n if project.collaborative_annotation:\n self.permission_classes = [IsAuthenticated & IsInProjectOrAdmin]\n else:\n self.permission_classes = [IsAuthenticated & IsInProjectOrAdmin & IsOwnAnnotation]\n return super().get_permissions()\n\n def get_serializer_class(self):\n project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n self.serializer_class = project.get_annotation_serializer()\n return self.serializer_class\n\n def get_queryset(self):\n project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n model = project.get_annotation_class()\n self.queryset = model.objects.all()\n return self.queryset\n\n\nclass ApproveLabelsAPI(APIView):\n permission_classes = [IsAuthenticated & (IsAnnotationApprover | IsProjectAdmin)]\n\n def post(self, request, *args, **kwargs):\n approved = self.request.data.get('approved', True)\n document = get_object_or_404(Document, pk=self.kwargs['doc_id'])\n document.annotations_approved_by = self.request.user if approved else None\n document.save()\n return Response(ApproverSerializer(document).data)\n", 
"path": "app/api/views/annotation.py"}], "after_files": [{"content": "from django.shortcuts import get_object_or_404\nfrom rest_framework import generics, status\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom ..models import Document, Project\nfrom ..permissions import (IsAnnotationApprover, IsInProjectOrAdmin,\n IsOwnAnnotation, IsProjectAdmin)\nfrom ..serializers import ApproverSerializer\n\n\nclass AnnotationList(generics.ListCreateAPIView):\n pagination_class = None\n permission_classes = [IsAuthenticated & IsInProjectOrAdmin]\n swagger_schema = None\n\n @property\n def project(self):\n return get_object_or_404(Project, pk=self.kwargs['project_id'])\n\n def get_serializer_class(self):\n self.serializer_class = self.project.get_annotation_serializer()\n return self.serializer_class\n\n def get_queryset(self):\n model = self.project.get_annotation_class()\n queryset = model.objects.filter(document=self.kwargs['doc_id'])\n if not self.project.collaborative_annotation:\n queryset = queryset.filter(user=self.request.user)\n return queryset\n\n def create(self, request, *args, **kwargs):\n if self.project.single_class_classification:\n self.get_queryset().delete()\n request.data['document'] = self.kwargs['doc_id']\n return super().create(request, args, kwargs)\n\n def perform_create(self, serializer):\n serializer.save(document_id=self.kwargs['doc_id'], user=self.request.user)\n\n def delete(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n queryset.all().delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\nclass AnnotationDetail(generics.RetrieveUpdateDestroyAPIView):\n lookup_url_kwarg = 'annotation_id'\n swagger_schema = None\n\n def get_permissions(self):\n project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n if project.collaborative_annotation:\n self.permission_classes = [IsAuthenticated & IsInProjectOrAdmin]\n else:\n self.permission_classes = [IsAuthenticated & IsInProjectOrAdmin & IsOwnAnnotation]\n return super().get_permissions()\n\n def get_serializer_class(self):\n project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n self.serializer_class = project.get_annotation_serializer()\n return self.serializer_class\n\n def get_queryset(self):\n project = get_object_or_404(Project, pk=self.kwargs['project_id'])\n model = project.get_annotation_class()\n self.queryset = model.objects.all()\n return self.queryset\n\n\nclass ApproveLabelsAPI(APIView):\n permission_classes = [IsAuthenticated & (IsAnnotationApprover | IsProjectAdmin)]\n\n def post(self, request, *args, **kwargs):\n approved = self.request.data.get('approved', True)\n document = get_object_or_404(Document, pk=self.kwargs['doc_id'])\n document.annotations_approved_by = self.request.user if approved else None\n document.save()\n return Response(ApproverSerializer(document).data)\n", "path": "app/api/views/annotation.py"}]} | 1,392 | 561 |
gh_patches_debug_380 | rasdani/github-patches | git_diff | dotkom__onlineweb4-1931 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SSO base template should extend the base template
## What kind of an issue is this?
- [x] Bug report
## What is the expected behaviour?
The template for the SSO app should extend the base template so we don't have to maintain multiple base templates.
## What is the current behaviour?
It's a custom template, which looks copy/pasted from the base template.
--- END ISSUE ---
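The usual Django remedy is a template that begins with `{% extends "base.html" %}` and overrides only the blocks it needs, with the view simply pointing at that template. A rough sketch of the view side — the template name comes from this app, the rest is illustrative:

```python
from django.contrib.auth.decorators import login_required
from django.shortcuts import render


@login_required
def index(request):
    # 'sso/authorize.html' is expected to start with
    # {% extends "base.html" %} so the shared layout is defined once.
    return render(request, 'sso/authorize.html', {})
```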
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/sso/views.py`
Content:
```
1 # -*- encoding: utf-8 -*-
2
3 import logging
4
5 from django.contrib.auth.decorators import login_required
6 from django.shortcuts import render
7 from oauth2_provider.views.base import AuthorizationView as DefaultAuthorizationView # flake8: noqa
8 from oauth2_provider.views.base import RevokeTokenView, TokenView
9
10 _log = logging.getLogger('SSO')
11
12
13 @login_required
14 def index(request):
15 """
16 This is the main SSO view
17 """
18
19 context = {}
20
21 return render(request, 'sso/index.html', context)
22
23
24 class AuthorizationView(DefaultAuthorizationView):
25 template_name = 'sso/authorize.html'
26
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/apps/sso/views.py b/apps/sso/views.py
--- a/apps/sso/views.py
+++ b/apps/sso/views.py
@@ -18,7 +18,7 @@
context = {}
- return render(request, 'sso/index.html', context)
+ return render(request, 'sso/authorize.html', context)
class AuthorizationView(DefaultAuthorizationView):
| {"golden_diff": "diff --git a/apps/sso/views.py b/apps/sso/views.py\n--- a/apps/sso/views.py\n+++ b/apps/sso/views.py\n@@ -18,7 +18,7 @@\n \n context = {}\n \n- return render(request, 'sso/index.html', context)\n+ return render(request, 'sso/authorize.html', context)\n \n \n class AuthorizationView(DefaultAuthorizationView):\n", "issue": "SSO base template should extend the base template\n## What kind of an issue is this?\r\n\r\n- [x] Bug report\r\n\r\n\r\n## What is the expected behaviour?\r\n\r\nThe template for the SSO app should extend the base template so we don't have to maintain multiple base templates.\r\n\r\n\r\n## What is the current behaviour?\r\n\r\nIt's a custom template, which looks copy/pasted from the base template.\r\n\n", "before_files": [{"content": "# -*- encoding: utf-8 -*-\n\nimport logging\n\nfrom django.contrib.auth.decorators import login_required\nfrom django.shortcuts import render\nfrom oauth2_provider.views.base import AuthorizationView as DefaultAuthorizationView # flake8: noqa\nfrom oauth2_provider.views.base import RevokeTokenView, TokenView\n\n_log = logging.getLogger('SSO')\n\n\n@login_required\ndef index(request):\n \"\"\"\n This is the main SSO view\n \"\"\"\n\n context = {}\n\n return render(request, 'sso/index.html', context)\n\n\nclass AuthorizationView(DefaultAuthorizationView):\n template_name = 'sso/authorize.html'\n", "path": "apps/sso/views.py"}], "after_files": [{"content": "# -*- encoding: utf-8 -*-\n\nimport logging\n\nfrom django.contrib.auth.decorators import login_required\nfrom django.shortcuts import render\nfrom oauth2_provider.views.base import AuthorizationView as DefaultAuthorizationView # flake8: noqa\nfrom oauth2_provider.views.base import RevokeTokenView, TokenView\n\n_log = logging.getLogger('SSO')\n\n\n@login_required\ndef index(request):\n \"\"\"\n This is the main SSO view\n \"\"\"\n\n context = {}\n\n return render(request, 'sso/authorize.html', context)\n\n\nclass AuthorizationView(DefaultAuthorizationView):\n template_name = 'sso/authorize.html'\n", "path": "apps/sso/views.py"}]} | 523 | 87 |
gh_patches_debug_9208 | rasdani/github-patches | git_diff | plotly__dash-1643 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Missing classifiers for Python 3.8/3.9
Python 3.8 and 3.9 are missing from the classifiers in `setup.py`:
https://github.com/plotly/dash/blob/358c5089c929b2e99996f9d4ee6ec634f65437fe/setup.py#L55-L63
But there is no restriction to 3.7 or below in `python_requires`:
https://github.com/plotly/dash/blob/358c5089c929b2e99996f9d4ee6ec634f65437fe/setup.py#L29
Anecdotally, I've been using Dash on Python 3.9 with no issues. Reporting as an issue as I can't see any mention of Python 3.8 and 3.9 compatibility so far!
--- END ISSUE ---
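For reference, a trimmed sketch of the `setup()` call with the missing trove classifiers added (all other keywords elided):

```python
from setuptools import setup

setup(
    name="dash",
    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*",
    classifiers=[
        # ...existing entries elided...
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
    ],
)
```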
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import io
2 from setuptools import setup, find_packages
3
4 main_ns = {}
5 exec(open("dash/version.py").read(), main_ns) # pylint: disable=exec-used
6
7
8 def read_req_file(req_type):
9 with open("requires-{}.txt".format(req_type)) as fp:
10 requires = (line.strip() for line in fp)
11 return [req for req in requires if req and not req.startswith("#")]
12
13
14 setup(
15 name="dash",
16 version=main_ns["__version__"],
17 author="Chris Parmer",
18 author_email="[email protected]",
19 packages=find_packages(exclude=["tests*"]),
20 include_package_data=True,
21 license="MIT",
22 description=(
23 "A Python framework for building reactive web-apps. "
24 "Developed by Plotly."
25 ),
26 long_description=io.open("README.md", encoding="utf-8").read(),
27 long_description_content_type="text/markdown",
28 install_requires=read_req_file("install"),
29 python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*",
30 extras_require={
31 "dev": read_req_file("dev"),
32 "testing": read_req_file("testing"),
33 },
34 entry_points={
35 "console_scripts": [
36 "dash-generate-components = "
37 "dash.development.component_generator:cli",
38 "renderer = dash.development.build_process:renderer",
39 ],
40 "pytest11": ["dash = dash.testing.plugin"],
41 },
42 url="https://plotly.com/dash",
43 classifiers=[
44 "Development Status :: 5 - Production/Stable",
45 "Environment :: Web Environment",
46 "Framework :: Dash",
47 "Framework :: Flask",
48 "Intended Audience :: Developers",
49 "Intended Audience :: Education",
50 "Intended Audience :: Financial and Insurance Industry",
51 "Intended Audience :: Healthcare Industry",
52 "Intended Audience :: Manufacturing",
53 "Intended Audience :: Science/Research",
54 "License :: OSI Approved :: MIT License",
55 "Programming Language :: Python",
56 "Programming Language :: Python :: 2",
57 "Programming Language :: Python :: 2.7",
58 "Programming Language :: Python :: 3",
59 "Programming Language :: Python :: 3.3",
60 "Programming Language :: Python :: 3.4",
61 "Programming Language :: Python :: 3.5",
62 "Programming Language :: Python :: 3.6",
63 "Programming Language :: Python :: 3.7",
64 "Topic :: Database :: Front-Ends",
65 "Topic :: Office/Business :: Financial :: Spreadsheet",
66 "Topic :: Scientific/Engineering :: Visualization",
67 "Topic :: Software Development :: Libraries :: Application Frameworks",
68 "Topic :: Software Development :: Widget Sets",
69 ],
70 )
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -61,6 +61,8 @@
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
"Topic :: Database :: Front-Ends",
"Topic :: Office/Business :: Financial :: Spreadsheet",
"Topic :: Scientific/Engineering :: Visualization",
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -61,6 +61,8 @@\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n+ \"Programming Language :: Python :: 3.8\",\n+ \"Programming Language :: Python :: 3.9\",\n \"Topic :: Database :: Front-Ends\",\n \"Topic :: Office/Business :: Financial :: Spreadsheet\",\n \"Topic :: Scientific/Engineering :: Visualization\",\n", "issue": "Missing classifiers for Python 3.8/3.9\nPython 3.8 and 3.9 are missing from the classifiers in `setup.py`:\r\n\r\nhttps://github.com/plotly/dash/blob/358c5089c929b2e99996f9d4ee6ec634f65437fe/setup.py#L55-L63\r\n\r\nBut there is no restriction to 3.7 or below in `python_requires`:\r\nhttps://github.com/plotly/dash/blob/358c5089c929b2e99996f9d4ee6ec634f65437fe/setup.py#L29\r\n\r\nAnecdotally, I've been using Dash on Python 3.9 with no issues. Reporting as an issue as I can't see any mention of Python 3.8 and 3.9 compatibility so far!\n", "before_files": [{"content": "import io\nfrom setuptools import setup, find_packages\n\nmain_ns = {}\nexec(open(\"dash/version.py\").read(), main_ns) # pylint: disable=exec-used\n\n\ndef read_req_file(req_type):\n with open(\"requires-{}.txt\".format(req_type)) as fp:\n requires = (line.strip() for line in fp)\n return [req for req in requires if req and not req.startswith(\"#\")]\n\n\nsetup(\n name=\"dash\",\n version=main_ns[\"__version__\"],\n author=\"Chris Parmer\",\n author_email=\"[email protected]\",\n packages=find_packages(exclude=[\"tests*\"]),\n include_package_data=True,\n license=\"MIT\",\n description=(\n \"A Python framework for building reactive web-apps. \"\n \"Developed by Plotly.\"\n ),\n long_description=io.open(\"README.md\", encoding=\"utf-8\").read(),\n long_description_content_type=\"text/markdown\",\n install_requires=read_req_file(\"install\"),\n python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*\",\n extras_require={\n \"dev\": read_req_file(\"dev\"),\n \"testing\": read_req_file(\"testing\"),\n },\n entry_points={\n \"console_scripts\": [\n \"dash-generate-components = \"\n \"dash.development.component_generator:cli\",\n \"renderer = dash.development.build_process:renderer\",\n ],\n \"pytest11\": [\"dash = dash.testing.plugin\"],\n },\n url=\"https://plotly.com/dash\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Web Environment\",\n \"Framework :: Dash\",\n \"Framework :: Flask\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Education\",\n \"Intended Audience :: Financial and Insurance Industry\",\n \"Intended Audience :: Healthcare Industry\",\n \"Intended Audience :: Manufacturing\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Topic :: Database :: Front-Ends\",\n \"Topic :: Office/Business :: Financial :: Spreadsheet\",\n \"Topic :: Scientific/Engineering :: Visualization\",\n \"Topic :: Software Development :: Libraries :: Application Frameworks\",\n \"Topic :: Software Development :: Widget Sets\",\n ],\n)\n", "path": "setup.py"}], 
"after_files": [{"content": "import io\nfrom setuptools import setup, find_packages\n\nmain_ns = {}\nexec(open(\"dash/version.py\").read(), main_ns) # pylint: disable=exec-used\n\n\ndef read_req_file(req_type):\n with open(\"requires-{}.txt\".format(req_type)) as fp:\n requires = (line.strip() for line in fp)\n return [req for req in requires if req and not req.startswith(\"#\")]\n\n\nsetup(\n name=\"dash\",\n version=main_ns[\"__version__\"],\n author=\"Chris Parmer\",\n author_email=\"[email protected]\",\n packages=find_packages(exclude=[\"tests*\"]),\n include_package_data=True,\n license=\"MIT\",\n description=(\n \"A Python framework for building reactive web-apps. \"\n \"Developed by Plotly.\"\n ),\n long_description=io.open(\"README.md\", encoding=\"utf-8\").read(),\n long_description_content_type=\"text/markdown\",\n install_requires=read_req_file(\"install\"),\n python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*\",\n extras_require={\n \"dev\": read_req_file(\"dev\"),\n \"testing\": read_req_file(\"testing\"),\n },\n entry_points={\n \"console_scripts\": [\n \"dash-generate-components = \"\n \"dash.development.component_generator:cli\",\n \"renderer = dash.development.build_process:renderer\",\n ],\n \"pytest11\": [\"dash = dash.testing.plugin\"],\n },\n url=\"https://plotly.com/dash\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Web Environment\",\n \"Framework :: Dash\",\n \"Framework :: Flask\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Education\",\n \"Intended Audience :: Financial and Insurance Industry\",\n \"Intended Audience :: Healthcare Industry\",\n \"Intended Audience :: Manufacturing\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Topic :: Database :: Front-Ends\",\n \"Topic :: Office/Business :: Financial :: Spreadsheet\",\n \"Topic :: Scientific/Engineering :: Visualization\",\n \"Topic :: Software Development :: Libraries :: Application Frameworks\",\n \"Topic :: Software Development :: Widget Sets\",\n ],\n)\n", "path": "setup.py"}]} | 1,186 | 127 |
gh_patches_debug_13492 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-2642 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Spider sheetz is broken
During the global build at 2021-08-11-14-42-19, spider **sheetz** failed with **526 features** and **1 error**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-08-11-14-42-19/logs/sheetz.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-08-11-14-42-19/output/sheetz.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-08-11-14-42-19/output/sheetz.geojson))
--- END ISSUE ---
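The traceback itself isn't reproduced here, but one common failure mode for store scrapers is `float(None)` raising `TypeError` when a record lacks coordinates — that reading is an assumption; the linked log is authoritative. A defensive sketch:

```python
def parse_coord(value):
    """Return the coordinate as a float, or None when the store
    record has no usable latitude/longitude value."""
    try:
        return float(value)
    except (TypeError, ValueError):
        return None
```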
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `locations/spiders/sheetz.py`
Content:
```
1 import json
2 import re
3 import scrapy
4 from locations.items import GeojsonPointItem
5
6
7 class SheetzSpider(scrapy.Spider):
8 name = "sheetz"
9 item_attributes = {'brand': "Sheetz"}
10 allowed_domains = ["orderz.sheetz.com"]
11 start_urls = (
12 "https://orderz.sheetz.com/sas/store",
13 )
14
15 def parse(self, response):
16 stores = json.loads(response.body_as_unicode())
17
18 for store in stores:
19 properties = {
20 'addr_full': store['address'],
21 'city': store['city'],
22 'state': store['state'],
23 'postcode': store['zip'],
24 'ref': store['storeNumber'],
25 'phone': store.get('phone'),
26 'website': 'https://orderz.sheetz.com/#/main/location/store/'+store['storeNumber'],
27 'lat': float(store['latitude']),
28 'lon': float(store['longitude']),
29 'opening_hours': '24/7' if store['open24x7'] else None,
30 'extras': {
31 'amenity:chargingstation': store['evCharger'],
32 'amenity:fuel': True,
33 'atm': store['atm'],
34 'car_wash': store['carWash'],
35 'fax': store['fax'] if 'fax' in store else None,
36 'fuel:diesel': store['diesel'],
37 'fuel:e15': store['e15'],
38 'fuel:e85': store['e85'],
39 'fuel:kerosene': store['kerosene'],
40 'fuel:propane': store['propane'],
41 }
42 }
43
44 yield GeojsonPointItem(**properties)
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/locations/spiders/sheetz.py b/locations/spiders/sheetz.py
--- a/locations/spiders/sheetz.py
+++ b/locations/spiders/sheetz.py
@@ -24,8 +24,8 @@
'ref': store['storeNumber'],
'phone': store.get('phone'),
'website': 'https://orderz.sheetz.com/#/main/location/store/'+store['storeNumber'],
- 'lat': float(store['latitude']),
- 'lon': float(store['longitude']),
+ 'lat': store['latitude'],
+ 'lon': store['longitude'],
'opening_hours': '24/7' if store['open24x7'] else None,
'extras': {
'amenity:chargingstation': store['evCharger'],
| {"golden_diff": "diff --git a/locations/spiders/sheetz.py b/locations/spiders/sheetz.py\n--- a/locations/spiders/sheetz.py\n+++ b/locations/spiders/sheetz.py\n@@ -24,8 +24,8 @@\n 'ref': store['storeNumber'],\n 'phone': store.get('phone'),\n 'website': 'https://orderz.sheetz.com/#/main/location/store/'+store['storeNumber'],\n- 'lat': float(store['latitude']),\n- 'lon': float(store['longitude']),\n+ 'lat': store['latitude'],\n+ 'lon': store['longitude'],\n 'opening_hours': '24/7' if store['open24x7'] else None,\n 'extras': {\n 'amenity:chargingstation': store['evCharger'],\n", "issue": "Spider sheetz is broken\nDuring the global build at 2021-08-11-14-42-19, spider **sheetz** failed with **526 features** and **1 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-08-11-14-42-19/logs/sheetz.txt) and [the output](https://data.alltheplaces.xyz/runs/2021-08-11-14-42-19/output/sheetz.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-08-11-14-42-19/output/sheetz.geojson))\n", "before_files": [{"content": "import json\nimport re\nimport scrapy\nfrom locations.items import GeojsonPointItem\n\n\nclass SheetzSpider(scrapy.Spider):\n name = \"sheetz\"\n item_attributes = {'brand': \"Sheetz\"}\n allowed_domains = [\"orderz.sheetz.com\"]\n start_urls = (\n \"https://orderz.sheetz.com/sas/store\",\n )\n\n def parse(self, response):\n stores = json.loads(response.body_as_unicode())\n\n for store in stores:\n properties = {\n 'addr_full': store['address'],\n 'city': store['city'],\n 'state': store['state'],\n 'postcode': store['zip'],\n 'ref': store['storeNumber'],\n 'phone': store.get('phone'),\n 'website': 'https://orderz.sheetz.com/#/main/location/store/'+store['storeNumber'],\n 'lat': float(store['latitude']),\n 'lon': float(store['longitude']),\n 'opening_hours': '24/7' if store['open24x7'] else None,\n 'extras': {\n 'amenity:chargingstation': store['evCharger'],\n 'amenity:fuel': True,\n 'atm': store['atm'],\n 'car_wash': store['carWash'],\n 'fax': store['fax'] if 'fax' in store else None,\n 'fuel:diesel': store['diesel'],\n 'fuel:e15': store['e15'],\n 'fuel:e85': store['e85'],\n 'fuel:kerosene': store['kerosene'],\n 'fuel:propane': store['propane'],\n }\n }\n\n yield GeojsonPointItem(**properties)\n", "path": "locations/spiders/sheetz.py"}], "after_files": [{"content": "import json\nimport re\nimport scrapy\nfrom locations.items import GeojsonPointItem\n\n\nclass SheetzSpider(scrapy.Spider):\n name = \"sheetz\"\n item_attributes = {'brand': \"Sheetz\"}\n allowed_domains = [\"orderz.sheetz.com\"]\n start_urls = (\n \"https://orderz.sheetz.com/sas/store\",\n )\n\n def parse(self, response):\n stores = json.loads(response.body_as_unicode())\n\n for store in stores:\n properties = {\n 'addr_full': store['address'],\n 'city': store['city'],\n 'state': store['state'],\n 'postcode': store['zip'],\n 'ref': store['storeNumber'],\n 'phone': store.get('phone'),\n 'website': 'https://orderz.sheetz.com/#/main/location/store/'+store['storeNumber'],\n 'lat': store['latitude'],\n 'lon': store['longitude'],\n 'opening_hours': '24/7' if store['open24x7'] else None,\n 'extras': {\n 'amenity:chargingstation': store['evCharger'],\n 'amenity:fuel': True,\n 'atm': store['atm'],\n 'car_wash': store['carWash'],\n 'fax': store['fax'] if 'fax' in store else None,\n 'fuel:diesel': store['diesel'],\n 'fuel:e15': store['e15'],\n 'fuel:e85': store['e85'],\n 'fuel:kerosene': store['kerosene'],\n 'fuel:propane': store['propane'],\n }\n }\n\n yield 
GeojsonPointItem(**properties)\n", "path": "locations/spiders/sheetz.py"}]} | 892 | 177 |
gh_patches_debug_5235 | rasdani/github-patches | git_diff | rasterio__rasterio-618 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
YCbCr JPEG-in-TIFF breaks rio-info
One creates a YCbCr JPEG-in-TIFF with GDAL using the `photometric=YCbCr` and `compress=JPEG` options. But reading the TIFF's tags to get the compression method returns "YCbCr JPEG", a value that's not in `rasterio.enums.Compression`.
Reference: http://www.gdal.org/frmt_gtiff.html
Solution: normalize "YCbCr JPEG" to "JPEG" and add source color space to rio-info's output.
--- END ISSUE ---
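A minimal sketch of the proposed normalization — the enum is trimmed to a few members, and `"YCbCr JPEG"` is the GDAL-reported value quoted above:

```python
from enum import Enum


class Compression(Enum):
    jpeg = 'JPEG'
    lzw = 'LZW'
    none = 'NONE'


def normalize_compression(tag_value):
    # GDAL reports YCbCr JPEG-in-TIFF as "YCbCr JPEG"; fold it back
    # to plain "JPEG" before the enum lookup.
    if tag_value == 'YCbCr JPEG':
        tag_value = 'JPEG'
    return Compression(tag_value)
```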
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `rasterio/enums.py`
Content:
```
1
2 from enum import Enum, IntEnum
3
4
5 class ColorInterp(IntEnum):
6 undefined=0
7 grey=1
8 gray=1
9 palette=2
10 red=3
11 green=4
12 blue=5
13 alpha=6
14 hue=7
15 saturation=8
16 lightness=9
17 cyan=10
18 magenta=11
19 yellow=12
20 black=13
21
22
23 class Resampling(Enum):
24 nearest='NEAREST'
25 gauss='GAUSS'
26 cubic='CUBIC'
27 average='AVERAGE'
28 mode='MODE'
29 average_magphase='AVERAGE_MAGPHASE'
30 none='NONE'
31
32
33 class Compression(Enum):
34 jpeg='JPEG'
35 lzw='LZW'
36 packbits='PACKBITS'
37 deflate='DEFLATE'
38 ccittrle='CCITTRLE'
39 ccittfax3='CCITTFAX3'
40 ccittfax4='CCITTFAX4'
41 lzma='LZMA'
42 none='NONE'
43
44
45 class Interleaving(Enum):
46 pixel='PIXEL'
47 line='LINE'
48 band='BAND'
49
50
51 class MaskFlags(IntEnum):
52 all_valid=1
53 per_dataset=2
54 alpha=4
55 nodata=8
56
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/rasterio/enums.py b/rasterio/enums.py
--- a/rasterio/enums.py
+++ b/rasterio/enums.py
@@ -18,6 +18,9 @@
magenta=11
yellow=12
black=13
+ Y=14
+ Cb=15
+ Cr=16
class Resampling(Enum):
@@ -53,3 +56,14 @@
per_dataset=2
alpha=4
nodata=8
+
+
+class PhotometricInterp(Enum):
+ black='MINISBLACK'
+ white='MINISWHITE'
+ rgb='RGB'
+ cmyk='CMYK'
+ ycbcr='YCbCr'
+ cielab='CIELAB'
+ icclab='ICCLAB'
+ itulab='ITULAB'
| {"golden_diff": "diff --git a/rasterio/enums.py b/rasterio/enums.py\n--- a/rasterio/enums.py\n+++ b/rasterio/enums.py\n@@ -18,6 +18,9 @@\n magenta=11\n yellow=12\n black=13\n+ Y=14\n+ Cb=15\n+ Cr=16\n \n \n class Resampling(Enum):\n@@ -53,3 +56,14 @@\n per_dataset=2\n alpha=4\n nodata=8\n+\n+\n+class PhotometricInterp(Enum):\n+ black='MINISBLACK'\n+ white='MINISWHITE'\n+ rgb='RGB'\n+ cmyk='CMYK'\n+ ycbcr='YCbCr'\n+ cielab='CIELAB'\n+ icclab='ICCLAB'\n+ itulab='ITULAB'\n", "issue": "YCbCr JPEG-in-TIFF breaks rio-info\nOne creates a YCbCr JPEG-in-TIFF with GDAL using `photometric=YCbCr` and `compress=JPEG` options. But reading the TIFFs tags to get the compression method returns \"YCbCr JPEG\", a value that's not in `rasterio.enums.Compression`.\n\nReference: http://www.gdal.org/frmt_gtiff.html\n\nSolution: normalize \"YCbCr JPEG\" to \"JPEG\" and add source color space to rio-info's output.\n\n", "before_files": [{"content": "\nfrom enum import Enum, IntEnum\n\n\nclass ColorInterp(IntEnum):\n undefined=0\n grey=1\n gray=1\n palette=2\n red=3\n green=4\n blue=5\n alpha=6\n hue=7\n saturation=8\n lightness=9\n cyan=10\n magenta=11\n yellow=12\n black=13\n\n\nclass Resampling(Enum):\n nearest='NEAREST'\n gauss='GAUSS'\n cubic='CUBIC'\n average='AVERAGE'\n mode='MODE'\n average_magphase='AVERAGE_MAGPHASE'\n none='NONE'\n\n\nclass Compression(Enum):\n jpeg='JPEG'\n lzw='LZW'\n packbits='PACKBITS'\n deflate='DEFLATE'\n ccittrle='CCITTRLE'\n ccittfax3='CCITTFAX3'\n ccittfax4='CCITTFAX4'\n lzma='LZMA'\n none='NONE'\n\n\nclass Interleaving(Enum):\n pixel='PIXEL'\n line='LINE'\n band='BAND'\n\n\nclass MaskFlags(IntEnum):\n all_valid=1\n per_dataset=2\n alpha=4\n nodata=8\n", "path": "rasterio/enums.py"}], "after_files": [{"content": "\nfrom enum import Enum, IntEnum\n\n\nclass ColorInterp(IntEnum):\n undefined=0\n grey=1\n gray=1\n palette=2\n red=3\n green=4\n blue=5\n alpha=6\n hue=7\n saturation=8\n lightness=9\n cyan=10\n magenta=11\n yellow=12\n black=13\n Y=14\n Cb=15\n Cr=16\n\n\nclass Resampling(Enum):\n nearest='NEAREST'\n gauss='GAUSS'\n cubic='CUBIC'\n average='AVERAGE'\n mode='MODE'\n average_magphase='AVERAGE_MAGPHASE'\n none='NONE'\n\n\nclass Compression(Enum):\n jpeg='JPEG'\n lzw='LZW'\n packbits='PACKBITS'\n deflate='DEFLATE'\n ccittrle='CCITTRLE'\n ccittfax3='CCITTFAX3'\n ccittfax4='CCITTFAX4'\n lzma='LZMA'\n none='NONE'\n\n\nclass Interleaving(Enum):\n pixel='PIXEL'\n line='LINE'\n band='BAND'\n\n\nclass MaskFlags(IntEnum):\n all_valid=1\n per_dataset=2\n alpha=4\n nodata=8\n\n\nclass PhotometricInterp(Enum):\n black='MINISBLACK'\n white='MINISWHITE'\n rgb='RGB'\n cmyk='CMYK'\n ycbcr='YCbCr'\n cielab='CIELAB'\n icclab='ICCLAB'\n itulab='ITULAB'\n", "path": "rasterio/enums.py"}]} | 762 | 207 |
gh_patches_debug_19931 | rasdani/github-patches | git_diff | CiviWiki__OpenCiviWiki-1089 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Migration of frontend_views
### Idea summary
Elaboration of issue #1070
### Further details
As mentioned in issue #1070, we need to migrate from
```py
url(r"^")
```
to
```py
path()
```
And for frontend_views the following points need to be kept in mind
- [ ] usage of django.urls module for path and include
- [ ] Including the following views using the include method
- [ ] About_view
- [ ] support_us_view
- [ ] how_it_works_view
- [ ] user_profile
- [ ] issue_thread
- [ ] base_view
- [ ] civi2csv
- [ ] use path for all of the above mentioned urls
- [ ] Use the same name for reverse match.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `project/frontend_views/urls.py`
Content:
```
1 from django.conf.urls import url
2 from . import views as v
3
4 urlpatterns = [
5 url(r"^about$", v.about_view, name="about"),
6 url(r"^support_us$", v.support_us_view, name="support us"),
7 url(r"^howitworks$", v.how_it_works_view, name="how it works"),
8 url(r"^profile/(?P<username>[a-zA-Z0-9-_]*)$", v.user_profile, name="profile"),
9 url(r"^profile/rep/(?P<username>\d+)$", v.user_profile, name="profile"),
10 url(r"^thread/(?P<thread_id>\w+)$", v.issue_thread, name="issue thread"),
11 url(r"^profile$", v.user_profile, name="default_profile"),
12 url(r"^$", v.base_view, name="base"),
13 url(r"^thread/(?P<thread_id>\w+)/csv$", v.civi2csv, name="civi2csv"),
14 ]
15
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/project/frontend_views/urls.py b/project/frontend_views/urls.py
--- a/project/frontend_views/urls.py
+++ b/project/frontend_views/urls.py
@@ -1,14 +1,13 @@
-from django.conf.urls import url
-from . import views as v
+from django.urls import path
+from frontend_views import views
urlpatterns = [
- url(r"^about$", v.about_view, name="about"),
- url(r"^support_us$", v.support_us_view, name="support us"),
- url(r"^howitworks$", v.how_it_works_view, name="how it works"),
- url(r"^profile/(?P<username>[a-zA-Z0-9-_]*)$", v.user_profile, name="profile"),
- url(r"^profile/rep/(?P<username>\d+)$", v.user_profile, name="profile"),
- url(r"^thread/(?P<thread_id>\w+)$", v.issue_thread, name="issue thread"),
- url(r"^profile$", v.user_profile, name="default_profile"),
- url(r"^$", v.base_view, name="base"),
- url(r"^thread/(?P<thread_id>\w+)/csv$", v.civi2csv, name="civi2csv"),
+ path("about/", views.about_view, name="about"),
+ path("support_us/", views.support_us_view, name="support us"),
+ path("howitworks/", views.how_it_works_view, name="how it works"),
+ path("profile/<str:username>/", views.user_profile, name="profile"),
+ path("thread/<int:thread_id>/", views.issue_thread, name="issue thread"),
+ path("profile/", views.user_profile, name="default_profile"),
+ path("", views.base_view, name="base"),
+ path("thread/<int:thread_id>/csv/", views.civi2csv, name="civi2csv"),
]
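The golden diff above replaces regex routes with `path()` converters. Below is a minimal sketch of the same idea, assuming the project's `frontend_views` app is importable; `re_path()` is shown for the one regex-only route (`rep/(?P<username>\d+)`) that the diff drops:

```python
# Illustrative mapping from url() regexes to path() converters; this is a
# sketch, not part of the golden diff above.
from django.urls import path, re_path

from frontend_views import views

urlpatterns = [
    # url(r"^thread/(?P<thread_id>\w+)$", ...) becomes a typed converter:
    path("thread/<int:thread_id>/", views.issue_thread, name="issue thread"),
    # A route that genuinely needs a regex can keep one via re_path():
    re_path(r"^profile/rep/(?P<username>\d+)$", views.user_profile, name="profile"),
]
```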
| {"golden_diff": "diff --git a/project/frontend_views/urls.py b/project/frontend_views/urls.py\n--- a/project/frontend_views/urls.py\n+++ b/project/frontend_views/urls.py\n@@ -1,14 +1,13 @@\n-from django.conf.urls import url\n-from . import views as v\n+from django.urls import path\n+from frontend_views import views\n \n urlpatterns = [\n- url(r\"^about$\", v.about_view, name=\"about\"),\n- url(r\"^support_us$\", v.support_us_view, name=\"support us\"),\n- url(r\"^howitworks$\", v.how_it_works_view, name=\"how it works\"),\n- url(r\"^profile/(?P<username>[a-zA-Z0-9-_]*)$\", v.user_profile, name=\"profile\"),\n- url(r\"^profile/rep/(?P<username>\\d+)$\", v.user_profile, name=\"profile\"),\n- url(r\"^thread/(?P<thread_id>\\w+)$\", v.issue_thread, name=\"issue thread\"),\n- url(r\"^profile$\", v.user_profile, name=\"default_profile\"),\n- url(r\"^$\", v.base_view, name=\"base\"),\n- url(r\"^thread/(?P<thread_id>\\w+)/csv$\", v.civi2csv, name=\"civi2csv\"),\n+ path(\"about/\", views.about_view, name=\"about\"),\n+ path(\"support_us/\", views.support_us_view, name=\"support us\"),\n+ path(\"howitworks/\", views.how_it_works_view, name=\"how it works\"),\n+ path(\"profile/<str:username>/\", views.user_profile, name=\"profile\"),\n+ path(\"thread/<int:thread_id>/\", views.issue_thread, name=\"issue thread\"),\n+ path(\"profile/\", views.user_profile, name=\"default_profile\"),\n+ path(\"\", views.base_view, name=\"base\"),\n+ path(\"thread/<int:thread_id>/csv/\", views.civi2csv, name=\"civi2csv\"),\n ]\n", "issue": "Migration of frontend_views \n### Idea summary\n\nElaboration of issue #1070\n\n### Further details\n\nAs mentioned in issue #1070, we need to migrate from\r\n```py\r\nurl(r\"^\")\r\n```\r\nto \r\n```py\r\npath()\r\n```\r\nAnd for frontend_views the following points need to be kept in mind\r\n- [ ] usage of django.urls module for path and include\r\n- [ ] Including the following views using the include method\r\n - [ ] About_view\r\n - [ ] support_us_view\r\n - [ ] how_it_works_view\r\n - [ ] user_profile\r\n - [ ] issue_thread\r\n - [ ] base_view\r\n - [ ] civi2csv\r\n - [ ] use path for all of the above mentioned urls\r\n - [ ] Use the same name for reverse match. \n", "before_files": [{"content": "from django.conf.urls import url\nfrom . 
import views as v\n\nurlpatterns = [\n url(r\"^about$\", v.about_view, name=\"about\"),\n url(r\"^support_us$\", v.support_us_view, name=\"support us\"),\n url(r\"^howitworks$\", v.how_it_works_view, name=\"how it works\"),\n url(r\"^profile/(?P<username>[a-zA-Z0-9-_]*)$\", v.user_profile, name=\"profile\"),\n url(r\"^profile/rep/(?P<username>\\d+)$\", v.user_profile, name=\"profile\"),\n url(r\"^thread/(?P<thread_id>\\w+)$\", v.issue_thread, name=\"issue thread\"),\n url(r\"^profile$\", v.user_profile, name=\"default_profile\"),\n url(r\"^$\", v.base_view, name=\"base\"),\n url(r\"^thread/(?P<thread_id>\\w+)/csv$\", v.civi2csv, name=\"civi2csv\"),\n]\n", "path": "project/frontend_views/urls.py"}], "after_files": [{"content": "from django.urls import path\nfrom frontend_views import views\n\nurlpatterns = [\n path(\"about/\", views.about_view, name=\"about\"),\n path(\"support_us/\", views.support_us_view, name=\"support us\"),\n path(\"howitworks/\", views.how_it_works_view, name=\"how it works\"),\n path(\"profile/<str:username>/\", views.user_profile, name=\"profile\"),\n path(\"thread/<int:thread_id>/\", views.issue_thread, name=\"issue thread\"),\n path(\"profile/\", views.user_profile, name=\"default_profile\"),\n path(\"\", views.base_view, name=\"base\"),\n path(\"thread/<int:thread_id>/csv/\", views.civi2csv, name=\"civi2csv\"),\n]\n", "path": "project/frontend_views/urls.py"}]} | 663 | 419 |
gh_patches_debug_36414 | rasdani/github-patches | git_diff | facebookresearch__hydra-1695 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
strict flag was removed from the compose API without a prior deprecation
Strict config composition functionality has become the default in Hydra 1.0 (See https://hydra.cc/docs/upgrades/0.11_to_1.0/strict_mode_flag_deprecated).
This flag was completely removed in Hydra 1.1.0.
Unfortunately, the Compose API strict flag was not deprecated and was thus an avoidable breaking change.
A follow-up PR will re-introduce the strict flag to the Compose API as a deprecated flag. That flag will be removed in the next major version of Hydra.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `hydra/experimental/compose.py`
Content:
```
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 # DEPRECATED: remove in 1.2
3 import warnings
4 from typing import List, Optional
5
6 from omegaconf import DictConfig
7
8
9 def compose(
10 config_name: Optional[str] = None,
11 overrides: List[str] = [],
12 return_hydra_config: bool = False,
13 ) -> DictConfig:
14 from hydra import compose as real_compose
15
16 warnings.warn(
17 category=UserWarning,
18 message="hydra.experimental.compose() is no longer experimental."
19 " Use hydra.compose()",
20 )
21 return real_compose(
22 config_name=config_name,
23 overrides=overrides,
24 return_hydra_config=return_hydra_config,
25 )
26
```
Path: `hydra/compose.py`
Content:
```
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 from typing import List, Optional
3
4 from omegaconf import DictConfig, open_dict
5
6 from hydra.core.global_hydra import GlobalHydra
7 from hydra.types import RunMode
8
9
10 def compose(
11 config_name: Optional[str] = None,
12 overrides: List[str] = [],
13 return_hydra_config: bool = False,
14 ) -> DictConfig:
15 """
16 :param config_name: the name of the config
17 (usually the file name without the .yaml extension)
18 :param overrides: list of overrides for config file
19 :param return_hydra_config: True to return the hydra config node in the result
20 :return: the composed config
21 """
22 assert (
23 GlobalHydra().is_initialized()
24 ), "GlobalHydra is not initialized, use @hydra.main() or call one of the hydra initialization methods first"
25
26 gh = GlobalHydra.instance()
27 assert gh.hydra is not None
28 cfg = gh.hydra.compose_config(
29 config_name=config_name,
30 overrides=overrides,
31 run_mode=RunMode.RUN,
32 from_shell=False,
33 with_log_configuration=False,
34 )
35 assert isinstance(cfg, DictConfig)
36
37 if not return_hydra_config:
38 if "hydra" in cfg:
39 with open_dict(cfg):
40 del cfg["hydra"]
41 return cfg
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/hydra/compose.py b/hydra/compose.py
--- a/hydra/compose.py
+++ b/hydra/compose.py
@@ -1,7 +1,9 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+import warnings
+from textwrap import dedent
from typing import List, Optional
-from omegaconf import DictConfig, open_dict
+from omegaconf import DictConfig, OmegaConf, open_dict
from hydra.core.global_hydra import GlobalHydra
from hydra.types import RunMode
@@ -11,12 +13,14 @@
config_name: Optional[str] = None,
overrides: List[str] = [],
return_hydra_config: bool = False,
+ strict: Optional[bool] = None,
) -> DictConfig:
"""
:param config_name: the name of the config
(usually the file name without the .yaml extension)
:param overrides: list of overrides for config file
:param return_hydra_config: True to return the hydra config node in the result
+ :param strict: DEPRECATED. If true, returned config has struct mode disabled.
:return: the composed config
"""
assert (
@@ -38,4 +42,18 @@
if "hydra" in cfg:
with open_dict(cfg):
del cfg["hydra"]
+
+ if strict is not None:
+ # DEPRECATED: remove in 1.2
+ warnings.warn(
+ dedent(
+ """\
+
+ The strict flag in the compose API is deprecated and will be removed in the next version of Hydra.
+ See https://hydra.cc/docs/upgrades/0.11_to_1.0/strict_mode_flag_deprecated for more info.
+ """
+ )
+ )
+ OmegaConf.set_struct(cfg, strict)
+
return cfg
diff --git a/hydra/experimental/compose.py b/hydra/experimental/compose.py
--- a/hydra/experimental/compose.py
+++ b/hydra/experimental/compose.py
@@ -10,6 +10,7 @@
config_name: Optional[str] = None,
overrides: List[str] = [],
return_hydra_config: bool = False,
+ strict: Optional[bool] = None,
) -> DictConfig:
from hydra import compose as real_compose
@@ -22,4 +23,5 @@
config_name=config_name,
overrides=overrides,
return_hydra_config=return_hydra_config,
+ strict=strict,
)
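The fix above deprecates the flag instead of dropping it. A standalone sketch of that pattern, simplified and not Hydra's actual code: default the keyword to `None` and warn only when a caller sets it explicitly.

```python
import warnings
from textwrap import dedent


def compose(config_name=None, strict=None):
    if strict is not None:  # only explicit use triggers the warning
        warnings.warn(
            dedent(
                """\
                The strict flag is deprecated and will be removed
                in a future release.
                """
            )
        )
    return {"config_name": config_name}


compose("app")               # silent: the default sentinel is untouched
compose("app", strict=True)  # emits a UserWarning with the message above
```

Keeping `None` as the sentinel preserves the old behavior for callers who never passed the flag, which is what makes the removal non-breaking until the next major release.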
| {"golden_diff": "diff --git a/hydra/compose.py b/hydra/compose.py\n--- a/hydra/compose.py\n+++ b/hydra/compose.py\n@@ -1,7 +1,9 @@\n # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n+import warnings\n+from textwrap import dedent\n from typing import List, Optional\n \n-from omegaconf import DictConfig, open_dict\n+from omegaconf import DictConfig, OmegaConf, open_dict\n \n from hydra.core.global_hydra import GlobalHydra\n from hydra.types import RunMode\n@@ -11,12 +13,14 @@\n config_name: Optional[str] = None,\n overrides: List[str] = [],\n return_hydra_config: bool = False,\n+ strict: Optional[bool] = None,\n ) -> DictConfig:\n \"\"\"\n :param config_name: the name of the config\n (usually the file name without the .yaml extension)\n :param overrides: list of overrides for config file\n :param return_hydra_config: True to return the hydra config node in the result\n+ :param strict: DEPRECATED. If true, returned config has struct mode disabled.\n :return: the composed config\n \"\"\"\n assert (\n@@ -38,4 +42,18 @@\n if \"hydra\" in cfg:\n with open_dict(cfg):\n del cfg[\"hydra\"]\n+\n+ if strict is not None:\n+ # DEPRECATED: remove in 1.2\n+ warnings.warn(\n+ dedent(\n+ \"\"\"\\\n+\n+ The strict flag in the compose API is deprecated and will be removed in the next version of Hydra.\n+ See https://hydra.cc/docs/upgrades/0.11_to_1.0/strict_mode_flag_deprecated for more info.\n+ \"\"\"\n+ )\n+ )\n+ OmegaConf.set_struct(cfg, strict)\n+\n return cfg\ndiff --git a/hydra/experimental/compose.py b/hydra/experimental/compose.py\n--- a/hydra/experimental/compose.py\n+++ b/hydra/experimental/compose.py\n@@ -10,6 +10,7 @@\n config_name: Optional[str] = None,\n overrides: List[str] = [],\n return_hydra_config: bool = False,\n+ strict: Optional[bool] = None,\n ) -> DictConfig:\n from hydra import compose as real_compose\n \n@@ -22,4 +23,5 @@\n config_name=config_name,\n overrides=overrides,\n return_hydra_config=return_hydra_config,\n+ strict=strict,\n )\n", "issue": "strict flag was removed from the compose API without a prior deprecation\nStrict config composition functionality has become the default in Hydra 1.0 (See https://hydra.cc/docs/upgrades/0.11_to_1.0/strict_mode_flag_deprecated).\r\n\r\nThis flag was completely removed in Hydra 1.1.0.\r\nUnfortunately, the Compose API strict flag was not deprecated and was thus an avoidable breaking change.\r\n\r\nA followup PR will re-introduce the strict flag to the Compose API as a deprecated flag. That flag will be removed in the major version of Hydra.\n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# DEPRECATED: remove in 1.2\nimport warnings\nfrom typing import List, Optional\n\nfrom omegaconf import DictConfig\n\n\ndef compose(\n config_name: Optional[str] = None,\n overrides: List[str] = [],\n return_hydra_config: bool = False,\n) -> DictConfig:\n from hydra import compose as real_compose\n\n warnings.warn(\n category=UserWarning,\n message=\"hydra.experimental.compose() is no longer experimental.\"\n \" Use hydra.compose()\",\n )\n return real_compose(\n config_name=config_name,\n overrides=overrides,\n return_hydra_config=return_hydra_config,\n )\n", "path": "hydra/experimental/compose.py"}, {"content": "# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved\nfrom typing import List, Optional\n\nfrom omegaconf import DictConfig, open_dict\n\nfrom hydra.core.global_hydra import GlobalHydra\nfrom hydra.types import RunMode\n\n\ndef compose(\n config_name: Optional[str] = None,\n overrides: List[str] = [],\n return_hydra_config: bool = False,\n) -> DictConfig:\n \"\"\"\n :param config_name: the name of the config\n (usually the file name without the .yaml extension)\n :param overrides: list of overrides for config file\n :param return_hydra_config: True to return the hydra config node in the result\n :return: the composed config\n \"\"\"\n assert (\n GlobalHydra().is_initialized()\n ), \"GlobalHydra is not initialized, use @hydra.main() or call one of the hydra initialization methods first\"\n\n gh = GlobalHydra.instance()\n assert gh.hydra is not None\n cfg = gh.hydra.compose_config(\n config_name=config_name,\n overrides=overrides,\n run_mode=RunMode.RUN,\n from_shell=False,\n with_log_configuration=False,\n )\n assert isinstance(cfg, DictConfig)\n\n if not return_hydra_config:\n if \"hydra\" in cfg:\n with open_dict(cfg):\n del cfg[\"hydra\"]\n return cfg\n", "path": "hydra/compose.py"}], "after_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# DEPRECATED: remove in 1.2\nimport warnings\nfrom typing import List, Optional\n\nfrom omegaconf import DictConfig\n\n\ndef compose(\n config_name: Optional[str] = None,\n overrides: List[str] = [],\n return_hydra_config: bool = False,\n strict: Optional[bool] = None,\n) -> DictConfig:\n from hydra import compose as real_compose\n\n warnings.warn(\n category=UserWarning,\n message=\"hydra.experimental.compose() is no longer experimental.\"\n \" Use hydra.compose()\",\n )\n return real_compose(\n config_name=config_name,\n overrides=overrides,\n return_hydra_config=return_hydra_config,\n strict=strict,\n )\n", "path": "hydra/experimental/compose.py"}, {"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport warnings\nfrom textwrap import dedent\nfrom typing import List, Optional\n\nfrom omegaconf import DictConfig, OmegaConf, open_dict\n\nfrom hydra.core.global_hydra import GlobalHydra\nfrom hydra.types import RunMode\n\n\ndef compose(\n config_name: Optional[str] = None,\n overrides: List[str] = [],\n return_hydra_config: bool = False,\n strict: Optional[bool] = None,\n) -> DictConfig:\n \"\"\"\n :param config_name: the name of the config\n (usually the file name without the .yaml extension)\n :param overrides: list of overrides for config file\n :param return_hydra_config: True to return the hydra config node in the result\n :param strict: DEPRECATED. 
If true, returned config has struct mode disabled.\n :return: the composed config\n \"\"\"\n assert (\n GlobalHydra().is_initialized()\n ), \"GlobalHydra is not initialized, use @hydra.main() or call one of the hydra initialization methods first\"\n\n gh = GlobalHydra.instance()\n assert gh.hydra is not None\n cfg = gh.hydra.compose_config(\n config_name=config_name,\n overrides=overrides,\n run_mode=RunMode.RUN,\n from_shell=False,\n with_log_configuration=False,\n )\n assert isinstance(cfg, DictConfig)\n\n if not return_hydra_config:\n if \"hydra\" in cfg:\n with open_dict(cfg):\n del cfg[\"hydra\"]\n\n if strict is not None:\n # DEPRECATED: remove in 1.2\n warnings.warn(\n dedent(\n \"\"\"\\\n\n The strict flag in the compose API is deprecated and will be removed in the next version of Hydra.\n See https://hydra.cc/docs/upgrades/0.11_to_1.0/strict_mode_flag_deprecated for more info.\n \"\"\"\n )\n )\n OmegaConf.set_struct(cfg, strict)\n\n return cfg\n", "path": "hydra/compose.py"}]} | 985 | 582 |
gh_patches_debug_22393 | rasdani/github-patches | git_diff | pyload__pyload-1508 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[SkipRev] broken - cannot import name SkipDownload
SkipRev plugin failed to import with the following error:
Error importing SkipRev: cannot import name SkipDownload

I already tried the following, without success: http://forum.pyload.org/viewtopic.php?f=7&t=4335
Debian GNU/Linux 7.8 (wheezy) x64
Python 2.7.3
pyLoad 0.4.9
SkipRev 0.30
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `module/plugins/hooks/SkipRev.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 import re
4 import urllib
5 import urlparse
6
7 from types import MethodType
8
9 from module.PyFile import PyFile
10 from module.plugins.internal.Hook import Hook
11 from module.plugins.internal.Plugin import SkipDownload
12
13
14 class SkipRev(Hook):
15 __name__ = "SkipRev"
16 __type__ = "hook"
17 __version__ = "0.30"
18
19 __config__ = [("mode" , "Auto;Manual", "Choose recovery archives to skip" , "Auto"),
20 ("revtokeep", "int" , "Number of recovery archives to keep for package", 0 )]
21
22 __description__ = """Skip recovery archives (.rev)"""
23 __license__ = "GPLv3"
24 __authors__ = [("Walter Purcaro", "[email protected]")]
25
26
27 interval = 0 #@TODO: Remove in 0.4.10
28
29
30 def setup(self):
31 self.info = {} #@TODO: Remove in 0.4.10
32
33
34 @staticmethod
35 def _setup(self):
36 self.pyfile.plugin._setup()
37 if self.pyfile.hasStatus("skipped"):
38 raise SkipDownload(self.pyfile.statusname or self.pyfile.pluginname)
39
40
41 def _name(self, pyfile):
42 if hasattr(pyfile.pluginmodule, "getInfo"): #@NOTE: getInfo is deprecated in 0.4.10
43 return pyfile.pluginmodule.getInfo([pyfile.url]).next()[0]
44 else:
45 self.logWarning("Unable to grab file name")
46 return urlparse.urlparse(urllib.unquote(pyfile.url)).path.split('/')[-1]
47
48
49 def _pyfile(self, link):
50 return PyFile(self.core.files,
51 link.fid,
52 link.url,
53 link.name,
54 link.size,
55 link.status,
56 link.error,
57 link.plugin,
58 link.packageID,
59 link.order)
60
61
62 def downloadPreparing(self, pyfile):
63 name = self._name(pyfile)
64
65 if pyfile.statusname is _("unskipped") or not name.endswith(".rev") or not ".part" in name:
66 return
67
68 revtokeep = -1 if self.getConfig('mode') == "Auto" else self.getConfig('revtokeep')
69
70 if revtokeep:
71 status_list = (1, 4, 8, 9, 14) if revtokeep < 0 else (1, 3, 4, 8, 9, 14)
72 pyname = re.compile(r'%s\.part\d+\.rev$' % name.rsplit('.', 2)[0].replace('.', '\.'))
73
74 queued = [True for link in self.core.api.getPackageData(pyfile.package().id).links \
75 if link.status not in status_list and pyname.match(link.name)].count(True)
76
77 if not queued or queued < revtokeep: #: keep one rev at least in auto mode
78 return
79
80 pyfile.setCustomStatus("SkipRev", "skipped")
81
82 if not hasattr(pyfile.plugin, "_setup"):
83 # Work-around: inject status checker inside the preprocessing routine of the plugin
84 pyfile.plugin._setup = pyfile.plugin.setup
85 pyfile.plugin.setup = MethodType(self._setup, pyfile.plugin)
86
87
88 def downloadFailed(self, pyfile):
89 #: Check if pyfile is still "failed",
90 # maybe might has been restarted in meantime
91 if pyfile.status != 8 or pyfile.name.rsplit('.', 1)[-1].strip() not in ("rar", "rev"):
92 return
93
94 revtokeep = -1 if self.getConfig('mode') == "Auto" else self.getConfig('revtokeep')
95
96 if not revtokeep:
97 return
98
99 pyname = re.compile(r'%s\.part\d+\.rev$' % pyfile.name.rsplit('.', 2)[0].replace('.', '\.'))
100
101 for link in self.core.api.getPackageData(pyfile.package().id).links:
102 if link.status is 4 and pyname.match(link.name):
103 pylink = self._pyfile(link)
104
105 if revtokeep > -1 or pyfile.name.endswith(".rev"):
106 pylink.setStatus("queued")
107 else:
108 pylink.setCustomStatus(_("unskipped"), "queued")
109
110 self.core.files.save()
111 pylink.release()
112 return
113
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/module/plugins/hooks/SkipRev.py b/module/plugins/hooks/SkipRev.py
--- a/module/plugins/hooks/SkipRev.py
+++ b/module/plugins/hooks/SkipRev.py
@@ -8,13 +8,13 @@
from module.PyFile import PyFile
from module.plugins.internal.Hook import Hook
-from module.plugins.internal.Plugin import SkipDownload
+from module.plugins.internal.Plugin import Skip
class SkipRev(Hook):
__name__ = "SkipRev"
__type__ = "hook"
- __version__ = "0.30"
+ __version__ = "0.31"
__config__ = [("mode" , "Auto;Manual", "Choose recovery archives to skip" , "Auto"),
("revtokeep", "int" , "Number of recovery archives to keep for package", 0 )]
@@ -35,7 +35,7 @@
def _setup(self):
self.pyfile.plugin._setup()
if self.pyfile.hasStatus("skipped"):
- raise SkipDownload(self.pyfile.statusname or self.pyfile.pluginname)
+ raise Skip(self.pyfile.statusname or self.pyfile.pluginname)
def _name(self, pyfile):
| {"golden_diff": "diff --git a/module/plugins/hooks/SkipRev.py b/module/plugins/hooks/SkipRev.py\n--- a/module/plugins/hooks/SkipRev.py\n+++ b/module/plugins/hooks/SkipRev.py\n@@ -8,13 +8,13 @@\n \n from module.PyFile import PyFile\n from module.plugins.internal.Hook import Hook\n-from module.plugins.internal.Plugin import SkipDownload\n+from module.plugins.internal.Plugin import Skip\n \n \n class SkipRev(Hook):\n __name__ = \"SkipRev\"\n __type__ = \"hook\"\n- __version__ = \"0.30\"\n+ __version__ = \"0.31\"\n \n __config__ = [(\"mode\" , \"Auto;Manual\", \"Choose recovery archives to skip\" , \"Auto\"),\n (\"revtokeep\", \"int\" , \"Number of recovery archives to keep for package\", 0 )]\n@@ -35,7 +35,7 @@\n def _setup(self):\n self.pyfile.plugin._setup()\n if self.pyfile.hasStatus(\"skipped\"):\n- raise SkipDownload(self.pyfile.statusname or self.pyfile.pluginname)\n+ raise Skip(self.pyfile.statusname or self.pyfile.pluginname)\n \n \n def _name(self, pyfile):\n", "issue": "[SkipRev] broken - cannot import name SkipDownload\nSkipRev plugin failed to import with following error:\nErrore durante l'importazione SkipRev: cannot import name SkipDownload\n\nI already tried following, without success: http://forum.pyload.org/viewtopic.php?f=7&t=4335\n\nDebian GNU/Linux 7.8 (wheezy) x64\nPython 2.7.3\npyLoad 0.4.9\nSkipRev 0.30\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport re\nimport urllib\nimport urlparse\n\nfrom types import MethodType\n\nfrom module.PyFile import PyFile\nfrom module.plugins.internal.Hook import Hook\nfrom module.plugins.internal.Plugin import SkipDownload\n\n\nclass SkipRev(Hook):\n __name__ = \"SkipRev\"\n __type__ = \"hook\"\n __version__ = \"0.30\"\n\n __config__ = [(\"mode\" , \"Auto;Manual\", \"Choose recovery archives to skip\" , \"Auto\"),\n (\"revtokeep\", \"int\" , \"Number of recovery archives to keep for package\", 0 )]\n\n __description__ = \"\"\"Skip recovery archives (.rev)\"\"\"\n __license__ = \"GPLv3\"\n __authors__ = [(\"Walter Purcaro\", \"[email protected]\")]\n\n\n interval = 0 #@TODO: Remove in 0.4.10\n\n\n def setup(self):\n self.info = {} #@TODO: Remove in 0.4.10\n\n\n @staticmethod\n def _setup(self):\n self.pyfile.plugin._setup()\n if self.pyfile.hasStatus(\"skipped\"):\n raise SkipDownload(self.pyfile.statusname or self.pyfile.pluginname)\n\n\n def _name(self, pyfile):\n if hasattr(pyfile.pluginmodule, \"getInfo\"): #@NOTE: getInfo is deprecated in 0.4.10\n return pyfile.pluginmodule.getInfo([pyfile.url]).next()[0]\n else:\n self.logWarning(\"Unable to grab file name\")\n return urlparse.urlparse(urllib.unquote(pyfile.url)).path.split('/')[-1]\n\n\n def _pyfile(self, link):\n return PyFile(self.core.files,\n link.fid,\n link.url,\n link.name,\n link.size,\n link.status,\n link.error,\n link.plugin,\n link.packageID,\n link.order)\n\n\n def downloadPreparing(self, pyfile):\n name = self._name(pyfile)\n\n if pyfile.statusname is _(\"unskipped\") or not name.endswith(\".rev\") or not \".part\" in name:\n return\n\n revtokeep = -1 if self.getConfig('mode') == \"Auto\" else self.getConfig('revtokeep')\n\n if revtokeep:\n status_list = (1, 4, 8, 9, 14) if revtokeep < 0 else (1, 3, 4, 8, 9, 14)\n pyname = re.compile(r'%s\\.part\\d+\\.rev$' % name.rsplit('.', 2)[0].replace('.', '\\.'))\n\n queued = [True for link in self.core.api.getPackageData(pyfile.package().id).links \\\n if link.status not in status_list and pyname.match(link.name)].count(True)\n\n if not queued or queued < revtokeep: #: keep one rev at least in auto mode\n 
return\n\n pyfile.setCustomStatus(\"SkipRev\", \"skipped\")\n\n if not hasattr(pyfile.plugin, \"_setup\"):\n # Work-around: inject status checker inside the preprocessing routine of the plugin\n pyfile.plugin._setup = pyfile.plugin.setup\n pyfile.plugin.setup = MethodType(self._setup, pyfile.plugin)\n\n\n def downloadFailed(self, pyfile):\n #: Check if pyfile is still \"failed\",\n # maybe might has been restarted in meantime\n if pyfile.status != 8 or pyfile.name.rsplit('.', 1)[-1].strip() not in (\"rar\", \"rev\"):\n return\n\n revtokeep = -1 if self.getConfig('mode') == \"Auto\" else self.getConfig('revtokeep')\n\n if not revtokeep:\n return\n\n pyname = re.compile(r'%s\\.part\\d+\\.rev$' % pyfile.name.rsplit('.', 2)[0].replace('.', '\\.'))\n\n for link in self.core.api.getPackageData(pyfile.package().id).links:\n if link.status is 4 and pyname.match(link.name):\n pylink = self._pyfile(link)\n\n if revtokeep > -1 or pyfile.name.endswith(\".rev\"):\n pylink.setStatus(\"queued\")\n else:\n pylink.setCustomStatus(_(\"unskipped\"), \"queued\")\n\n self.core.files.save()\n pylink.release()\n return\n", "path": "module/plugins/hooks/SkipRev.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport re\nimport urllib\nimport urlparse\n\nfrom types import MethodType\n\nfrom module.PyFile import PyFile\nfrom module.plugins.internal.Hook import Hook\nfrom module.plugins.internal.Plugin import Skip\n\n\nclass SkipRev(Hook):\n __name__ = \"SkipRev\"\n __type__ = \"hook\"\n __version__ = \"0.31\"\n\n __config__ = [(\"mode\" , \"Auto;Manual\", \"Choose recovery archives to skip\" , \"Auto\"),\n (\"revtokeep\", \"int\" , \"Number of recovery archives to keep for package\", 0 )]\n\n __description__ = \"\"\"Skip recovery archives (.rev)\"\"\"\n __license__ = \"GPLv3\"\n __authors__ = [(\"Walter Purcaro\", \"[email protected]\")]\n\n\n interval = 0 #@TODO: Remove in 0.4.10\n\n\n def setup(self):\n self.info = {} #@TODO: Remove in 0.4.10\n\n\n @staticmethod\n def _setup(self):\n self.pyfile.plugin._setup()\n if self.pyfile.hasStatus(\"skipped\"):\n raise Skip(self.pyfile.statusname or self.pyfile.pluginname)\n\n\n def _name(self, pyfile):\n if hasattr(pyfile.pluginmodule, \"getInfo\"): #@NOTE: getInfo is deprecated in 0.4.10\n return pyfile.pluginmodule.getInfo([pyfile.url]).next()[0]\n else:\n self.logWarning(\"Unable to grab file name\")\n return urlparse.urlparse(urllib.unquote(pyfile.url)).path.split('/')[-1]\n\n\n def _pyfile(self, link):\n return PyFile(self.core.files,\n link.fid,\n link.url,\n link.name,\n link.size,\n link.status,\n link.error,\n link.plugin,\n link.packageID,\n link.order)\n\n\n def downloadPreparing(self, pyfile):\n name = self._name(pyfile)\n\n if pyfile.statusname is _(\"unskipped\") or not name.endswith(\".rev\") or not \".part\" in name:\n return\n\n revtokeep = -1 if self.getConfig('mode') == \"Auto\" else self.getConfig('revtokeep')\n\n if revtokeep:\n status_list = (1, 4, 8, 9, 14) if revtokeep < 0 else (1, 3, 4, 8, 9, 14)\n pyname = re.compile(r'%s\\.part\\d+\\.rev$' % name.rsplit('.', 2)[0].replace('.', '\\.'))\n\n queued = [True for link in self.core.api.getPackageData(pyfile.package().id).links \\\n if link.status not in status_list and pyname.match(link.name)].count(True)\n\n if not queued or queued < revtokeep: #: keep one rev at least in auto mode\n return\n\n pyfile.setCustomStatus(\"SkipRev\", \"skipped\")\n\n if not hasattr(pyfile.plugin, \"_setup\"):\n # Work-around: inject status checker inside the preprocessing routine of the plugin\n 
pyfile.plugin._setup = pyfile.plugin.setup\n pyfile.plugin.setup = MethodType(self._setup, pyfile.plugin)\n\n\n def downloadFailed(self, pyfile):\n #: Check if pyfile is still \"failed\",\n # maybe might has been restarted in meantime\n if pyfile.status != 8 or pyfile.name.rsplit('.', 1)[-1].strip() not in (\"rar\", \"rev\"):\n return\n\n revtokeep = -1 if self.getConfig('mode') == \"Auto\" else self.getConfig('revtokeep')\n\n if not revtokeep:\n return\n\n pyname = re.compile(r'%s\\.part\\d+\\.rev$' % pyfile.name.rsplit('.', 2)[0].replace('.', '\\.'))\n\n for link in self.core.api.getPackageData(pyfile.package().id).links:\n if link.status is 4 and pyname.match(link.name):\n pylink = self._pyfile(link)\n\n if revtokeep > -1 or pyfile.name.endswith(\".rev\"):\n pylink.setStatus(\"queued\")\n else:\n pylink.setCustomStatus(_(\"unskipped\"), \"queued\")\n\n self.core.files.save()\n pylink.release()\n return\n", "path": "module/plugins/hooks/SkipRev.py"}]} | 1,566 | 272 |
gh_patches_debug_19087 | rasdani/github-patches | git_diff | cloudtools__troposphere-869 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Validation error in CodeCommit Trigger
It appears that the validation for the CodeCommit Trigger class does not currently allow the use of any intrinsic functions for the `Events` property, and only accepts a list of hard-coded values.
https://github.com/cloudtools/troposphere/blob/45582eb1d21a6cc9cfa608f626d8acbf0317f37f/troposphere/codecommit.py#L18-L32
We are trying to allow for the dynamic selection of the values with a parameter, but encounter errors when attempting to use a `Ref`. A snippet of our trigger definition, and the error encountered is below:
```
repo_trigger1 = codecommit.Trigger(
Name = Ref(trigger_1_name),
CustomData = Ref(trigger_1_custom_data),
DestinationArn = Ref(trigger_1_destination_arn),
Branches = Ref(trigger_1_branches),
Events = Ref(trigger_1_events),
)
```
We are able to successfully generate the template when changing the `Events` assignment to:
```Events = ["all"],```
I believe we just need to check if the value is one of the Helper functions before iterating through the events. I will try to get a fix pushed up for review.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `troposphere/codecommit.py`
Content:
```
1 # Copyright (c) 2016, Mark Peek <[email protected]>
2 # All rights reserved.
3 #
4 # See LICENSE file for full license.
5
6 from . import AWSObject, AWSProperty
7
8
9 class Trigger(AWSProperty):
10 props = {
11 'Branches': ([basestring], False),
12 'CustomData': (basestring, False),
13 'DestinationArn': (basestring, False),
14 'Events': ([basestring], False),
15 'Name': (basestring, False),
16 }
17
18 def validate(self):
19 valid = [
20 'all',
21 'createReference',
22 'deleteReference',
23 'updateReference',
24 ]
25 events = self.properties.get('Events')
26 if events:
27 if 'all' in events and len(events) != 1:
28 raise ValueError('Trigger events: all must be used alone')
29 else:
30 for e in events:
31 if e not in valid:
32 raise ValueError('Trigger: invalid event %s' % e)
33
34
35 class Repository(AWSObject):
36 resource_type = "AWS::CodeCommit::Repository"
37
38 props = {
39 'RepositoryDescription': (basestring, False),
40 'RepositoryName': (basestring, True),
41 'Triggers': ([Trigger], False),
42 }
43
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/troposphere/codecommit.py b/troposphere/codecommit.py
--- a/troposphere/codecommit.py
+++ b/troposphere/codecommit.py
@@ -3,7 +3,7 @@
#
# See LICENSE file for full license.
-from . import AWSObject, AWSProperty
+from . import AWSHelperFn, AWSObject, AWSProperty
class Trigger(AWSProperty):
@@ -23,12 +23,12 @@
'updateReference',
]
events = self.properties.get('Events')
- if events:
+ if events and not isinstance(events, AWSHelperFn):
if 'all' in events and len(events) != 1:
raise ValueError('Trigger events: all must be used alone')
else:
for e in events:
- if e not in valid:
+ if e not in valid and not isinstance(e, AWSHelperFn):
raise ValueError('Trigger: invalid event %s' % e)
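With the `AWSHelperFn` guard in place, intrinsic functions flow through `validate()` untouched. A usage sketch, assuming a troposphere build that includes this patch:

```python
from troposphere import Ref
from troposphere.codecommit import Trigger

trigger = Trigger(
    Name=Ref("TriggerName"),
    Events=Ref("TriggerEvents"),  # a Ref instead of a literal list
)
trigger.validate()  # isinstance(events, AWSHelperFn) now skips the event check
```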
| {"golden_diff": "diff --git a/troposphere/codecommit.py b/troposphere/codecommit.py\n--- a/troposphere/codecommit.py\n+++ b/troposphere/codecommit.py\n@@ -3,7 +3,7 @@\n #\n # See LICENSE file for full license.\n \n-from . import AWSObject, AWSProperty\n+from . import AWSHelperFn, AWSObject, AWSProperty\n \n \n class Trigger(AWSProperty):\n@@ -23,12 +23,12 @@\n 'updateReference',\n ]\n events = self.properties.get('Events')\n- if events:\n+ if events and not isinstance(events, AWSHelperFn):\n if 'all' in events and len(events) != 1:\n raise ValueError('Trigger events: all must be used alone')\n else:\n for e in events:\n- if e not in valid:\n+ if e not in valid and not isinstance(e, AWSHelperFn):\n raise ValueError('Trigger: invalid event %s' % e)\n", "issue": "Validation error in CodeCommit Trigger\nIt appears that the validation for the CodeCommit Trigger class does not currently allow the use of any intrinsic functions for the `Events` property, and only accepts a list of hard coded values.\r\n\r\nhttps://github.com/cloudtools/troposphere/blob/45582eb1d21a6cc9cfa608f626d8acbf0317f37f/troposphere/codecommit.py#L18-L32\r\n\r\nWe are trying to allow for the dynamic selection of the values with a parameter, but encounter errors when attempting to use a `Ref`. A snippet of our trigger definition, and the error encountered is below:\r\n\r\n```\r\nrepo_trigger1 = codecommit.Trigger(\r\n Name = Ref(trigger_1_name),\r\n CustomData = Ref(trigger_1_custom_data),\r\n DestinationArn = Ref(trigger_1_destination_arn),\r\n Branches = Ref(trigger_1_branches),\r\n Events = Ref(trigger_1_events),\r\n )\r\n```\r\nWe are able to successfully generate the template when changing the `Events` assignment to:\r\n\r\n```Events = [\"all\"],```\r\n\r\nI believe we just need to check if the value is one of the Helper functions before iterating through the events. I will try to get a fix pushed up for review.\n", "before_files": [{"content": "# Copyright (c) 2016, Mark Peek <[email protected]>\n# All rights reserved.\n#\n# See LICENSE file for full license.\n\nfrom . import AWSObject, AWSProperty\n\n\nclass Trigger(AWSProperty):\n props = {\n 'Branches': ([basestring], False),\n 'CustomData': (basestring, False),\n 'DestinationArn': (basestring, False),\n 'Events': ([basestring], False),\n 'Name': (basestring, False),\n }\n\n def validate(self):\n valid = [\n 'all',\n 'createReference',\n 'deleteReference',\n 'updateReference',\n ]\n events = self.properties.get('Events')\n if events:\n if 'all' in events and len(events) != 1:\n raise ValueError('Trigger events: all must be used alone')\n else:\n for e in events:\n if e not in valid:\n raise ValueError('Trigger: invalid event %s' % e)\n\n\nclass Repository(AWSObject):\n resource_type = \"AWS::CodeCommit::Repository\"\n\n props = {\n 'RepositoryDescription': (basestring, False),\n 'RepositoryName': (basestring, True),\n 'Triggers': ([Trigger], False),\n }\n", "path": "troposphere/codecommit.py"}], "after_files": [{"content": "# Copyright (c) 2016, Mark Peek <[email protected]>\n# All rights reserved.\n#\n# See LICENSE file for full license.\n\nfrom . 
import AWSHelperFn, AWSObject, AWSProperty\n\n\nclass Trigger(AWSProperty):\n props = {\n 'Branches': ([basestring], False),\n 'CustomData': (basestring, False),\n 'DestinationArn': (basestring, False),\n 'Events': ([basestring], False),\n 'Name': (basestring, False),\n }\n\n def validate(self):\n valid = [\n 'all',\n 'createReference',\n 'deleteReference',\n 'updateReference',\n ]\n events = self.properties.get('Events')\n if events and not isinstance(events, AWSHelperFn):\n if 'all' in events and len(events) != 1:\n raise ValueError('Trigger events: all must be used alone')\n else:\n for e in events:\n if e not in valid and not isinstance(e, AWSHelperFn):\n raise ValueError('Trigger: invalid event %s' % e)\n\n\nclass Repository(AWSObject):\n resource_type = \"AWS::CodeCommit::Repository\"\n\n props = {\n 'RepositoryDescription': (basestring, False),\n 'RepositoryName': (basestring, True),\n 'Triggers': ([Trigger], False),\n }\n", "path": "troposphere/codecommit.py"}]} | 884 | 209 |
gh_patches_debug_6298 | rasdani/github-patches | git_diff | vispy__vispy-1389 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Camera API documentation missing
I could not find a list of available cameras in the docs:
http://vispy.org/scene.html?highlight=cameras#module-vispy.scene.cameras
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `vispy/scene/cameras/__init__.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Copyright (c) Vispy Development Team. All Rights Reserved.
3 # Distributed under the (new) BSD License. See LICENSE.txt for more info.
4 """
5 Cameras are responsible for determining which part of a scene is displayed
6 in a viewbox and for handling user input to change the view.
7
8 Several Camera subclasses are available to customize the projection of the
9 scene such as 3D perspective and orthographic projections, 2D
10 scale/translation, and other specialty cameras. A variety of user interaction
11 styles are available for each camera including arcball, turntable,
12 first-person, and pan/zoom interactions.
13
14 Internally, Cameras work by setting the transform of a SubScene object such
15 that a certain part of the scene is mapped to the bounding rectangle of the
16 ViewBox.
17 """
18 from ._base import make_camera # noqa
19 from .base_camera import BaseCamera # noqa
20 from .panzoom import PanZoomCamera # noqa
21 from .arcball import ArcballCamera # noqa
22 from .turntable import TurntableCamera # noqa
23 from .fly import FlyCamera # noqa
24 from .magnify import MagnifyCamera, Magnify1DCamera # noqa
25
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/vispy/scene/cameras/__init__.py b/vispy/scene/cameras/__init__.py
--- a/vispy/scene/cameras/__init__.py
+++ b/vispy/scene/cameras/__init__.py
@@ -15,6 +15,9 @@
that a certain part of the scene is mapped to the bounding rectangle of the
ViewBox.
"""
+__all__ = ['ArcballCamera', 'BaseCamera', 'FlyCamera', 'MagnifyCamera',
+ 'Magnify1DCamera', 'PanZoomCamera', 'TurntableCamera']
+
from ._base import make_camera # noqa
from .base_camera import BaseCamera # noqa
from .panzoom import PanZoomCamera # noqa
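Declaring `__all__` both constrains `from ... import *` and gives Sphinx's automodule an explicit list of names to document, which is what the issue asks for. A quick sanity check, assuming vispy is installed with this change:

```python
from vispy.scene import cameras

# Every advertised camera class should resolve on the package.
for name in cameras.__all__:
    assert hasattr(cameras, name), name
```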
| {"golden_diff": "diff --git a/vispy/scene/cameras/__init__.py b/vispy/scene/cameras/__init__.py\n--- a/vispy/scene/cameras/__init__.py\n+++ b/vispy/scene/cameras/__init__.py\n@@ -15,6 +15,9 @@\n that a certain part of the scene is mapped to the bounding rectangle of the \n ViewBox.\n \"\"\"\n+__all__ = ['ArcballCamera', 'BaseCamera', 'FlyCamera', 'MagnifyCamera',\n+ 'Magnify1DCamera', 'PanZoomCamera', 'TurntableCamera']\n+\n from ._base import make_camera # noqa\n from .base_camera import BaseCamera # noqa\n from .panzoom import PanZoomCamera # noqa\n", "issue": "Camera API documentation missing\nI could not find a list of available cameras in the docs:\n\nhttp://vispy.org/scene.html?highlight=cameras#module-vispy.scene.cameras\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright (c) Vispy Development Team. All Rights Reserved.\n# Distributed under the (new) BSD License. See LICENSE.txt for more info.\n\"\"\"\nCameras are responsible for determining which part of a scene is displayed\nin a viewbox and for handling user input to change the view.\n\nSeveral Camera subclasses are available to customize the projection of the \nscene such as 3D perspective and orthographic projections, 2D \nscale/translation, and other specialty cameras. A variety of user interaction\nstyles are available for each camera including arcball, turntable, \nfirst-person, and pan/zoom interactions.\n\nInternally, Cameras work by setting the transform of a SubScene object such \nthat a certain part of the scene is mapped to the bounding rectangle of the \nViewBox.\n\"\"\"\nfrom ._base import make_camera # noqa\nfrom .base_camera import BaseCamera # noqa\nfrom .panzoom import PanZoomCamera # noqa\nfrom .arcball import ArcballCamera # noqa\nfrom .turntable import TurntableCamera # noqa\nfrom .fly import FlyCamera # noqa\nfrom .magnify import MagnifyCamera, Magnify1DCamera # noqa\n", "path": "vispy/scene/cameras/__init__.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright (c) 2015, Vispy Development Team.\n# Distributed under the (new) BSD License. See LICENSE.txt for more info.\n\"\"\"\nCameras are responsible for determining which part of a scene is displayed\nin a viewbox and for handling user input to change the view.\n\nSeveral Camera subclasses are available to customize the projection of the \nscene such as 3D perspective and orthographic projections, 2D \nscale/translation, and other specialty cameras. A variety of user interaction\nstyles are available for each camera including arcball, turntable, \nfirst-person, and pan/zoom interactions.\n\nInternally, Cameras work by setting the transform of a SubScene object such \nthat a certain part of the scene is mapped to the bounding rectangle of the \nViewBox.\n\"\"\"\n__all__ = ['ArcballCamera', 'BaseCamera', 'FlyCamera', 'MagnifyCamera',\n 'Magnify1DCamera', 'PanZoomCamera', 'TurntableCamera']\n\nfrom ._base import make_camera # noqa\nfrom .base_camera import BaseCamera # noqa\nfrom .panzoom import PanZoomCamera # noqa\nfrom .arcball import ArcballCamera # noqa\nfrom .turntable import TurntableCamera # noqa\nfrom .fly import FlyCamera # noqa\nfrom .magnify import MagnifyCamera, Magnify1DCamera # noqa\n", "path": "vispy/scene/cameras/__init__.py"}]} | 602 | 168 |
gh_patches_debug_22907 | rasdani/github-patches | git_diff | fedora-infra__bodhi-974 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
The non-NULL comment text migration needs to be inserted before 4df1fcd59050
This migration was developed against the develop branch, but I had forgotten that there was already a migration in the develop branch that isn't on the 2.2 branch. We'll need to change both migrations so that the new one is inserted before the prior one:
```
[vagrant@localhost vagrant]$ alembic upgrade head
WARNING:fedmsg.crypto.x509:Crypto disabled ImportError('No module named M2Crypto',)
INFO [alembic.runtime.migration] Context impl PostgresqlImpl.
INFO [alembic.runtime.migration] Will assume transactional DDL.
/usr/lib/python2.7/site-packages/alembic/util/messaging.py:69: UserWarning: Revision 4df1fcd59050 referenced from 4df1fcd59050 -> 37f38ddc4c8d (head), Do not allow NULL values in the text column of the comments table. is not present
warnings.warn(msg)
Traceback (most recent call last):
File "/usr/bin/alembic", line 9, in <module>
load_entry_point('alembic==0.8.3', 'console_scripts', 'alembic')()
File "/usr/lib/python2.7/site-packages/alembic/config.py", line 450, in main
CommandLine(prog=prog).main(argv=argv)
File "/usr/lib/python2.7/site-packages/alembic/config.py", line 444, in main
self.run_cmd(cfg, options)
File "/usr/lib/python2.7/site-packages/alembic/config.py", line 427, in run_cmd
**dict((k, getattr(options, k)) for k in kwarg)
File "/usr/lib/python2.7/site-packages/alembic/command.py", line 174, in upgrade
script.run_env()
File "/usr/lib/python2.7/site-packages/alembic/script/base.py", line 397, in run_env
util.load_python_file(self.dir, 'env.py')
File "/usr/lib/python2.7/site-packages/alembic/util/pyfiles.py", line 81, in load_python_file
module = load_module_py(module_id, path)
File "/usr/lib/python2.7/site-packages/alembic/util/compat.py", line 79, in load_module_py
mod = imp.load_source(module_id, path, fp)
File "alembic/env.py", line 72, in <module>
run_migrations_online()
File "alembic/env.py", line 65, in run_migrations_online
context.run_migrations()
File "<string>", line 8, in run_migrations
File "/usr/lib/python2.7/site-packages/alembic/runtime/environment.py", line 797, in run_migrations
self.get_context().run_migrations(**kw)
File "/usr/lib/python2.7/site-packages/alembic/runtime/migration.py", line 303, in run_migrations
for step in self._migrations_fn(heads, self):
File "/usr/lib/python2.7/site-packages/alembic/command.py", line 163, in upgrade
return script._upgrade_revs(revision, rev)
File "/usr/lib/python2.7/site-packages/alembic/script/base.py", line 310, in _upgrade_revs
revs = list(revs)
File "/usr/lib/python2.7/site-packages/alembic/script/revision.py", line 610, in _iterate_revisions
requested_lowers = self.get_revisions(lower)
File "/usr/lib/python2.7/site-packages/alembic/script/revision.py", line 299, in get_revisions
return sum([self.get_revisions(id_elem) for id_elem in id_], ())
File "/usr/lib/python2.7/site-packages/alembic/script/revision.py", line 301, in get_revisions
resolved_id, branch_label = self._resolve_revision_number(id_)
File "/usr/lib/python2.7/site-packages/alembic/script/revision.py", line 423, in _resolve_revision_number
self._revision_map
File "/usr/lib/python2.7/site-packages/alembic/util/langhelpers.py", line 241, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/usr/lib/python2.7/site-packages/alembic/script/revision.py", line 151, in _revision_map
down_revision = map_[downrev]
KeyError: '4df1fcd59050'
```
It needs to be possible to `vagrant up` on the 2.2 branch and then advance to the develop branch.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `alembic/versions/4df1fcd59050_.py`
Content:
```
1 """Add the new pending_signing_tag column to the releases table.
2
3 Revision ID: 4df1fcd59050
4 Revises: 3c72757fa59e
5 Create Date: 2016-09-16 18:51:19.514301
6
7 """
8 from alembic import op
9 import sqlalchemy as sa
10
11
12 # revision identifiers, used by Alembic.
13 revision = '4df1fcd59050'
14 down_revision = '3c72757fa59e'
15
16
17 def upgrade():
18 op.add_column('releases',
19 sa.Column('pending_signing_tag', sa.UnicodeText(), server_default='',
20 nullable=False))
21 # We only used the server_default to stop the new column from being NULL. Let's now drop the
22 # server default.
23 op.alter_column('releases', 'pending_signing_tag', server_default=None)
24
25
26 def downgrade():
27 op.drop_column('releases', 'pending_signing_tag')
28
```
Path: `alembic/versions/37f38ddc4c8d_.py`
Content:
```
1 """Do not allow NULL values in the text column of the comments table.
2
3 Revision ID: 37f38ddc4c8d
4 Revises: 4df1fcd59050
5 Create Date: 2016-09-21 19:51:04.946521
6
7 """
8
9 from alembic import op
10 import sqlalchemy as sa
11
12
13 # revision identifiers, used by Alembic.
14 revision = '37f38ddc4c8d'
15 down_revision = '4df1fcd59050'
16
17
18 def upgrade():
19 """
20 We will need to set all existing NULL comments to "", then change the column to disallow NULL comments.
21 """
22 # Build a fake mini version of the comments table so we can form an UPDATE statement.
23 comments = sa.sql.table('comments', sa.sql.column('text', sa.UnicodeText))
24 # Set existing NULL comments to "".
25 op.execute(comments.update().where(comments.c.text==None).values({'text': op.inline_literal('')}))
26
27 # Disallow new NULL comments.
28 op.alter_column('comments', 'text', existing_type=sa.TEXT(), nullable=False)
29
30
31 def downgrade():
32 op.alter_column('comments', 'text', existing_type=sa.TEXT(), nullable=True)
33
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/alembic/versions/37f38ddc4c8d_.py b/alembic/versions/37f38ddc4c8d_.py
--- a/alembic/versions/37f38ddc4c8d_.py
+++ b/alembic/versions/37f38ddc4c8d_.py
@@ -1,7 +1,7 @@
"""Do not allow NULL values in the text column of the comments table.
Revision ID: 37f38ddc4c8d
-Revises: 4df1fcd59050
+Revises: 3c72757fa59e
Create Date: 2016-09-21 19:51:04.946521
"""
@@ -12,7 +12,7 @@
# revision identifiers, used by Alembic.
revision = '37f38ddc4c8d'
-down_revision = '4df1fcd59050'
+down_revision = '3c72757fa59e'
def upgrade():
diff --git a/alembic/versions/4df1fcd59050_.py b/alembic/versions/4df1fcd59050_.py
--- a/alembic/versions/4df1fcd59050_.py
+++ b/alembic/versions/4df1fcd59050_.py
@@ -1,7 +1,7 @@
"""Add the new pending_signing_tag column to the releases table.
Revision ID: 4df1fcd59050
-Revises: 3c72757fa59e
+Revises: 37f38ddc4c8d
Create Date: 2016-09-16 18:51:19.514301
"""
@@ -11,7 +11,7 @@
# revision identifiers, used by Alembic.
revision = '4df1fcd59050'
-down_revision = '3c72757fa59e'
+down_revision = '37f38ddc4c8d'
def upgrade():
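Alembic orders migrations as a linked list threaded through each revision's `down_revision`; the patch splices `37f38ddc4c8d` between `3c72757fa59e` and `4df1fcd59050` so the 2.2 branch can reach the new head. A minimal sketch of how that chain resolves after the swap:

```python
# Toy model of Alembic's revision chain after the patch; Alembic itself
# builds this map from the migration files' revision/down_revision pairs.
down_revision = {
    "37f38ddc4c8d": "3c72757fa59e",
    "4df1fcd59050": "37f38ddc4c8d",
}

def lineage(rev):
    while rev is not None:
        yield rev
        rev = down_revision.get(rev)

assert list(lineage("4df1fcd59050")) == [
    "4df1fcd59050", "37f38ddc4c8d", "3c72757fa59e",
]
```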
| {"golden_diff": "diff --git a/alembic/versions/37f38ddc4c8d_.py b/alembic/versions/37f38ddc4c8d_.py\n--- a/alembic/versions/37f38ddc4c8d_.py\n+++ b/alembic/versions/37f38ddc4c8d_.py\n@@ -1,7 +1,7 @@\n \"\"\"Do not allow NULL values in the text column of the comments table.\n \n Revision ID: 37f38ddc4c8d\n-Revises: 4df1fcd59050\n+Revises: 3c72757fa59e\n Create Date: 2016-09-21 19:51:04.946521\n \n \"\"\"\n@@ -12,7 +12,7 @@\n \n # revision identifiers, used by Alembic.\n revision = '37f38ddc4c8d'\n-down_revision = '4df1fcd59050'\n+down_revision = '3c72757fa59e'\n \n \n def upgrade():\ndiff --git a/alembic/versions/4df1fcd59050_.py b/alembic/versions/4df1fcd59050_.py\n--- a/alembic/versions/4df1fcd59050_.py\n+++ b/alembic/versions/4df1fcd59050_.py\n@@ -1,7 +1,7 @@\n \"\"\"Add the new pending_signing_tag column to the releases table.\n \n Revision ID: 4df1fcd59050\n-Revises: 3c72757fa59e\n+Revises: 37f38ddc4c8d\n Create Date: 2016-09-16 18:51:19.514301\n \n \"\"\"\n@@ -11,7 +11,7 @@\n \n # revision identifiers, used by Alembic.\n revision = '4df1fcd59050'\n-down_revision = '3c72757fa59e'\n+down_revision = '37f38ddc4c8d'\n \n \n def upgrade():\n", "issue": "The non-NULL comment text migration needs to be inserted before 4df1fcd59050\nThis migration was developed against the develop branch, but I had forgotten that there was already a migration in the develop branch that isn't on the 2.2 branch. We'll need to change both migrations so that the new one is inserted before the prior one:\n\n```\n[vagrant@localhost vagrant]$ alembic upgrade head\nWARNING:fedmsg.crypto.x509:Crypto disabled ImportError('No module named M2Crypto',)\nINFO [alembic.runtime.migration] Context impl PostgresqlImpl.\nINFO [alembic.runtime.migration] Will assume transactional DDL.\n/usr/lib/python2.7/site-packages/alembic/util/messaging.py:69: UserWarning: Revision 4df1fcd59050 referenced from 4df1fcd59050 -> 37f38ddc4c8d (head), Do not allow NULL values in the text column of the comments table. 
is not present\n warnings.warn(msg)\nTraceback (most recent call last):\n File \"/usr/bin/alembic\", line 9, in <module>\n load_entry_point('alembic==0.8.3', 'console_scripts', 'alembic')()\n File \"/usr/lib/python2.7/site-packages/alembic/config.py\", line 450, in main\n CommandLine(prog=prog).main(argv=argv)\n File \"/usr/lib/python2.7/site-packages/alembic/config.py\", line 444, in main\n self.run_cmd(cfg, options)\n File \"/usr/lib/python2.7/site-packages/alembic/config.py\", line 427, in run_cmd\n **dict((k, getattr(options, k)) for k in kwarg)\n File \"/usr/lib/python2.7/site-packages/alembic/command.py\", line 174, in upgrade\n script.run_env()\n File \"/usr/lib/python2.7/site-packages/alembic/script/base.py\", line 397, in run_env\n util.load_python_file(self.dir, 'env.py')\n File \"/usr/lib/python2.7/site-packages/alembic/util/pyfiles.py\", line 81, in load_python_file\n module = load_module_py(module_id, path)\n File \"/usr/lib/python2.7/site-packages/alembic/util/compat.py\", line 79, in load_module_py\n mod = imp.load_source(module_id, path, fp)\n File \"alembic/env.py\", line 72, in <module>\n run_migrations_online()\n File \"alembic/env.py\", line 65, in run_migrations_online\n context.run_migrations()\n File \"<string>\", line 8, in run_migrations\n File \"/usr/lib/python2.7/site-packages/alembic/runtime/environment.py\", line 797, in run_migrations\n self.get_context().run_migrations(**kw)\n File \"/usr/lib/python2.7/site-packages/alembic/runtime/migration.py\", line 303, in run_migrations\n for step in self._migrations_fn(heads, self):\n File \"/usr/lib/python2.7/site-packages/alembic/command.py\", line 163, in upgrade\n return script._upgrade_revs(revision, rev)\n File \"/usr/lib/python2.7/site-packages/alembic/script/base.py\", line 310, in _upgrade_revs\n revs = list(revs)\n File \"/usr/lib/python2.7/site-packages/alembic/script/revision.py\", line 610, in _iterate_revisions\n requested_lowers = self.get_revisions(lower)\n File \"/usr/lib/python2.7/site-packages/alembic/script/revision.py\", line 299, in get_revisions\n return sum([self.get_revisions(id_elem) for id_elem in id_], ())\n File \"/usr/lib/python2.7/site-packages/alembic/script/revision.py\", line 301, in get_revisions\n resolved_id, branch_label = self._resolve_revision_number(id_)\n File \"/usr/lib/python2.7/site-packages/alembic/script/revision.py\", line 423, in _resolve_revision_number\n self._revision_map\n File \"/usr/lib/python2.7/site-packages/alembic/util/langhelpers.py\", line 241, in __get__\n obj.__dict__[self.__name__] = result = self.fget(obj)\n File \"/usr/lib/python2.7/site-packages/alembic/script/revision.py\", line 151, in _revision_map\n down_revision = map_[downrev]\nKeyError: '4df1fcd59050'\n```\n\nIt needs to be possible to vagrant up on the 2.2 branch and then advance to the develop branch.\n\n", "before_files": [{"content": "\"\"\"Add the new pending_signing_tag column to the releases table.\n\nRevision ID: 4df1fcd59050\nRevises: 3c72757fa59e\nCreate Date: 2016-09-16 18:51:19.514301\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '4df1fcd59050'\ndown_revision = '3c72757fa59e'\n\n\ndef upgrade():\n op.add_column('releases',\n sa.Column('pending_signing_tag', sa.UnicodeText(), server_default='',\n nullable=False))\n # We only used the server_default to stop the new column from being NULL. 
Let's now drop the\n # server default.\n op.alter_column('releases', 'pending_signing_tag', server_default=None)\n\n\ndef downgrade():\n op.drop_column('releases', 'pending_signing_tag')\n", "path": "alembic/versions/4df1fcd59050_.py"}, {"content": "\"\"\"Do not allow NULL values in the text column of the comments table.\n\nRevision ID: 37f38ddc4c8d\nRevises: 4df1fcd59050\nCreate Date: 2016-09-21 19:51:04.946521\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '37f38ddc4c8d'\ndown_revision = '4df1fcd59050'\n\n\ndef upgrade():\n \"\"\"\n We will need to set all existing NULL comments to \"\", then change the column to disallow NULL comments.\n \"\"\"\n # Build a fake mini version of the comments table so we can form an UPDATE statement.\n comments = sa.sql.table('comments', sa.sql.column('text', sa.UnicodeText))\n # Set existing NULL comments to \"\".\n op.execute(comments.update().where(comments.c.text==None).values({'text': op.inline_literal('')}))\n\n # Disallow new NULL comments.\n op.alter_column('comments', 'text', existing_type=sa.TEXT(), nullable=False)\n\n\ndef downgrade():\n op.alter_column('comments', 'text', existing_type=sa.TEXT(), nullable=True)\n", "path": "alembic/versions/37f38ddc4c8d_.py"}], "after_files": [{"content": "\"\"\"Add the new pending_signing_tag column to the releases table.\n\nRevision ID: 4df1fcd59050\nRevises: 37f38ddc4c8d\nCreate Date: 2016-09-16 18:51:19.514301\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '4df1fcd59050'\ndown_revision = '37f38ddc4c8d'\n\n\ndef upgrade():\n op.add_column('releases',\n sa.Column('pending_signing_tag', sa.UnicodeText(), server_default='',\n nullable=False))\n # We only used the server_default to stop the new column from being NULL. Let's now drop the\n # server default.\n op.alter_column('releases', 'pending_signing_tag', server_default=None)\n\n\ndef downgrade():\n op.drop_column('releases', 'pending_signing_tag')\n", "path": "alembic/versions/4df1fcd59050_.py"}, {"content": "\"\"\"Do not allow NULL values in the text column of the comments table.\n\nRevision ID: 37f38ddc4c8d\nRevises: 3c72757fa59e\nCreate Date: 2016-09-21 19:51:04.946521\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '37f38ddc4c8d'\ndown_revision = '3c72757fa59e'\n\n\ndef upgrade():\n \"\"\"\n We will need to set all existing NULL comments to \"\", then change the column to disallow NULL comments.\n \"\"\"\n # Build a fake mini version of the comments table so we can form an UPDATE statement.\n comments = sa.sql.table('comments', sa.sql.column('text', sa.UnicodeText))\n # Set existing NULL comments to \"\".\n op.execute(comments.update().where(comments.c.text==None).values({'text': op.inline_literal('')}))\n\n # Disallow new NULL comments.\n op.alter_column('comments', 'text', existing_type=sa.TEXT(), nullable=False)\n\n\ndef downgrade():\n op.alter_column('comments', 'text', existing_type=sa.TEXT(), nullable=True)\n", "path": "alembic/versions/37f38ddc4c8d_.py"}]} | 2,024 | 543 |
gh_patches_debug_34075 | rasdani/github-patches | git_diff | scikit-hep__awkward-2102 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
behaviors with decorated mixin methods cannot be pickled
### Version of Awkward Array
main
### Description and code to reproduce
The mixin machinery introduces a closure, which `pickle` can't serialise.
--- END ISSUE ---
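For context, the failure described above reduces to pickling a locally defined closure, independent of Awkward itself. A minimal sketch of the failure mode (illustrative names, not taken from the issue) might look like this:

```python
import pickle


def make_transposed(method):
    # mixin_class_method defines a `transposed` function inside its
    # `register` closure in much the same way, so it is a local function
    # capturing `method`.
    def transposed(left, right):
        return method(right, left)

    return transposed


def add(left, right):
    return left + right


transposed = make_transposed(add)
# pickle serialises plain functions by their importable, module-qualified
# name; a local closure has no such name, so this raises an error along
# the lines of:
#   Can't pickle local object 'make_transposed.<locals>.transposed'
pickle.dumps(transposed)
```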
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/awkward/behaviors/mixins.py`
Content:
```
1 # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
2
3
4 import sys
5
6 import awkward as ak
7
8
9 def mixin_class(registry, name=None):
10 """
11 Args:
12 registry (dict): The destination behavior mapping registry. Typically,
13 this would be the global registry #ak.behavior, but one may wish
14 to register methods in an alternative way.
15 name (str): The name to assign to the behaviour class.
16
17 This decorator can be used to register a behavior mixin class.
18
19 Any inherited behaviors will automatically be made available to the decorated
20 class.
21
22 See the "Mixin decorators" section of #ak.behavior for further details.
23 """
24
25 def register(cls):
26 cls_name = cls.__name__
27 if name is None:
28 behavior_name = cls_name
29 else:
30 behavior_name = name
31
32 record = type(
33 cls_name + "Record",
34 (cls, ak.highlevel.Record),
35 {"__module__": cls.__module__},
36 )
37 setattr(sys.modules[cls.__module__], cls_name + "Record", record)
38 registry[behavior_name] = record
39 array = type(
40 cls_name + "Array",
41 (cls, ak.highlevel.Array),
42 {"__module__": cls.__module__},
43 )
44 setattr(sys.modules[cls.__module__], cls_name + "Array", array)
45 registry["*", behavior_name] = array
46 for basecls in cls.mro():
47 for method in basecls.__dict__.values():
48 if hasattr(method, "_awkward_mixin"):
49 ufunc, rhs, transpose = method._awkward_mixin
50 if rhs is None:
51 registry.setdefault((ufunc, behavior_name), method)
52 continue
53 for rhs_name in list(rhs) + [behavior_name]:
54 registry.setdefault((ufunc, behavior_name, rhs_name), method)
55 if transpose is not None and rhs_name != behavior_name:
56 registry.setdefault(
57 (ufunc, rhs_name, behavior_name), transpose
58 )
59 if basecls.__name__ in rhs:
60 rhs.add(behavior_name)
61 return cls
62
63 return register
64
65
66 def mixin_class_method(ufunc, rhs=None, *, transpose=True):
67 """
68 Args:
69 ufunc (numpy.ufunc): A universal function (or NEP18 callable) that is
70 hooked in Awkward Array, i.e. it can be the first argument of a behavior.
71 rhs (Set[type] or None): Set of right-hand side argument types, optional
72 if wrapping a unary function. The left-hand side is expected to
73 always be `self` of the parent class.
74 transpose (bool): If true, automatically create a transpose signature
75 (only makes sense for binary ufuncs).
76
77 This decorator can be used to register a mixin class method.
78
79 Using this decorator ensures that derived classes that are declared with the
80 #ak.mixin_class decorator will also have the behaviors that this class has.
81 """
82
83 def register(method):
84 if not isinstance(rhs, (set, type(None))):
85 raise ak._errors.wrap_error(
86 ValueError("expected a set of right-hand-side argument types")
87 )
88 if transpose and rhs is not None:
89
90 def transposed(left, right):
91 return method(right, left)
92
93 # make a copy of rhs, we will edit it later
94 method._awkward_mixin = (ufunc, set(rhs), transposed)
95 else:
96 method._awkward_mixin = (ufunc, rhs, None)
97 return method
98
99 return register
100
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/awkward/behaviors/mixins.py b/src/awkward/behaviors/mixins.py
--- a/src/awkward/behaviors/mixins.py
+++ b/src/awkward/behaviors/mixins.py
@@ -1,6 +1,7 @@
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
+import functools
import sys
import awkward as ak
@@ -63,6 +64,10 @@
return register
+def _call_transposed(func, left, right):
+ return func(right, left)
+
+
def mixin_class_method(ufunc, rhs=None, *, transpose=True):
"""
Args:
@@ -70,7 +75,8 @@
hooked in Awkward Array, i.e. it can be the first argument of a behavior.
rhs (Set[type] or None): Set of right-hand side argument types, optional
if wrapping a unary function. The left-hand side is expected to
- always be `self` of the parent class.
+ always be `self` of the parent class. The current class is implicitly
+ included in this set.
transpose (bool): If true, automatically create a transpose signature
(only makes sense for binary ufuncs).
@@ -86,12 +92,13 @@
ValueError("expected a set of right-hand-side argument types")
)
if transpose and rhs is not None:
-
- def transposed(left, right):
- return method(right, left)
-
# make a copy of rhs, we will edit it later
- method._awkward_mixin = (ufunc, set(rhs), transposed)
+ # use partial & a module-scoped function so that this is pickleable
+ method._awkward_mixin = (
+ ufunc,
+ set(rhs),
+ functools.partial(_call_transposed, method),
+ )
else:
method._awkward_mixin = (ufunc, rhs, None)
return method
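Why this patch works: `pickle` stores a plain function by reference to its module-qualified name, so `_call_transposed`, now defined at module scope, can be serialised, and a `functools.partial` instance pickles whenever its wrapped function and bound arguments do. A minimal sketch of the round trip (illustrative `sub` function, not from the patch):

```python
import functools
import pickle


def _call_transposed(func, left, right):
    # module-scoped, so pickle can locate it by name
    return func(right, left)


def sub(left, right):
    return left - right


transposed = functools.partial(_call_transposed, sub)
# Unlike the closure, this round-trips through pickle.
restored = pickle.loads(pickle.dumps(transposed))
assert restored(2, 5) == 3  # calls sub(5, 2)
```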
| {"golden_diff": "diff --git a/src/awkward/behaviors/mixins.py b/src/awkward/behaviors/mixins.py\n--- a/src/awkward/behaviors/mixins.py\n+++ b/src/awkward/behaviors/mixins.py\n@@ -1,6 +1,7 @@\n # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n \n \n+import functools\n import sys\n \n import awkward as ak\n@@ -63,6 +64,10 @@\n return register\n \n \n+def _call_transposed(func, left, right):\n+ return func(right, left)\n+\n+\n def mixin_class_method(ufunc, rhs=None, *, transpose=True):\n \"\"\"\n Args:\n@@ -70,7 +75,8 @@\n hooked in Awkward Array, i.e. it can be the first argument of a behavior.\n rhs (Set[type] or None): Set of right-hand side argument types, optional\n if wrapping a unary function. The left-hand side is expected to\n- always be `self` of the parent class.\n+ always be `self` of the parent class. The current class is implicitly\n+ included in this set.\n transpose (bool): If true, automatically create a transpose signature\n (only makes sense for binary ufuncs).\n \n@@ -86,12 +92,13 @@\n ValueError(\"expected a set of right-hand-side argument types\")\n )\n if transpose and rhs is not None:\n-\n- def transposed(left, right):\n- return method(right, left)\n-\n # make a copy of rhs, we will edit it later\n- method._awkward_mixin = (ufunc, set(rhs), transposed)\n+ # use partial & a module-scoped function so that this is pickleable\n+ method._awkward_mixin = (\n+ ufunc,\n+ set(rhs),\n+ functools.partial(_call_transposed, method),\n+ )\n else:\n method._awkward_mixin = (ufunc, rhs, None)\n return method\n", "issue": "behaviors with decorated mixin methods cannot be pickled\n### Version of Awkward Array\n\nmain\n\n### Description and code to reproduce\n\nThe mixin machinery introduces a closure, which `pickle` can't serialise.\n", "before_files": [{"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\n\nimport sys\n\nimport awkward as ak\n\n\ndef mixin_class(registry, name=None):\n \"\"\"\n Args:\n registry (dict): The destination behavior mapping registry. 
Typically,\n this would be the global registry #ak.behavior, but one may wish\n to register methods in an alternative way.\n name (str): The name to assign to the behaviour class.\n\n This decorator can be used to register a behavior mixin class.\n\n Any inherited behaviors will automatically be made available to the decorated\n class.\n\n See the \"Mixin decorators\" section of #ak.behavior for further details.\n \"\"\"\n\n def register(cls):\n cls_name = cls.__name__\n if name is None:\n behavior_name = cls_name\n else:\n behavior_name = name\n\n record = type(\n cls_name + \"Record\",\n (cls, ak.highlevel.Record),\n {\"__module__\": cls.__module__},\n )\n setattr(sys.modules[cls.__module__], cls_name + \"Record\", record)\n registry[behavior_name] = record\n array = type(\n cls_name + \"Array\",\n (cls, ak.highlevel.Array),\n {\"__module__\": cls.__module__},\n )\n setattr(sys.modules[cls.__module__], cls_name + \"Array\", array)\n registry[\"*\", behavior_name] = array\n for basecls in cls.mro():\n for method in basecls.__dict__.values():\n if hasattr(method, \"_awkward_mixin\"):\n ufunc, rhs, transpose = method._awkward_mixin\n if rhs is None:\n registry.setdefault((ufunc, behavior_name), method)\n continue\n for rhs_name in list(rhs) + [behavior_name]:\n registry.setdefault((ufunc, behavior_name, rhs_name), method)\n if transpose is not None and rhs_name != behavior_name:\n registry.setdefault(\n (ufunc, rhs_name, behavior_name), transpose\n )\n if basecls.__name__ in rhs:\n rhs.add(behavior_name)\n return cls\n\n return register\n\n\ndef mixin_class_method(ufunc, rhs=None, *, transpose=True):\n \"\"\"\n Args:\n ufunc (numpy.ufunc): A universal function (or NEP18 callable) that is\n hooked in Awkward Array, i.e. it can be the first argument of a behavior.\n rhs (Set[type] or None): Set of right-hand side argument types, optional\n if wrapping a unary function. The left-hand side is expected to\n always be `self` of the parent class.\n transpose (bool): If true, automatically create a transpose signature\n (only makes sense for binary ufuncs).\n\n This decorator can be used to register a mixin class method.\n\n Using this decorator ensures that derived classes that are declared with the\n #ak.mixin_class decorator will also have the behaviors that this class has.\n \"\"\"\n\n def register(method):\n if not isinstance(rhs, (set, type(None))):\n raise ak._errors.wrap_error(\n ValueError(\"expected a set of right-hand-side argument types\")\n )\n if transpose and rhs is not None:\n\n def transposed(left, right):\n return method(right, left)\n\n # make a copy of rhs, we will edit it later\n method._awkward_mixin = (ufunc, set(rhs), transposed)\n else:\n method._awkward_mixin = (ufunc, rhs, None)\n return method\n\n return register\n", "path": "src/awkward/behaviors/mixins.py"}], "after_files": [{"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\n\nimport functools\nimport sys\n\nimport awkward as ak\n\n\ndef mixin_class(registry, name=None):\n \"\"\"\n Args:\n registry (dict): The destination behavior mapping registry. 
Typically,\n this would be the global registry #ak.behavior, but one may wish\n to register methods in an alternative way.\n name (str): The name to assign to the behaviour class.\n\n This decorator can be used to register a behavior mixin class.\n\n Any inherited behaviors will automatically be made available to the decorated\n class.\n\n See the \"Mixin decorators\" section of #ak.behavior for further details.\n \"\"\"\n\n def register(cls):\n cls_name = cls.__name__\n if name is None:\n behavior_name = cls_name\n else:\n behavior_name = name\n\n record = type(\n cls_name + \"Record\",\n (cls, ak.highlevel.Record),\n {\"__module__\": cls.__module__},\n )\n setattr(sys.modules[cls.__module__], cls_name + \"Record\", record)\n registry[behavior_name] = record\n array = type(\n cls_name + \"Array\",\n (cls, ak.highlevel.Array),\n {\"__module__\": cls.__module__},\n )\n setattr(sys.modules[cls.__module__], cls_name + \"Array\", array)\n registry[\"*\", behavior_name] = array\n for basecls in cls.mro():\n for method in basecls.__dict__.values():\n if hasattr(method, \"_awkward_mixin\"):\n ufunc, rhs, transpose = method._awkward_mixin\n if rhs is None:\n registry.setdefault((ufunc, behavior_name), method)\n continue\n for rhs_name in list(rhs) + [behavior_name]:\n registry.setdefault((ufunc, behavior_name, rhs_name), method)\n if transpose is not None and rhs_name != behavior_name:\n registry.setdefault(\n (ufunc, rhs_name, behavior_name), transpose\n )\n if basecls.__name__ in rhs:\n rhs.add(behavior_name)\n return cls\n\n return register\n\n\ndef _call_transposed(func, left, right):\n return func(right, left)\n\n\ndef mixin_class_method(ufunc, rhs=None, *, transpose=True):\n \"\"\"\n Args:\n ufunc (numpy.ufunc): A universal function (or NEP18 callable) that is\n hooked in Awkward Array, i.e. it can be the first argument of a behavior.\n rhs (Set[type] or None): Set of right-hand side argument types, optional\n if wrapping a unary function. The left-hand side is expected to\n always be `self` of the parent class. The current class is implicitly\n included in this set.\n transpose (bool): If true, automatically create a transpose signature\n (only makes sense for binary ufuncs).\n\n This decorator can be used to register a mixin class method.\n\n Using this decorator ensures that derived classes that are declared with the\n #ak.mixin_class decorator will also have the behaviors that this class has.\n \"\"\"\n\n def register(method):\n if not isinstance(rhs, (set, type(None))):\n raise ak._errors.wrap_error(\n ValueError(\"expected a set of right-hand-side argument types\")\n )\n if transpose and rhs is not None:\n # make a copy of rhs, we will edit it later\n # use partial & a module-scoped function so that this is pickleable\n method._awkward_mixin = (\n ufunc,\n set(rhs),\n functools.partial(_call_transposed, method),\n )\n else:\n method._awkward_mixin = (ufunc, rhs, None)\n return method\n\n return register\n", "path": "src/awkward/behaviors/mixins.py"}]} | 1,280 | 460 |