problem_id
stringlengths 18
22
| source
stringclasses 1
value | task_type
stringclasses 1
value | in_source_id
stringlengths 13
58
| prompt
stringlengths 1.1k
25.4k
| golden_diff
stringlengths 145
5.13k
| verification_info
stringlengths 582
39.1k
| num_tokens
int64 271
4.1k
| num_tokens_diff
int64 47
1.02k
|
---|---|---|---|---|---|---|---|---|
gh_patches_debug_1422
|
rasdani/github-patches
|
git_diff
|
jazzband__pip-tools-808
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pip-compile replaces password in URL with ****
I am using Python 3.6.8, pip-tools 19.1 and setuptools 41.0.1 in an virtual environment.
Since some days, when i compile my requirements.in file which includes a package format with an git repo url like:
```
...
-e git+http://user:[email protected]/scm/path/git-repo-name.git#egg=packagename
google-api-python-client
...
```
the password string in the url is replaces with "***":
```
...
-e git+http://user:****@myhost.com/scm/path/git-repo-name.git#egg=packagename
google-api-python-client==1.7.8
...
```
Is there a way to prevent this behaviour?
Regards,
Josef
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `piptools/utils.py`
Content:
```
1 # coding: utf-8
2 from __future__ import absolute_import, division, print_function, unicode_literals
3
4 import sys
5 from collections import OrderedDict
6 from itertools import chain, groupby
7
8 from ._compat import install_req_from_line
9 from .click import style
10
11 UNSAFE_PACKAGES = {"setuptools", "distribute", "pip"}
12
13
14 def key_from_ireq(ireq):
15 """Get a standardized key for an InstallRequirement."""
16 if ireq.req is None and ireq.link is not None:
17 return str(ireq.link)
18 else:
19 return key_from_req(ireq.req)
20
21
22 def key_from_req(req):
23 """Get an all-lowercase version of the requirement's name."""
24 if hasattr(req, "key"):
25 # from pkg_resources, such as installed dists for pip-sync
26 key = req.key
27 else:
28 # from packaging, such as install requirements from requirements.txt
29 key = req.name
30
31 key = key.replace("_", "-").lower()
32 return key
33
34
35 def comment(text):
36 return style(text, fg="green")
37
38
39 def make_install_requirement(name, version, extras, constraint=False):
40 # If no extras are specified, the extras string is blank
41 extras_string = ""
42 if extras:
43 # Sort extras for stability
44 extras_string = "[{}]".format(",".join(sorted(extras)))
45
46 return install_req_from_line(
47 str("{}{}=={}".format(name, extras_string, version)), constraint=constraint
48 )
49
50
51 def format_requirement(ireq, marker=None, hashes=None):
52 """
53 Generic formatter for pretty printing InstallRequirements to the terminal
54 in a less verbose way than using its `__str__` method.
55 """
56 if ireq.editable:
57 line = "-e {}".format(ireq.link)
58 else:
59 line = str(ireq.req).lower()
60
61 if marker:
62 line = "{} ; {}".format(line, marker)
63
64 if hashes:
65 for hash_ in sorted(hashes):
66 line += " \\\n --hash={}".format(hash_)
67
68 return line
69
70
71 def format_specifier(ireq):
72 """
73 Generic formatter for pretty printing the specifier part of
74 InstallRequirements to the terminal.
75 """
76 # TODO: Ideally, this is carried over to the pip library itself
77 specs = ireq.specifier._specs if ireq.req is not None else []
78 specs = sorted(specs, key=lambda x: x._spec[1])
79 return ",".join(str(s) for s in specs) or "<any>"
80
81
82 def is_pinned_requirement(ireq):
83 """
84 Returns whether an InstallRequirement is a "pinned" requirement.
85
86 An InstallRequirement is considered pinned if:
87
88 - Is not editable
89 - It has exactly one specifier
90 - That specifier is "=="
91 - The version does not contain a wildcard
92
93 Examples:
94 django==1.8 # pinned
95 django>1.8 # NOT pinned
96 django~=1.8 # NOT pinned
97 django==1.* # NOT pinned
98 """
99 if ireq.editable:
100 return False
101
102 if len(ireq.specifier._specs) != 1:
103 return False
104
105 op, version = next(iter(ireq.specifier._specs))._spec
106 return (op == "==" or op == "===") and not version.endswith(".*")
107
108
109 def as_tuple(ireq):
110 """
111 Pulls out the (name: str, version:str, extras:(str)) tuple from
112 the pinned InstallRequirement.
113 """
114 if not is_pinned_requirement(ireq):
115 raise TypeError("Expected a pinned InstallRequirement, got {}".format(ireq))
116
117 name = key_from_req(ireq.req)
118 version = next(iter(ireq.specifier._specs))._spec[1]
119 extras = tuple(sorted(ireq.extras))
120 return name, version, extras
121
122
123 def full_groupby(iterable, key=None):
124 """Like groupby(), but sorts the input on the group key first."""
125 return groupby(sorted(iterable, key=key), key=key)
126
127
128 def flat_map(fn, collection):
129 """Map a function over a collection and flatten the result by one-level"""
130 return chain.from_iterable(map(fn, collection))
131
132
133 def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False):
134 """
135 Builds a dict-based lookup table (index) elegantly.
136
137 Supports building normal and unique lookup tables. For example:
138
139 >>> assert lookup_table(
140 ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0]) == {
141 ... 'b': {'bar', 'baz'},
142 ... 'f': {'foo'},
143 ... 'q': {'quux', 'qux'}
144 ... }
145
146 For key functions that uniquely identify values, set unique=True:
147
148 >>> assert lookup_table(
149 ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
150 ... unique=True) == {
151 ... 'b': 'baz',
152 ... 'f': 'foo',
153 ... 'q': 'quux'
154 ... }
155
156 For the values represented as lists, set use_lists=True:
157
158 >>> assert lookup_table(
159 ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
160 ... use_lists=True) == {
161 ... 'b': ['bar', 'baz'],
162 ... 'f': ['foo'],
163 ... 'q': ['qux', 'quux']
164 ... }
165
166 The values of the resulting lookup table will be values, not sets.
167
168 For extra power, you can even change the values while building up the LUT.
169 To do so, use the `keyval` function instead of the `key` arg:
170
171 >>> assert lookup_table(
172 ... ['foo', 'bar', 'baz', 'qux', 'quux'],
173 ... keyval=lambda s: (s[0], s[1:])) == {
174 ... 'b': {'ar', 'az'},
175 ... 'f': {'oo'},
176 ... 'q': {'uux', 'ux'}
177 ... }
178
179 """
180 if keyval is None:
181 if key is None:
182
183 def keyval(v):
184 return v
185
186 else:
187
188 def keyval(v):
189 return (key(v), v)
190
191 if unique:
192 return dict(keyval(v) for v in values)
193
194 lut = {}
195 for value in values:
196 k, v = keyval(value)
197 try:
198 s = lut[k]
199 except KeyError:
200 if use_lists:
201 s = lut[k] = list()
202 else:
203 s = lut[k] = set()
204 if use_lists:
205 s.append(v)
206 else:
207 s.add(v)
208 return dict(lut)
209
210
211 def dedup(iterable):
212 """Deduplicate an iterable object like iter(set(iterable)) but
213 order-reserved.
214 """
215 return iter(OrderedDict.fromkeys(iterable))
216
217
218 def name_from_req(req):
219 """Get the name of the requirement"""
220 if hasattr(req, "project_name"):
221 # from pkg_resources, such as installed dists for pip-sync
222 return req.project_name
223 else:
224 # from packaging, such as install requirements from requirements.txt
225 return req.name
226
227
228 def fs_str(string):
229 """
230 Convert given string to a correctly encoded filesystem string.
231
232 On Python 2, if the input string is unicode, converts it to bytes
233 encoded with the filesystem encoding.
234
235 On Python 3 returns the string as is, since Python 3 uses unicode
236 paths and the input string shouldn't be bytes.
237
238 :type string: str|unicode
239 :rtype: str
240 """
241 if isinstance(string, str):
242 return string
243 if isinstance(string, bytes):
244 raise AssertionError
245 return string.encode(_fs_encoding)
246
247
248 _fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
249
250
251 def get_hashes_from_ireq(ireq):
252 """
253 Given an InstallRequirement, return a list of string hashes in
254 the format "{algorithm}:{hash}". Return an empty list if there are no hashes
255 in the requirement options.
256 """
257 result = []
258 ireq_hashes = ireq.options.get("hashes", {})
259 for algorithm, hexdigests in ireq_hashes.items():
260 for hash_ in hexdigests:
261 result.append("{}:{}".format(algorithm, hash_))
262 return result
263
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/piptools/utils.py b/piptools/utils.py
--- a/piptools/utils.py
+++ b/piptools/utils.py
@@ -54,7 +54,7 @@
in a less verbose way than using its `__str__` method.
"""
if ireq.editable:
- line = "-e {}".format(ireq.link)
+ line = "-e {}".format(ireq.link.url)
else:
line = str(ireq.req).lower()
|
{"golden_diff": "diff --git a/piptools/utils.py b/piptools/utils.py\n--- a/piptools/utils.py\n+++ b/piptools/utils.py\n@@ -54,7 +54,7 @@\n in a less verbose way than using its `__str__` method.\n \"\"\"\n if ireq.editable:\n- line = \"-e {}\".format(ireq.link)\n+ line = \"-e {}\".format(ireq.link.url)\n else:\n line = str(ireq.req).lower()\n", "issue": "pip-compile replaces password in URL with ****\nI am using Python 3.6.8, pip-tools 19.1 and setuptools 41.0.1 in an virtual environment.\r\n\r\nSince some days, when i compile my requirements.in file which includes a package format with an git repo url like:\r\n\r\n```\r\n...\r\n-e git+http://user:[email protected]/scm/path/git-repo-name.git#egg=packagename\r\ngoogle-api-python-client\r\n...\r\n```\r\n\r\nthe password string in the url is replaces with \"***\":\r\n\r\n```\r\n...\r\n-e git+http://user:****@myhost.com/scm/path/git-repo-name.git#egg=packagename\r\ngoogle-api-python-client==1.7.8\r\n...\r\n```\r\n\r\nIs there a way to prevent this behaviour?\r\n\r\nRegards,\r\nJosef\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport sys\nfrom collections import OrderedDict\nfrom itertools import chain, groupby\n\nfrom ._compat import install_req_from_line\nfrom .click import style\n\nUNSAFE_PACKAGES = {\"setuptools\", \"distribute\", \"pip\"}\n\n\ndef key_from_ireq(ireq):\n \"\"\"Get a standardized key for an InstallRequirement.\"\"\"\n if ireq.req is None and ireq.link is not None:\n return str(ireq.link)\n else:\n return key_from_req(ireq.req)\n\n\ndef key_from_req(req):\n \"\"\"Get an all-lowercase version of the requirement's name.\"\"\"\n if hasattr(req, \"key\"):\n # from pkg_resources, such as installed dists for pip-sync\n key = req.key\n else:\n # from packaging, such as install requirements from requirements.txt\n key = req.name\n\n key = key.replace(\"_\", \"-\").lower()\n return key\n\n\ndef comment(text):\n return style(text, fg=\"green\")\n\n\ndef make_install_requirement(name, version, extras, constraint=False):\n # If no extras are specified, the extras string is blank\n extras_string = \"\"\n if extras:\n # Sort extras for stability\n extras_string = \"[{}]\".format(\",\".join(sorted(extras)))\n\n return install_req_from_line(\n str(\"{}{}=={}\".format(name, extras_string, version)), constraint=constraint\n )\n\n\ndef format_requirement(ireq, marker=None, hashes=None):\n \"\"\"\n Generic formatter for pretty printing InstallRequirements to the terminal\n in a less verbose way than using its `__str__` method.\n \"\"\"\n if ireq.editable:\n line = \"-e {}\".format(ireq.link)\n else:\n line = str(ireq.req).lower()\n\n if marker:\n line = \"{} ; {}\".format(line, marker)\n\n if hashes:\n for hash_ in sorted(hashes):\n line += \" \\\\\\n --hash={}\".format(hash_)\n\n return line\n\n\ndef format_specifier(ireq):\n \"\"\"\n Generic formatter for pretty printing the specifier part of\n InstallRequirements to the terminal.\n \"\"\"\n # TODO: Ideally, this is carried over to the pip library itself\n specs = ireq.specifier._specs if ireq.req is not None else []\n specs = sorted(specs, key=lambda x: x._spec[1])\n return \",\".join(str(s) for s in specs) or \"<any>\"\n\n\ndef is_pinned_requirement(ireq):\n \"\"\"\n Returns whether an InstallRequirement is a \"pinned\" requirement.\n\n An InstallRequirement is considered pinned if:\n\n - Is not editable\n - It has exactly one specifier\n - That specifier is \"==\"\n - The version does not contain a wildcard\n\n Examples:\n django==1.8 # pinned\n django>1.8 # NOT pinned\n django~=1.8 # NOT pinned\n django==1.* # NOT pinned\n \"\"\"\n if ireq.editable:\n return False\n\n if len(ireq.specifier._specs) != 1:\n return False\n\n op, version = next(iter(ireq.specifier._specs))._spec\n return (op == \"==\" or op == \"===\") and not version.endswith(\".*\")\n\n\ndef as_tuple(ireq):\n \"\"\"\n Pulls out the (name: str, version:str, extras:(str)) tuple from\n the pinned InstallRequirement.\n \"\"\"\n if not is_pinned_requirement(ireq):\n raise TypeError(\"Expected a pinned InstallRequirement, got {}\".format(ireq))\n\n name = key_from_req(ireq.req)\n version = next(iter(ireq.specifier._specs))._spec[1]\n extras = tuple(sorted(ireq.extras))\n return name, version, extras\n\n\ndef full_groupby(iterable, key=None):\n \"\"\"Like groupby(), but sorts the input on the group key first.\"\"\"\n return groupby(sorted(iterable, key=key), key=key)\n\n\ndef flat_map(fn, collection):\n \"\"\"Map a function over a collection and flatten the result by one-level\"\"\"\n return chain.from_iterable(map(fn, collection))\n\n\ndef lookup_table(values, key=None, keyval=None, unique=False, use_lists=False):\n \"\"\"\n Builds a dict-based lookup table (index) elegantly.\n\n Supports building normal and unique lookup tables. For example:\n\n >>> assert lookup_table(\n ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0]) == {\n ... 'b': {'bar', 'baz'},\n ... 'f': {'foo'},\n ... 'q': {'quux', 'qux'}\n ... }\n\n For key functions that uniquely identify values, set unique=True:\n\n >>> assert lookup_table(\n ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],\n ... unique=True) == {\n ... 'b': 'baz',\n ... 'f': 'foo',\n ... 'q': 'quux'\n ... }\n\n For the values represented as lists, set use_lists=True:\n\n >>> assert lookup_table(\n ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],\n ... use_lists=True) == {\n ... 'b': ['bar', 'baz'],\n ... 'f': ['foo'],\n ... 'q': ['qux', 'quux']\n ... }\n\n The values of the resulting lookup table will be values, not sets.\n\n For extra power, you can even change the values while building up the LUT.\n To do so, use the `keyval` function instead of the `key` arg:\n\n >>> assert lookup_table(\n ... ['foo', 'bar', 'baz', 'qux', 'quux'],\n ... keyval=lambda s: (s[0], s[1:])) == {\n ... 'b': {'ar', 'az'},\n ... 'f': {'oo'},\n ... 'q': {'uux', 'ux'}\n ... }\n\n \"\"\"\n if keyval is None:\n if key is None:\n\n def keyval(v):\n return v\n\n else:\n\n def keyval(v):\n return (key(v), v)\n\n if unique:\n return dict(keyval(v) for v in values)\n\n lut = {}\n for value in values:\n k, v = keyval(value)\n try:\n s = lut[k]\n except KeyError:\n if use_lists:\n s = lut[k] = list()\n else:\n s = lut[k] = set()\n if use_lists:\n s.append(v)\n else:\n s.add(v)\n return dict(lut)\n\n\ndef dedup(iterable):\n \"\"\"Deduplicate an iterable object like iter(set(iterable)) but\n order-reserved.\n \"\"\"\n return iter(OrderedDict.fromkeys(iterable))\n\n\ndef name_from_req(req):\n \"\"\"Get the name of the requirement\"\"\"\n if hasattr(req, \"project_name\"):\n # from pkg_resources, such as installed dists for pip-sync\n return req.project_name\n else:\n # from packaging, such as install requirements from requirements.txt\n return req.name\n\n\ndef fs_str(string):\n \"\"\"\n Convert given string to a correctly encoded filesystem string.\n\n On Python 2, if the input string is unicode, converts it to bytes\n encoded with the filesystem encoding.\n\n On Python 3 returns the string as is, since Python 3 uses unicode\n paths and the input string shouldn't be bytes.\n\n :type string: str|unicode\n :rtype: str\n \"\"\"\n if isinstance(string, str):\n return string\n if isinstance(string, bytes):\n raise AssertionError\n return string.encode(_fs_encoding)\n\n\n_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()\n\n\ndef get_hashes_from_ireq(ireq):\n \"\"\"\n Given an InstallRequirement, return a list of string hashes in\n the format \"{algorithm}:{hash}\". Return an empty list if there are no hashes\n in the requirement options.\n \"\"\"\n result = []\n ireq_hashes = ireq.options.get(\"hashes\", {})\n for algorithm, hexdigests in ireq_hashes.items():\n for hash_ in hexdigests:\n result.append(\"{}:{}\".format(algorithm, hash_))\n return result\n", "path": "piptools/utils.py"}], "after_files": [{"content": "# coding: utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport sys\nfrom collections import OrderedDict\nfrom itertools import chain, groupby\n\nfrom ._compat import install_req_from_line\nfrom .click import style\n\nUNSAFE_PACKAGES = {\"setuptools\", \"distribute\", \"pip\"}\n\n\ndef key_from_ireq(ireq):\n \"\"\"Get a standardized key for an InstallRequirement.\"\"\"\n if ireq.req is None and ireq.link is not None:\n return str(ireq.link)\n else:\n return key_from_req(ireq.req)\n\n\ndef key_from_req(req):\n \"\"\"Get an all-lowercase version of the requirement's name.\"\"\"\n if hasattr(req, \"key\"):\n # from pkg_resources, such as installed dists for pip-sync\n key = req.key\n else:\n # from packaging, such as install requirements from requirements.txt\n key = req.name\n\n key = key.replace(\"_\", \"-\").lower()\n return key\n\n\ndef comment(text):\n return style(text, fg=\"green\")\n\n\ndef make_install_requirement(name, version, extras, constraint=False):\n # If no extras are specified, the extras string is blank\n extras_string = \"\"\n if extras:\n # Sort extras for stability\n extras_string = \"[{}]\".format(\",\".join(sorted(extras)))\n\n return install_req_from_line(\n str(\"{}{}=={}\".format(name, extras_string, version)), constraint=constraint\n )\n\n\ndef format_requirement(ireq, marker=None, hashes=None):\n \"\"\"\n Generic formatter for pretty printing InstallRequirements to the terminal\n in a less verbose way than using its `__str__` method.\n \"\"\"\n if ireq.editable:\n line = \"-e {}\".format(ireq.link.url)\n else:\n line = str(ireq.req).lower()\n\n if marker:\n line = \"{} ; {}\".format(line, marker)\n\n if hashes:\n for hash_ in sorted(hashes):\n line += \" \\\\\\n --hash={}\".format(hash_)\n\n return line\n\n\ndef format_specifier(ireq):\n \"\"\"\n Generic formatter for pretty printing the specifier part of\n InstallRequirements to the terminal.\n \"\"\"\n # TODO: Ideally, this is carried over to the pip library itself\n specs = ireq.specifier._specs if ireq.req is not None else []\n specs = sorted(specs, key=lambda x: x._spec[1])\n return \",\".join(str(s) for s in specs) or \"<any>\"\n\n\ndef is_pinned_requirement(ireq):\n \"\"\"\n Returns whether an InstallRequirement is a \"pinned\" requirement.\n\n An InstallRequirement is considered pinned if:\n\n - Is not editable\n - It has exactly one specifier\n - That specifier is \"==\"\n - The version does not contain a wildcard\n\n Examples:\n django==1.8 # pinned\n django>1.8 # NOT pinned\n django~=1.8 # NOT pinned\n django==1.* # NOT pinned\n \"\"\"\n if ireq.editable:\n return False\n\n if len(ireq.specifier._specs) != 1:\n return False\n\n op, version = next(iter(ireq.specifier._specs))._spec\n return (op == \"==\" or op == \"===\") and not version.endswith(\".*\")\n\n\ndef as_tuple(ireq):\n \"\"\"\n Pulls out the (name: str, version:str, extras:(str)) tuple from\n the pinned InstallRequirement.\n \"\"\"\n if not is_pinned_requirement(ireq):\n raise TypeError(\"Expected a pinned InstallRequirement, got {}\".format(ireq))\n\n name = key_from_req(ireq.req)\n version = next(iter(ireq.specifier._specs))._spec[1]\n extras = tuple(sorted(ireq.extras))\n return name, version, extras\n\n\ndef full_groupby(iterable, key=None):\n \"\"\"Like groupby(), but sorts the input on the group key first.\"\"\"\n return groupby(sorted(iterable, key=key), key=key)\n\n\ndef flat_map(fn, collection):\n \"\"\"Map a function over a collection and flatten the result by one-level\"\"\"\n return chain.from_iterable(map(fn, collection))\n\n\ndef lookup_table(values, key=None, keyval=None, unique=False, use_lists=False):\n \"\"\"\n Builds a dict-based lookup table (index) elegantly.\n\n Supports building normal and unique lookup tables. For example:\n\n >>> assert lookup_table(\n ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0]) == {\n ... 'b': {'bar', 'baz'},\n ... 'f': {'foo'},\n ... 'q': {'quux', 'qux'}\n ... }\n\n For key functions that uniquely identify values, set unique=True:\n\n >>> assert lookup_table(\n ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],\n ... unique=True) == {\n ... 'b': 'baz',\n ... 'f': 'foo',\n ... 'q': 'quux'\n ... }\n\n For the values represented as lists, set use_lists=True:\n\n >>> assert lookup_table(\n ... ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],\n ... use_lists=True) == {\n ... 'b': ['bar', 'baz'],\n ... 'f': ['foo'],\n ... 'q': ['qux', 'quux']\n ... }\n\n The values of the resulting lookup table will be values, not sets.\n\n For extra power, you can even change the values while building up the LUT.\n To do so, use the `keyval` function instead of the `key` arg:\n\n >>> assert lookup_table(\n ... ['foo', 'bar', 'baz', 'qux', 'quux'],\n ... keyval=lambda s: (s[0], s[1:])) == {\n ... 'b': {'ar', 'az'},\n ... 'f': {'oo'},\n ... 'q': {'uux', 'ux'}\n ... }\n\n \"\"\"\n if keyval is None:\n if key is None:\n\n def keyval(v):\n return v\n\n else:\n\n def keyval(v):\n return (key(v), v)\n\n if unique:\n return dict(keyval(v) for v in values)\n\n lut = {}\n for value in values:\n k, v = keyval(value)\n try:\n s = lut[k]\n except KeyError:\n if use_lists:\n s = lut[k] = list()\n else:\n s = lut[k] = set()\n if use_lists:\n s.append(v)\n else:\n s.add(v)\n return dict(lut)\n\n\ndef dedup(iterable):\n \"\"\"Deduplicate an iterable object like iter(set(iterable)) but\n order-reserved.\n \"\"\"\n return iter(OrderedDict.fromkeys(iterable))\n\n\ndef name_from_req(req):\n \"\"\"Get the name of the requirement\"\"\"\n if hasattr(req, \"project_name\"):\n # from pkg_resources, such as installed dists for pip-sync\n return req.project_name\n else:\n # from packaging, such as install requirements from requirements.txt\n return req.name\n\n\ndef fs_str(string):\n \"\"\"\n Convert given string to a correctly encoded filesystem string.\n\n On Python 2, if the input string is unicode, converts it to bytes\n encoded with the filesystem encoding.\n\n On Python 3 returns the string as is, since Python 3 uses unicode\n paths and the input string shouldn't be bytes.\n\n :type string: str|unicode\n :rtype: str\n \"\"\"\n if isinstance(string, str):\n return string\n if isinstance(string, bytes):\n raise AssertionError\n return string.encode(_fs_encoding)\n\n\n_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()\n\n\ndef get_hashes_from_ireq(ireq):\n \"\"\"\n Given an InstallRequirement, return a list of string hashes in\n the format \"{algorithm}:{hash}\". Return an empty list if there are no hashes\n in the requirement options.\n \"\"\"\n result = []\n ireq_hashes = ireq.options.get(\"hashes\", {})\n for algorithm, hexdigests in ireq_hashes.items():\n for hash_ in hexdigests:\n result.append(\"{}:{}\".format(algorithm, hash_))\n return result\n", "path": "piptools/utils.py"}]}
| 3,010 | 110 |
gh_patches_debug_5314
|
rasdani/github-patches
|
git_diff
|
pyqtgraph__pyqtgraph-2305
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Options passed PenParameter are not applied
### Short description
`pTypes.PenParameter(name="Line Style", width=5)` should initialize the wrapped pen with `width` of 5, but it remains at 1. Same applies for other style options.
### Tested environment(s)
* PyQtGraph version: 0.12.4
* Qt Python binding: PyQt6
* Python version: 3.10
* Installation method: venv / pip
Options passed PenParameter are not applied
### Short description
`pTypes.PenParameter(name="Line Style", width=5)` should initialize the wrapped pen with `width` of 5, but it remains at 1. Same applies for other style options.
### Tested environment(s)
* PyQtGraph version: 0.12.4
* Qt Python binding: PyQt6
* Python version: 3.10
* Installation method: venv / pip
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyqtgraph/parametertree/parameterTypes/pen.py`
Content:
```
1 import re
2 from contextlib import ExitStack
3
4 from . import GroupParameterItem
5 from .basetypes import GroupParameter, Parameter, ParameterItem
6 from .qtenum import QtEnumParameter
7 from ... import functions as fn
8 from ...Qt import QtCore
9 from ...SignalProxy import SignalProxy
10 from ...widgets.PenPreviewLabel import PenPreviewLabel
11
12 class PenParameterItem(GroupParameterItem):
13 def __init__(self, param, depth):
14 super().__init__(param, depth)
15 self.penLabel = PenPreviewLabel(param)
16
17 def treeWidgetChanged(self):
18 ParameterItem.treeWidgetChanged(self)
19 tw = self.treeWidget()
20 if tw is None:
21 return
22 tw.setItemWidget(self, 1, self.penLabel
23 )
24
25 class PenParameter(GroupParameter):
26 """
27 Controls the appearance of a QPen value.
28
29 When `saveState` is called, the value is encoded as (color, width, style, capStyle, joinStyle, cosmetic)
30
31 ============== ========================================================
32 **Options:**
33 color pen color, can be any argument accepted by :func:`~pyqtgraph.mkColor` (defaults to black)
34 width integer width >= 0 (defaults to 1)
35 style String version of QPenStyle enum, i.e. 'SolidLine' (default), 'DashLine', etc.
36 capStyle String version of QPenCapStyle enum, i.e. 'SquareCap' (default), 'RoundCap', etc.
37 joinStyle String version of QPenJoinStyle enum, i.e. 'BevelJoin' (default), 'RoundJoin', etc.
38 cosmetic Boolean, whether or not the pen is cosmetic (defaults to True)
39 ============== ========================================================
40 """
41 itemClass = PenParameterItem
42
43 def __init__(self, **opts):
44 self.pen = fn.mkPen()
45 children = self._makeChildren(self.pen)
46 if 'children' in opts:
47 raise KeyError('Cannot set "children" argument in Pen Parameter opts')
48 super().__init__(**opts, children=list(children))
49 self.valChangingProxy = SignalProxy(self.sigValueChanging, delay=1.0, slot=self._childrenFinishedChanging)
50
51 def _childrenFinishedChanging(self, paramAndValue):
52 self.sigValueChanged.emit(*paramAndValue)
53
54 def saveState(self, filter=None):
55 state = super().saveState(filter)
56 opts = state.pop('children')
57 state['value'] = tuple(o['value'] for o in opts.values())
58 return state
59
60 def restoreState(self, state, recursive=True, addChildren=True, removeChildren=True, blockSignals=True):
61 return super().restoreState(state, recursive=False, addChildren=False, removeChildren=False, blockSignals=blockSignals)
62
63 def _interpretValue(self, v):
64 return self.mkPen(v)
65
66 def setValue(self, value, blockSignal=None):
67 if not fn.eq(value, self.pen):
68 value = self.mkPen(value)
69 self.updateFromPen(self, value)
70 return super().setValue(self.pen, blockSignal)
71
72 def applyOptsToPen(self, **opts):
73 # Transform opts into a value for the current pen
74 paramNames = set(opts).intersection(self.names)
75 # Value should be overridden by opts
76 with self.treeChangeBlocker():
77 if 'value' in opts:
78 pen = self.mkPen(opts.pop('value'))
79 if not fn.eq(pen, self.pen):
80 self.updateFromPen(self, pen)
81 penOpts = {}
82 for kk in paramNames:
83 penOpts[kk] = opts[kk]
84 self[kk] = opts[kk]
85 return penOpts
86
87 def setOpts(self, **opts):
88 # Transform opts into a value
89 penOpts = self.applyOptsToPen(**opts)
90 if penOpts:
91 self.setValue(self.pen)
92 return super().setOpts(**opts)
93
94 def mkPen(self, *args, **kwargs):
95 """Thin wrapper around fn.mkPen which accepts the serialized state from saveState"""
96 if len(args) == 1 and isinstance(args[0], tuple) and len(args[0]) == len(self.childs):
97 opts = dict(zip(self.names, args[0]))
98 self.applyOptsToPen(**opts)
99 args = (self.pen,)
100 kwargs = {}
101 return fn.mkPen(*args, **kwargs)
102
103 def _makeChildren(self, boundPen=None):
104 cs = QtCore.Qt.PenCapStyle
105 js = QtCore.Qt.PenJoinStyle
106 ps = QtCore.Qt.PenStyle
107 param = Parameter.create(
108 name='Params', type='group', children=[
109 dict(name='color', type='color', value='k'),
110 dict(name='width', value=1, type='int', limits=[0, None]),
111 QtEnumParameter(ps, name='style', value='SolidLine'),
112 QtEnumParameter(cs, name='capStyle'),
113 QtEnumParameter(js, name='joinStyle'),
114 dict(name='cosmetic', type='bool', value=True)
115 ]
116 )
117
118 optsPen = boundPen or fn.mkPen()
119 for p in param:
120 name = p.name()
121 # Qt naming scheme uses isXXX for booleans
122 if isinstance(p.value(), bool):
123 attrName = f'is{name.title()}'
124 else:
125 attrName = name
126 default = getattr(optsPen, attrName)()
127 replace = r'\1 \2'
128 name = re.sub(r'(\w)([A-Z])', replace, name)
129 name = name.title().strip()
130 p.setOpts(title=name, default=default)
131
132 def penPropertyWrapper(propertySetter):
133 def tiePenPropToParam(_, value):
134 propertySetter(value)
135 self.sigValueChanging.emit(self, self.pen)
136
137 return tiePenPropToParam
138
139 if boundPen is not None:
140 self.updateFromPen(param, boundPen)
141 for p in param:
142 setter, setName = self._setterForParam(p.name(), boundPen, returnName=True)
143 # Instead, set the parameter which will signal the old setter
144 setattr(boundPen, setName, p.setValue)
145 newSetter = penPropertyWrapper(setter)
146 # Edge case: color picker uses a dialog with user interaction, so wait until full change there
147 if p.type() != 'color':
148 p.sigValueChanging.connect(newSetter)
149 # Force children to emulate self's value instead of being part of a tree like normal
150 p.sigValueChanged.disconnect(p._emitValueChanged)
151 # Some widgets (e.g. checkbox, combobox) don't emit 'changing' signals, so tie to 'changed' as well
152 p.sigValueChanged.connect(newSetter)
153
154 return param
155
156 @staticmethod
157 def _setterForParam(paramName, obj, returnName=False):
158 formatted = paramName[0].upper() + paramName[1:]
159 setter = getattr(obj, f'set{formatted}')
160 if returnName:
161 return setter, formatted
162 return setter
163
164 @staticmethod
165 def updateFromPen(param, pen):
166 """
167 Applies settings from a pen to either a Parameter or dict. The Parameter or dict must already
168 be populated with the relevant keys that can be found in `PenSelectorDialog.mkParam`.
169 """
170 stack = ExitStack()
171 if isinstance(param, Parameter):
172 names = param.names
173 # Block changes until all are finalized
174 stack.enter_context(param.treeChangeBlocker())
175 else:
176 names = param
177 for opt in names:
178 # Booleans have different naming convention
179 if isinstance(param[opt], bool):
180 attrName = f'is{opt.title()}'
181 else:
182 attrName = opt
183 param[opt] = getattr(pen, attrName)()
184 stack.close()
185
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pyqtgraph/parametertree/parameterTypes/pen.py b/pyqtgraph/parametertree/parameterTypes/pen.py
--- a/pyqtgraph/parametertree/parameterTypes/pen.py
+++ b/pyqtgraph/parametertree/parameterTypes/pen.py
@@ -41,7 +41,7 @@
itemClass = PenParameterItem
def __init__(self, **opts):
- self.pen = fn.mkPen()
+ self.pen = fn.mkPen(**opts)
children = self._makeChildren(self.pen)
if 'children' in opts:
raise KeyError('Cannot set "children" argument in Pen Parameter opts')
|
{"golden_diff": "diff --git a/pyqtgraph/parametertree/parameterTypes/pen.py b/pyqtgraph/parametertree/parameterTypes/pen.py\n--- a/pyqtgraph/parametertree/parameterTypes/pen.py\n+++ b/pyqtgraph/parametertree/parameterTypes/pen.py\n@@ -41,7 +41,7 @@\n itemClass = PenParameterItem\n \n def __init__(self, **opts):\n- self.pen = fn.mkPen()\n+ self.pen = fn.mkPen(**opts)\n children = self._makeChildren(self.pen)\n if 'children' in opts:\n raise KeyError('Cannot set \"children\" argument in Pen Parameter opts')\n", "issue": "Options passed PenParameter are not applied\n### Short description\r\n`pTypes.PenParameter(name=\"Line Style\", width=5)` should initialize the wrapped pen with `width` of 5, but it remains at 1. Same applies for other style options.\r\n\r\n### Tested environment(s)\r\n\r\n * PyQtGraph version: 0.12.4\r\n * Qt Python binding: PyQt6\r\n * Python version: 3.10\r\n * Installation method: venv / pip\r\n\nOptions passed PenParameter are not applied\n### Short description\r\n`pTypes.PenParameter(name=\"Line Style\", width=5)` should initialize the wrapped pen with `width` of 5, but it remains at 1. Same applies for other style options.\r\n\r\n### Tested environment(s)\r\n\r\n * PyQtGraph version: 0.12.4\r\n * Qt Python binding: PyQt6\r\n * Python version: 3.10\r\n * Installation method: venv / pip\r\n\n", "before_files": [{"content": "import re\nfrom contextlib import ExitStack\n\nfrom . import GroupParameterItem\nfrom .basetypes import GroupParameter, Parameter, ParameterItem\nfrom .qtenum import QtEnumParameter\nfrom ... import functions as fn\nfrom ...Qt import QtCore\nfrom ...SignalProxy import SignalProxy\nfrom ...widgets.PenPreviewLabel import PenPreviewLabel\n\nclass PenParameterItem(GroupParameterItem):\n def __init__(self, param, depth):\n super().__init__(param, depth)\n self.penLabel = PenPreviewLabel(param)\n\n def treeWidgetChanged(self):\n ParameterItem.treeWidgetChanged(self)\n tw = self.treeWidget()\n if tw is None:\n return\n tw.setItemWidget(self, 1, self.penLabel\n )\n\nclass PenParameter(GroupParameter):\n \"\"\"\n Controls the appearance of a QPen value.\n\n When `saveState` is called, the value is encoded as (color, width, style, capStyle, joinStyle, cosmetic)\n\n ============== ========================================================\n **Options:**\n color pen color, can be any argument accepted by :func:`~pyqtgraph.mkColor` (defaults to black)\n width integer width >= 0 (defaults to 1)\n style String version of QPenStyle enum, i.e. 'SolidLine' (default), 'DashLine', etc.\n capStyle String version of QPenCapStyle enum, i.e. 'SquareCap' (default), 'RoundCap', etc.\n joinStyle String version of QPenJoinStyle enum, i.e. 'BevelJoin' (default), 'RoundJoin', etc.\n cosmetic Boolean, whether or not the pen is cosmetic (defaults to True)\n ============== ========================================================\n \"\"\"\n itemClass = PenParameterItem\n\n def __init__(self, **opts):\n self.pen = fn.mkPen()\n children = self._makeChildren(self.pen)\n if 'children' in opts:\n raise KeyError('Cannot set \"children\" argument in Pen Parameter opts')\n super().__init__(**opts, children=list(children))\n self.valChangingProxy = SignalProxy(self.sigValueChanging, delay=1.0, slot=self._childrenFinishedChanging)\n\n def _childrenFinishedChanging(self, paramAndValue):\n self.sigValueChanged.emit(*paramAndValue)\n\n def saveState(self, filter=None):\n state = super().saveState(filter)\n opts = state.pop('children')\n state['value'] = tuple(o['value'] for o in opts.values())\n return state\n\n def restoreState(self, state, recursive=True, addChildren=True, removeChildren=True, blockSignals=True):\n return super().restoreState(state, recursive=False, addChildren=False, removeChildren=False, blockSignals=blockSignals)\n\n def _interpretValue(self, v):\n return self.mkPen(v)\n\n def setValue(self, value, blockSignal=None):\n if not fn.eq(value, self.pen):\n value = self.mkPen(value)\n self.updateFromPen(self, value)\n return super().setValue(self.pen, blockSignal)\n\n def applyOptsToPen(self, **opts):\n # Transform opts into a value for the current pen\n paramNames = set(opts).intersection(self.names)\n # Value should be overridden by opts\n with self.treeChangeBlocker():\n if 'value' in opts:\n pen = self.mkPen(opts.pop('value'))\n if not fn.eq(pen, self.pen):\n self.updateFromPen(self, pen)\n penOpts = {}\n for kk in paramNames:\n penOpts[kk] = opts[kk]\n self[kk] = opts[kk]\n return penOpts\n\n def setOpts(self, **opts):\n # Transform opts into a value\n penOpts = self.applyOptsToPen(**opts)\n if penOpts:\n self.setValue(self.pen)\n return super().setOpts(**opts)\n\n def mkPen(self, *args, **kwargs):\n \"\"\"Thin wrapper around fn.mkPen which accepts the serialized state from saveState\"\"\"\n if len(args) == 1 and isinstance(args[0], tuple) and len(args[0]) == len(self.childs):\n opts = dict(zip(self.names, args[0]))\n self.applyOptsToPen(**opts)\n args = (self.pen,)\n kwargs = {}\n return fn.mkPen(*args, **kwargs)\n\n def _makeChildren(self, boundPen=None):\n cs = QtCore.Qt.PenCapStyle\n js = QtCore.Qt.PenJoinStyle\n ps = QtCore.Qt.PenStyle\n param = Parameter.create(\n name='Params', type='group', children=[\n dict(name='color', type='color', value='k'),\n dict(name='width', value=1, type='int', limits=[0, None]),\n QtEnumParameter(ps, name='style', value='SolidLine'),\n QtEnumParameter(cs, name='capStyle'),\n QtEnumParameter(js, name='joinStyle'),\n dict(name='cosmetic', type='bool', value=True)\n ]\n )\n\n optsPen = boundPen or fn.mkPen()\n for p in param:\n name = p.name()\n # Qt naming scheme uses isXXX for booleans\n if isinstance(p.value(), bool):\n attrName = f'is{name.title()}'\n else:\n attrName = name\n default = getattr(optsPen, attrName)()\n replace = r'\\1 \\2'\n name = re.sub(r'(\\w)([A-Z])', replace, name)\n name = name.title().strip()\n p.setOpts(title=name, default=default)\n\n def penPropertyWrapper(propertySetter):\n def tiePenPropToParam(_, value):\n propertySetter(value)\n self.sigValueChanging.emit(self, self.pen)\n\n return tiePenPropToParam\n\n if boundPen is not None:\n self.updateFromPen(param, boundPen)\n for p in param:\n setter, setName = self._setterForParam(p.name(), boundPen, returnName=True)\n # Instead, set the parameter which will signal the old setter\n setattr(boundPen, setName, p.setValue)\n newSetter = penPropertyWrapper(setter)\n # Edge case: color picker uses a dialog with user interaction, so wait until full change there\n if p.type() != 'color':\n p.sigValueChanging.connect(newSetter)\n # Force children to emulate self's value instead of being part of a tree like normal\n p.sigValueChanged.disconnect(p._emitValueChanged)\n # Some widgets (e.g. checkbox, combobox) don't emit 'changing' signals, so tie to 'changed' as well\n p.sigValueChanged.connect(newSetter)\n\n return param\n\n @staticmethod\n def _setterForParam(paramName, obj, returnName=False):\n formatted = paramName[0].upper() + paramName[1:]\n setter = getattr(obj, f'set{formatted}')\n if returnName:\n return setter, formatted\n return setter\n\n @staticmethod\n def updateFromPen(param, pen):\n \"\"\"\n Applies settings from a pen to either a Parameter or dict. The Parameter or dict must already\n be populated with the relevant keys that can be found in `PenSelectorDialog.mkParam`.\n \"\"\"\n stack = ExitStack()\n if isinstance(param, Parameter):\n names = param.names\n # Block changes until all are finalized\n stack.enter_context(param.treeChangeBlocker())\n else:\n names = param\n for opt in names:\n # Booleans have different naming convention\n if isinstance(param[opt], bool):\n attrName = f'is{opt.title()}'\n else:\n attrName = opt\n param[opt] = getattr(pen, attrName)()\n stack.close()\n", "path": "pyqtgraph/parametertree/parameterTypes/pen.py"}], "after_files": [{"content": "import re\nfrom contextlib import ExitStack\n\nfrom . import GroupParameterItem\nfrom .basetypes import GroupParameter, Parameter, ParameterItem\nfrom .qtenum import QtEnumParameter\nfrom ... import functions as fn\nfrom ...Qt import QtCore\nfrom ...SignalProxy import SignalProxy\nfrom ...widgets.PenPreviewLabel import PenPreviewLabel\n\nclass PenParameterItem(GroupParameterItem):\n def __init__(self, param, depth):\n super().__init__(param, depth)\n self.penLabel = PenPreviewLabel(param)\n\n def treeWidgetChanged(self):\n ParameterItem.treeWidgetChanged(self)\n tw = self.treeWidget()\n if tw is None:\n return\n tw.setItemWidget(self, 1, self.penLabel\n )\n\nclass PenParameter(GroupParameter):\n \"\"\"\n Controls the appearance of a QPen value.\n\n When `saveState` is called, the value is encoded as (color, width, style, capStyle, joinStyle, cosmetic)\n\n ============== ========================================================\n **Options:**\n color pen color, can be any argument accepted by :func:`~pyqtgraph.mkColor` (defaults to black)\n width integer width >= 0 (defaults to 1)\n style String version of QPenStyle enum, i.e. 'SolidLine' (default), 'DashLine', etc.\n capStyle String version of QPenCapStyle enum, i.e. 'SquareCap' (default), 'RoundCap', etc.\n joinStyle String version of QPenJoinStyle enum, i.e. 'BevelJoin' (default), 'RoundJoin', etc.\n cosmetic Boolean, whether or not the pen is cosmetic (defaults to True)\n ============== ========================================================\n \"\"\"\n itemClass = PenParameterItem\n\n def __init__(self, **opts):\n self.pen = fn.mkPen(**opts)\n children = self._makeChildren(self.pen)\n if 'children' in opts:\n raise KeyError('Cannot set \"children\" argument in Pen Parameter opts')\n super().__init__(**opts, children=list(children))\n self.valChangingProxy = SignalProxy(self.sigValueChanging, delay=1.0, slot=self._childrenFinishedChanging)\n\n def _childrenFinishedChanging(self, paramAndValue):\n self.sigValueChanged.emit(*paramAndValue)\n\n def saveState(self, filter=None):\n state = super().saveState(filter)\n opts = state.pop('children')\n state['value'] = tuple(o['value'] for o in opts.values())\n return state\n\n def restoreState(self, state, recursive=True, addChildren=True, removeChildren=True, blockSignals=True):\n return super().restoreState(state, recursive=False, addChildren=False, removeChildren=False, blockSignals=blockSignals)\n\n def _interpretValue(self, v):\n return self.mkPen(v)\n\n def setValue(self, value, blockSignal=None):\n if not fn.eq(value, self.pen):\n value = self.mkPen(value)\n self.updateFromPen(self, value)\n return super().setValue(self.pen, blockSignal)\n\n def applyOptsToPen(self, **opts):\n # Transform opts into a value for the current pen\n paramNames = set(opts).intersection(self.names)\n # Value should be overridden by opts\n with self.treeChangeBlocker():\n if 'value' in opts:\n pen = self.mkPen(opts.pop('value'))\n if not fn.eq(pen, self.pen):\n self.updateFromPen(self, pen)\n penOpts = {}\n for kk in paramNames:\n penOpts[kk] = opts[kk]\n self[kk] = opts[kk]\n return penOpts\n\n def setOpts(self, **opts):\n # Transform opts into a value\n penOpts = self.applyOptsToPen(**opts)\n if penOpts:\n self.setValue(self.pen)\n return super().setOpts(**opts)\n\n def mkPen(self, *args, **kwargs):\n \"\"\"Thin wrapper around fn.mkPen which accepts the serialized state from saveState\"\"\"\n if len(args) == 1 and isinstance(args[0], tuple) and len(args[0]) == len(self.childs):\n opts = dict(zip(self.names, args[0]))\n self.applyOptsToPen(**opts)\n args = (self.pen,)\n kwargs = {}\n return fn.mkPen(*args, **kwargs)\n\n def _makeChildren(self, boundPen=None):\n cs = QtCore.Qt.PenCapStyle\n js = QtCore.Qt.PenJoinStyle\n ps = QtCore.Qt.PenStyle\n param = Parameter.create(\n name='Params', type='group', children=[\n dict(name='color', type='color', value='k'),\n dict(name='width', value=1, type='int', limits=[0, None]),\n QtEnumParameter(ps, name='style', value='SolidLine'),\n QtEnumParameter(cs, name='capStyle'),\n QtEnumParameter(js, name='joinStyle'),\n dict(name='cosmetic', type='bool', value=True)\n ]\n )\n\n optsPen = boundPen or fn.mkPen()\n for p in param:\n name = p.name()\n # Qt naming scheme uses isXXX for booleans\n if isinstance(p.value(), bool):\n attrName = f'is{name.title()}'\n else:\n attrName = name\n default = getattr(optsPen, attrName)()\n replace = r'\\1 \\2'\n name = re.sub(r'(\\w)([A-Z])', replace, name)\n name = name.title().strip()\n p.setOpts(title=name, default=default)\n\n def penPropertyWrapper(propertySetter):\n def tiePenPropToParam(_, value):\n propertySetter(value)\n self.sigValueChanging.emit(self, self.pen)\n\n return tiePenPropToParam\n\n if boundPen is not None:\n self.updateFromPen(param, boundPen)\n for p in param:\n setter, setName = self._setterForParam(p.name(), boundPen, returnName=True)\n # Instead, set the parameter which will signal the old setter\n setattr(boundPen, setName, p.setValue)\n newSetter = penPropertyWrapper(setter)\n # Edge case: color picker uses a dialog with user interaction, so wait until full change there\n if p.type() != 'color':\n p.sigValueChanging.connect(newSetter)\n # Force children to emulate self's value instead of being part of a tree like normal\n p.sigValueChanged.disconnect(p._emitValueChanged)\n # Some widgets (e.g. checkbox, combobox) don't emit 'changing' signals, so tie to 'changed' as well\n p.sigValueChanged.connect(newSetter)\n\n return param\n\n @staticmethod\n def _setterForParam(paramName, obj, returnName=False):\n formatted = paramName[0].upper() + paramName[1:]\n setter = getattr(obj, f'set{formatted}')\n if returnName:\n return setter, formatted\n return setter\n\n @staticmethod\n def updateFromPen(param, pen):\n \"\"\"\n Applies settings from a pen to either a Parameter or dict. The Parameter or dict must already\n be populated with the relevant keys that can be found in `PenSelectorDialog.mkParam`.\n \"\"\"\n stack = ExitStack()\n if isinstance(param, Parameter):\n names = param.names\n # Block changes until all are finalized\n stack.enter_context(param.treeChangeBlocker())\n else:\n names = param\n for opt in names:\n # Booleans have different naming convention\n if isinstance(param[opt], bool):\n attrName = f'is{opt.title()}'\n else:\n attrName = opt\n param[opt] = getattr(pen, attrName)()\n stack.close()\n", "path": "pyqtgraph/parametertree/parameterTypes/pen.py"}]}
| 2,578 | 151 |
gh_patches_debug_32902
|
rasdani/github-patches
|
git_diff
|
kornia__kornia-2383
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Faster implementation of MS-SSIM
## 🚀 Feature
<!-- A clear and concise description of the feature proposal -->
Current implementation impacts training speed significantly (~40% slowdown compared to pure L1)
## Motivation
<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]. If this is related to another GitHub issue, please link here too -->
MS-SSIM gives some awesome results, and it'd be nice to have an option to switch to a
* lower accuracy and faster implementation such as https://link.springer.com/article/10.1007/s11554-010-0170-9
* more optimized implementation
## Pitch
<!-- A clear and concise description of what you want to happen. -->
Faster iteration is always great. Difference between workflow for a task which starts in morning and ends during lunch compared to one that ends in evening is huge.
## Alternatives
<!-- A clear and concise description of any alternative solutions or features you've considered, if any. -->
Reduce the scales in MS-SSIM
## Additional context
<!-- Add any other context or screenshots about the feature request here. -->
______________________________________________________________________
#### Consider also to contribute to Kornia universe projects :)
<sub>
- [**Tutorials**](https://github.com/kornia/tutorials): our repository containing the tutorials.
</sub>
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kornia/metrics/ssim.py`
Content:
```
1 from typing import List
2
3 import torch
4 import torch.nn as nn
5
6 from kornia.filters import filter2d, get_gaussian_kernel2d
7 from kornia.filters.filter import _compute_padding
8
9
10 def _crop(img: torch.Tensor, cropping_shape: List[int]) -> torch.Tensor:
11 """Crop out the part of "valid" convolution area."""
12 return torch.nn.functional.pad(
13 img, (-cropping_shape[2], -cropping_shape[3], -cropping_shape[0], -cropping_shape[1])
14 )
15
16
17 def ssim(
18 img1: torch.Tensor,
19 img2: torch.Tensor,
20 window_size: int,
21 max_val: float = 1.0,
22 eps: float = 1e-12,
23 padding: str = 'same',
24 ) -> torch.Tensor:
25 r"""Function that computes the Structural Similarity (SSIM) index map between two images.
26
27 Measures the (SSIM) index between each element in the input `x` and target `y`.
28
29 The index can be described as:
30
31 .. math::
32
33 \text{SSIM}(x, y) = \frac{(2\mu_x\mu_y+c_1)(2\sigma_{xy}+c_2)}
34 {(\mu_x^2+\mu_y^2+c_1)(\sigma_x^2+\sigma_y^2+c_2)}
35
36 where:
37 - :math:`c_1=(k_1 L)^2` and :math:`c_2=(k_2 L)^2` are two variables to
38 stabilize the division with weak denominator.
39 - :math:`L` is the dynamic range of the pixel-values (typically this is
40 :math:`2^{\#\text{bits per pixel}}-1`).
41
42 Args:
43 img1: the first input image with shape :math:`(B, C, H, W)`.
44 img2: the second input image with shape :math:`(B, C, H, W)`.
45 window_size: the size of the gaussian kernel to smooth the images.
46 max_val: the dynamic range of the images.
47 eps: Small value for numerically stability when dividing.
48 padding: ``'same'`` | ``'valid'``. Whether to only use the "valid" convolution
49 area to compute SSIM to match the MATLAB implementation of original SSIM paper.
50
51 Returns:
52 The ssim index map with shape :math:`(B, C, H, W)`.
53
54 Examples:
55 >>> input1 = torch.rand(1, 4, 5, 5)
56 >>> input2 = torch.rand(1, 4, 5, 5)
57 >>> ssim_map = ssim(input1, input2, 5) # 1x4x5x5
58 """
59 if not isinstance(img1, torch.Tensor):
60 raise TypeError(f"Input img1 type is not a torch.Tensor. Got {type(img1)}")
61
62 if not isinstance(img2, torch.Tensor):
63 raise TypeError(f"Input img2 type is not a torch.Tensor. Got {type(img2)}")
64
65 if not isinstance(max_val, float):
66 raise TypeError(f"Input max_val type is not a float. Got {type(max_val)}")
67
68 if not len(img1.shape) == 4:
69 raise ValueError(f"Invalid img1 shape, we expect BxCxHxW. Got: {img1.shape}")
70
71 if not len(img2.shape) == 4:
72 raise ValueError(f"Invalid img2 shape, we expect BxCxHxW. Got: {img2.shape}")
73
74 if not img1.shape == img2.shape:
75 raise ValueError(f"img1 and img2 shapes must be the same. Got: {img1.shape} and {img2.shape}")
76
77 # prepare kernel
78 kernel: torch.Tensor = get_gaussian_kernel2d((window_size, window_size), (1.5, 1.5))
79
80 # compute coefficients
81 C1: float = (0.01 * max_val) ** 2
82 C2: float = (0.03 * max_val) ** 2
83
84 # compute local mean per channel
85 mu1: torch.Tensor = filter2d(img1, kernel)
86 mu2: torch.Tensor = filter2d(img2, kernel)
87
88 cropping_shape: List[int] = []
89 if padding == 'valid':
90 height, width = kernel.shape[-2:]
91 cropping_shape = _compute_padding([height, width])
92 mu1 = _crop(mu1, cropping_shape)
93 mu2 = _crop(mu2, cropping_shape)
94 elif padding == 'same':
95 pass
96
97 mu1_sq = mu1**2
98 mu2_sq = mu2**2
99 mu1_mu2 = mu1 * mu2
100
101 mu_img1_sq = filter2d(img1**2, kernel)
102 mu_img2_sq = filter2d(img2**2, kernel)
103 mu_img1_img2 = filter2d(img1 * img2, kernel)
104
105 if padding == 'valid':
106 mu_img1_sq = _crop(mu_img1_sq, cropping_shape)
107 mu_img2_sq = _crop(mu_img2_sq, cropping_shape)
108 mu_img1_img2 = _crop(mu_img1_img2, cropping_shape)
109 elif padding == 'same':
110 pass
111
112 # compute local sigma per channel
113 sigma1_sq = mu_img1_sq - mu1_sq
114 sigma2_sq = mu_img2_sq - mu2_sq
115 sigma12 = mu_img1_img2 - mu1_mu2
116
117 # compute the similarity index map
118 num: torch.Tensor = (2.0 * mu1_mu2 + C1) * (2.0 * sigma12 + C2)
119 den: torch.Tensor = (mu1_sq + mu2_sq + C1) * (sigma1_sq + sigma2_sq + C2)
120
121 return num / (den + eps)
122
123
124 class SSIM(nn.Module):
125 r"""Create a module that computes the Structural Similarity (SSIM) index between two images.
126
127 Measures the (SSIM) index between each element in the input `x` and target `y`.
128
129 The index can be described as:
130
131 .. math::
132
133 \text{SSIM}(x, y) = \frac{(2\mu_x\mu_y+c_1)(2\sigma_{xy}+c_2)}
134 {(\mu_x^2+\mu_y^2+c_1)(\sigma_x^2+\sigma_y^2+c_2)}
135
136 where:
137 - :math:`c_1=(k_1 L)^2` and :math:`c_2=(k_2 L)^2` are two variables to
138 stabilize the division with weak denominator.
139 - :math:`L` is the dynamic range of the pixel-values (typically this is
140 :math:`2^{\#\text{bits per pixel}}-1`).
141
142 Args:
143 window_size: the size of the gaussian kernel to smooth the images.
144 max_val: the dynamic range of the images.
145 eps: Small value for numerically stability when dividing.
146 padding: ``'same'`` | ``'valid'``. Whether to only use the "valid" convolution
147 area to compute SSIM to match the MATLAB implementation of original SSIM paper.
148
149 Shape:
150 - Input: :math:`(B, C, H, W)`.
151 - Target :math:`(B, C, H, W)`.
152 - Output: :math:`(B, C, H, W)`.
153
154 Examples:
155 >>> input1 = torch.rand(1, 4, 5, 5)
156 >>> input2 = torch.rand(1, 4, 5, 5)
157 >>> ssim = SSIM(5)
158 >>> ssim_map = ssim(input1, input2) # 1x4x5x5
159 """
160
161 def __init__(self, window_size: int, max_val: float = 1.0, eps: float = 1e-12, padding: str = 'same') -> None:
162 super().__init__()
163 self.window_size: int = window_size
164 self.max_val: float = max_val
165 self.eps = eps
166 self.padding = padding
167
168 def forward(self, img1: torch.Tensor, img2: torch.Tensor) -> torch.Tensor:
169 return ssim(img1, img2, self.window_size, self.max_val, self.eps, self.padding)
170
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/kornia/metrics/ssim.py b/kornia/metrics/ssim.py
--- a/kornia/metrics/ssim.py
+++ b/kornia/metrics/ssim.py
@@ -3,7 +3,7 @@
import torch
import torch.nn as nn
-from kornia.filters import filter2d, get_gaussian_kernel2d
+from kornia.filters import filter2d_separable, get_gaussian_kernel1d
from kornia.filters.filter import _compute_padding
@@ -75,19 +75,19 @@
raise ValueError(f"img1 and img2 shapes must be the same. Got: {img1.shape} and {img2.shape}")
# prepare kernel
- kernel: torch.Tensor = get_gaussian_kernel2d((window_size, window_size), (1.5, 1.5))
+ kernel: torch.Tensor = get_gaussian_kernel1d(window_size, 1.5, device=img1.device, dtype=img1.dtype)
# compute coefficients
C1: float = (0.01 * max_val) ** 2
C2: float = (0.03 * max_val) ** 2
# compute local mean per channel
- mu1: torch.Tensor = filter2d(img1, kernel)
- mu2: torch.Tensor = filter2d(img2, kernel)
+ mu1: torch.Tensor = filter2d_separable(img1, kernel, kernel)
+ mu2: torch.Tensor = filter2d_separable(img2, kernel, kernel)
cropping_shape: List[int] = []
if padding == 'valid':
- height, width = kernel.shape[-2:]
+ height = width = kernel.shape[-1]
cropping_shape = _compute_padding([height, width])
mu1 = _crop(mu1, cropping_shape)
mu2 = _crop(mu2, cropping_shape)
@@ -98,9 +98,9 @@
mu2_sq = mu2**2
mu1_mu2 = mu1 * mu2
- mu_img1_sq = filter2d(img1**2, kernel)
- mu_img2_sq = filter2d(img2**2, kernel)
- mu_img1_img2 = filter2d(img1 * img2, kernel)
+ mu_img1_sq = filter2d_separable(img1**2, kernel, kernel)
+ mu_img2_sq = filter2d_separable(img2**2, kernel, kernel)
+ mu_img1_img2 = filter2d_separable(img1 * img2, kernel, kernel)
if padding == 'valid':
mu_img1_sq = _crop(mu_img1_sq, cropping_shape)
|
{"golden_diff": "diff --git a/kornia/metrics/ssim.py b/kornia/metrics/ssim.py\n--- a/kornia/metrics/ssim.py\n+++ b/kornia/metrics/ssim.py\n@@ -3,7 +3,7 @@\n import torch\n import torch.nn as nn\n \n-from kornia.filters import filter2d, get_gaussian_kernel2d\n+from kornia.filters import filter2d_separable, get_gaussian_kernel1d\n from kornia.filters.filter import _compute_padding\n \n \n@@ -75,19 +75,19 @@\n raise ValueError(f\"img1 and img2 shapes must be the same. Got: {img1.shape} and {img2.shape}\")\n \n # prepare kernel\n- kernel: torch.Tensor = get_gaussian_kernel2d((window_size, window_size), (1.5, 1.5))\n+ kernel: torch.Tensor = get_gaussian_kernel1d(window_size, 1.5, device=img1.device, dtype=img1.dtype)\n \n # compute coefficients\n C1: float = (0.01 * max_val) ** 2\n C2: float = (0.03 * max_val) ** 2\n \n # compute local mean per channel\n- mu1: torch.Tensor = filter2d(img1, kernel)\n- mu2: torch.Tensor = filter2d(img2, kernel)\n+ mu1: torch.Tensor = filter2d_separable(img1, kernel, kernel)\n+ mu2: torch.Tensor = filter2d_separable(img2, kernel, kernel)\n \n cropping_shape: List[int] = []\n if padding == 'valid':\n- height, width = kernel.shape[-2:]\n+ height = width = kernel.shape[-1]\n cropping_shape = _compute_padding([height, width])\n mu1 = _crop(mu1, cropping_shape)\n mu2 = _crop(mu2, cropping_shape)\n@@ -98,9 +98,9 @@\n mu2_sq = mu2**2\n mu1_mu2 = mu1 * mu2\n \n- mu_img1_sq = filter2d(img1**2, kernel)\n- mu_img2_sq = filter2d(img2**2, kernel)\n- mu_img1_img2 = filter2d(img1 * img2, kernel)\n+ mu_img1_sq = filter2d_separable(img1**2, kernel, kernel)\n+ mu_img2_sq = filter2d_separable(img2**2, kernel, kernel)\n+ mu_img1_img2 = filter2d_separable(img1 * img2, kernel, kernel)\n \n if padding == 'valid':\n mu_img1_sq = _crop(mu_img1_sq, cropping_shape)\n", "issue": "Faster implementation of MS-SSIM\n## \ud83d\ude80 Feature\r\n<!-- A clear and concise description of the feature proposal -->\r\nCurrent implementation impacts training speed significantly (~40% slowdown compared to pure L1)\r\n\r\n## Motivation\r\n\r\n<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]. If this is related to another GitHub issue, please link here too -->\r\nMS-SSIM gives some awesome results, and it'd be nice to have an option to switch to a \r\n* lower accuracy and faster implementation such as https://link.springer.com/article/10.1007/s11554-010-0170-9\r\n* more optimized implementation\r\n\r\n## Pitch\r\n\r\n<!-- A clear and concise description of what you want to happen. -->\r\nFaster iteration is always great. Difference between workflow for a task which starts in morning and ends during lunch compared to one that ends in evening is huge.\r\n\r\n## Alternatives\r\n\r\n<!-- A clear and concise description of any alternative solutions or features you've considered, if any. -->\r\nReduce the scales in MS-SSIM\r\n\r\n## Additional context\r\n\r\n<!-- Add any other context or screenshots about the feature request here. -->\r\n\r\n______________________________________________________________________\r\n\r\n#### Consider also to contribute to Kornia universe projects :)\r\n\r\n<sub>\r\n\r\n- [**Tutorials**](https://github.com/kornia/tutorials): our repository containing the tutorials.\r\n\r\n</sub>\r\n\n", "before_files": [{"content": "from typing import List\n\nimport torch\nimport torch.nn as nn\n\nfrom kornia.filters import filter2d, get_gaussian_kernel2d\nfrom kornia.filters.filter import _compute_padding\n\n\ndef _crop(img: torch.Tensor, cropping_shape: List[int]) -> torch.Tensor:\n \"\"\"Crop out the part of \"valid\" convolution area.\"\"\"\n return torch.nn.functional.pad(\n img, (-cropping_shape[2], -cropping_shape[3], -cropping_shape[0], -cropping_shape[1])\n )\n\n\ndef ssim(\n img1: torch.Tensor,\n img2: torch.Tensor,\n window_size: int,\n max_val: float = 1.0,\n eps: float = 1e-12,\n padding: str = 'same',\n) -> torch.Tensor:\n r\"\"\"Function that computes the Structural Similarity (SSIM) index map between two images.\n\n Measures the (SSIM) index between each element in the input `x` and target `y`.\n\n The index can be described as:\n\n .. math::\n\n \\text{SSIM}(x, y) = \\frac{(2\\mu_x\\mu_y+c_1)(2\\sigma_{xy}+c_2)}\n {(\\mu_x^2+\\mu_y^2+c_1)(\\sigma_x^2+\\sigma_y^2+c_2)}\n\n where:\n - :math:`c_1=(k_1 L)^2` and :math:`c_2=(k_2 L)^2` are two variables to\n stabilize the division with weak denominator.\n - :math:`L` is the dynamic range of the pixel-values (typically this is\n :math:`2^{\\#\\text{bits per pixel}}-1`).\n\n Args:\n img1: the first input image with shape :math:`(B, C, H, W)`.\n img2: the second input image with shape :math:`(B, C, H, W)`.\n window_size: the size of the gaussian kernel to smooth the images.\n max_val: the dynamic range of the images.\n eps: Small value for numerically stability when dividing.\n padding: ``'same'`` | ``'valid'``. Whether to only use the \"valid\" convolution\n area to compute SSIM to match the MATLAB implementation of original SSIM paper.\n\n Returns:\n The ssim index map with shape :math:`(B, C, H, W)`.\n\n Examples:\n >>> input1 = torch.rand(1, 4, 5, 5)\n >>> input2 = torch.rand(1, 4, 5, 5)\n >>> ssim_map = ssim(input1, input2, 5) # 1x4x5x5\n \"\"\"\n if not isinstance(img1, torch.Tensor):\n raise TypeError(f\"Input img1 type is not a torch.Tensor. Got {type(img1)}\")\n\n if not isinstance(img2, torch.Tensor):\n raise TypeError(f\"Input img2 type is not a torch.Tensor. Got {type(img2)}\")\n\n if not isinstance(max_val, float):\n raise TypeError(f\"Input max_val type is not a float. Got {type(max_val)}\")\n\n if not len(img1.shape) == 4:\n raise ValueError(f\"Invalid img1 shape, we expect BxCxHxW. Got: {img1.shape}\")\n\n if not len(img2.shape) == 4:\n raise ValueError(f\"Invalid img2 shape, we expect BxCxHxW. Got: {img2.shape}\")\n\n if not img1.shape == img2.shape:\n raise ValueError(f\"img1 and img2 shapes must be the same. Got: {img1.shape} and {img2.shape}\")\n\n # prepare kernel\n kernel: torch.Tensor = get_gaussian_kernel2d((window_size, window_size), (1.5, 1.5))\n\n # compute coefficients\n C1: float = (0.01 * max_val) ** 2\n C2: float = (0.03 * max_val) ** 2\n\n # compute local mean per channel\n mu1: torch.Tensor = filter2d(img1, kernel)\n mu2: torch.Tensor = filter2d(img2, kernel)\n\n cropping_shape: List[int] = []\n if padding == 'valid':\n height, width = kernel.shape[-2:]\n cropping_shape = _compute_padding([height, width])\n mu1 = _crop(mu1, cropping_shape)\n mu2 = _crop(mu2, cropping_shape)\n elif padding == 'same':\n pass\n\n mu1_sq = mu1**2\n mu2_sq = mu2**2\n mu1_mu2 = mu1 * mu2\n\n mu_img1_sq = filter2d(img1**2, kernel)\n mu_img2_sq = filter2d(img2**2, kernel)\n mu_img1_img2 = filter2d(img1 * img2, kernel)\n\n if padding == 'valid':\n mu_img1_sq = _crop(mu_img1_sq, cropping_shape)\n mu_img2_sq = _crop(mu_img2_sq, cropping_shape)\n mu_img1_img2 = _crop(mu_img1_img2, cropping_shape)\n elif padding == 'same':\n pass\n\n # compute local sigma per channel\n sigma1_sq = mu_img1_sq - mu1_sq\n sigma2_sq = mu_img2_sq - mu2_sq\n sigma12 = mu_img1_img2 - mu1_mu2\n\n # compute the similarity index map\n num: torch.Tensor = (2.0 * mu1_mu2 + C1) * (2.0 * sigma12 + C2)\n den: torch.Tensor = (mu1_sq + mu2_sq + C1) * (sigma1_sq + sigma2_sq + C2)\n\n return num / (den + eps)\n\n\nclass SSIM(nn.Module):\n r\"\"\"Create a module that computes the Structural Similarity (SSIM) index between two images.\n\n Measures the (SSIM) index between each element in the input `x` and target `y`.\n\n The index can be described as:\n\n .. math::\n\n \\text{SSIM}(x, y) = \\frac{(2\\mu_x\\mu_y+c_1)(2\\sigma_{xy}+c_2)}\n {(\\mu_x^2+\\mu_y^2+c_1)(\\sigma_x^2+\\sigma_y^2+c_2)}\n\n where:\n - :math:`c_1=(k_1 L)^2` and :math:`c_2=(k_2 L)^2` are two variables to\n stabilize the division with weak denominator.\n - :math:`L` is the dynamic range of the pixel-values (typically this is\n :math:`2^{\\#\\text{bits per pixel}}-1`).\n\n Args:\n window_size: the size of the gaussian kernel to smooth the images.\n max_val: the dynamic range of the images.\n eps: Small value for numerically stability when dividing.\n padding: ``'same'`` | ``'valid'``. Whether to only use the \"valid\" convolution\n area to compute SSIM to match the MATLAB implementation of original SSIM paper.\n\n Shape:\n - Input: :math:`(B, C, H, W)`.\n - Target :math:`(B, C, H, W)`.\n - Output: :math:`(B, C, H, W)`.\n\n Examples:\n >>> input1 = torch.rand(1, 4, 5, 5)\n >>> input2 = torch.rand(1, 4, 5, 5)\n >>> ssim = SSIM(5)\n >>> ssim_map = ssim(input1, input2) # 1x4x5x5\n \"\"\"\n\n def __init__(self, window_size: int, max_val: float = 1.0, eps: float = 1e-12, padding: str = 'same') -> None:\n super().__init__()\n self.window_size: int = window_size\n self.max_val: float = max_val\n self.eps = eps\n self.padding = padding\n\n def forward(self, img1: torch.Tensor, img2: torch.Tensor) -> torch.Tensor:\n return ssim(img1, img2, self.window_size, self.max_val, self.eps, self.padding)\n", "path": "kornia/metrics/ssim.py"}], "after_files": [{"content": "from typing import List\n\nimport torch\nimport torch.nn as nn\n\nfrom kornia.filters import filter2d_separable, get_gaussian_kernel1d\nfrom kornia.filters.filter import _compute_padding\n\n\ndef _crop(img: torch.Tensor, cropping_shape: List[int]) -> torch.Tensor:\n \"\"\"Crop out the part of \"valid\" convolution area.\"\"\"\n return torch.nn.functional.pad(\n img, (-cropping_shape[2], -cropping_shape[3], -cropping_shape[0], -cropping_shape[1])\n )\n\n\ndef ssim(\n img1: torch.Tensor,\n img2: torch.Tensor,\n window_size: int,\n max_val: float = 1.0,\n eps: float = 1e-12,\n padding: str = 'same',\n) -> torch.Tensor:\n r\"\"\"Function that computes the Structural Similarity (SSIM) index map between two images.\n\n Measures the (SSIM) index between each element in the input `x` and target `y`.\n\n The index can be described as:\n\n .. math::\n\n \\text{SSIM}(x, y) = \\frac{(2\\mu_x\\mu_y+c_1)(2\\sigma_{xy}+c_2)}\n {(\\mu_x^2+\\mu_y^2+c_1)(\\sigma_x^2+\\sigma_y^2+c_2)}\n\n where:\n - :math:`c_1=(k_1 L)^2` and :math:`c_2=(k_2 L)^2` are two variables to\n stabilize the division with weak denominator.\n - :math:`L` is the dynamic range of the pixel-values (typically this is\n :math:`2^{\\#\\text{bits per pixel}}-1`).\n\n Args:\n img1: the first input image with shape :math:`(B, C, H, W)`.\n img2: the second input image with shape :math:`(B, C, H, W)`.\n window_size: the size of the gaussian kernel to smooth the images.\n max_val: the dynamic range of the images.\n eps: Small value for numerically stability when dividing.\n padding: ``'same'`` | ``'valid'``. Whether to only use the \"valid\" convolution\n area to compute SSIM to match the MATLAB implementation of original SSIM paper.\n\n Returns:\n The ssim index map with shape :math:`(B, C, H, W)`.\n\n Examples:\n >>> input1 = torch.rand(1, 4, 5, 5)\n >>> input2 = torch.rand(1, 4, 5, 5)\n >>> ssim_map = ssim(input1, input2, 5) # 1x4x5x5\n \"\"\"\n if not isinstance(img1, torch.Tensor):\n raise TypeError(f\"Input img1 type is not a torch.Tensor. Got {type(img1)}\")\n\n if not isinstance(img2, torch.Tensor):\n raise TypeError(f\"Input img2 type is not a torch.Tensor. Got {type(img2)}\")\n\n if not isinstance(max_val, float):\n raise TypeError(f\"Input max_val type is not a float. Got {type(max_val)}\")\n\n if not len(img1.shape) == 4:\n raise ValueError(f\"Invalid img1 shape, we expect BxCxHxW. Got: {img1.shape}\")\n\n if not len(img2.shape) == 4:\n raise ValueError(f\"Invalid img2 shape, we expect BxCxHxW. Got: {img2.shape}\")\n\n if not img1.shape == img2.shape:\n raise ValueError(f\"img1 and img2 shapes must be the same. Got: {img1.shape} and {img2.shape}\")\n\n # prepare kernel\n kernel: torch.Tensor = get_gaussian_kernel1d(window_size, 1.5, device=img1.device, dtype=img1.dtype)\n\n # compute coefficients\n C1: float = (0.01 * max_val) ** 2\n C2: float = (0.03 * max_val) ** 2\n\n # compute local mean per channel\n mu1: torch.Tensor = filter2d_separable(img1, kernel, kernel)\n mu2: torch.Tensor = filter2d_separable(img2, kernel, kernel)\n\n cropping_shape: List[int] = []\n if padding == 'valid':\n height = width = kernel.shape[-1]\n cropping_shape = _compute_padding([height, width])\n mu1 = _crop(mu1, cropping_shape)\n mu2 = _crop(mu2, cropping_shape)\n elif padding == 'same':\n pass\n\n mu1_sq = mu1**2\n mu2_sq = mu2**2\n mu1_mu2 = mu1 * mu2\n\n mu_img1_sq = filter2d_separable(img1**2, kernel, kernel)\n mu_img2_sq = filter2d_separable(img2**2, kernel, kernel)\n mu_img1_img2 = filter2d_separable(img1 * img2, kernel, kernel)\n\n if padding == 'valid':\n mu_img1_sq = _crop(mu_img1_sq, cropping_shape)\n mu_img2_sq = _crop(mu_img2_sq, cropping_shape)\n mu_img1_img2 = _crop(mu_img1_img2, cropping_shape)\n elif padding == 'same':\n pass\n\n # compute local sigma per channel\n sigma1_sq = mu_img1_sq - mu1_sq\n sigma2_sq = mu_img2_sq - mu2_sq\n sigma12 = mu_img1_img2 - mu1_mu2\n\n # compute the similarity index map\n num: torch.Tensor = (2.0 * mu1_mu2 + C1) * (2.0 * sigma12 + C2)\n den: torch.Tensor = (mu1_sq + mu2_sq + C1) * (sigma1_sq + sigma2_sq + C2)\n\n return num / (den + eps)\n\n\nclass SSIM(nn.Module):\n r\"\"\"Create a module that computes the Structural Similarity (SSIM) index between two images.\n\n Measures the (SSIM) index between each element in the input `x` and target `y`.\n\n The index can be described as:\n\n .. math::\n\n \\text{SSIM}(x, y) = \\frac{(2\\mu_x\\mu_y+c_1)(2\\sigma_{xy}+c_2)}\n {(\\mu_x^2+\\mu_y^2+c_1)(\\sigma_x^2+\\sigma_y^2+c_2)}\n\n where:\n - :math:`c_1=(k_1 L)^2` and :math:`c_2=(k_2 L)^2` are two variables to\n stabilize the division with weak denominator.\n - :math:`L` is the dynamic range of the pixel-values (typically this is\n :math:`2^{\\#\\text{bits per pixel}}-1`).\n\n Args:\n window_size: the size of the gaussian kernel to smooth the images.\n max_val: the dynamic range of the images.\n eps: Small value for numerically stability when dividing.\n padding: ``'same'`` | ``'valid'``. Whether to only use the \"valid\" convolution\n area to compute SSIM to match the MATLAB implementation of original SSIM paper.\n\n Shape:\n - Input: :math:`(B, C, H, W)`.\n - Target :math:`(B, C, H, W)`.\n - Output: :math:`(B, C, H, W)`.\n\n Examples:\n >>> input1 = torch.rand(1, 4, 5, 5)\n >>> input2 = torch.rand(1, 4, 5, 5)\n >>> ssim = SSIM(5)\n >>> ssim_map = ssim(input1, input2) # 1x4x5x5\n \"\"\"\n\n def __init__(self, window_size: int, max_val: float = 1.0, eps: float = 1e-12, padding: str = 'same') -> None:\n super().__init__()\n self.window_size: int = window_size\n self.max_val: float = max_val\n self.eps = eps\n self.padding = padding\n\n def forward(self, img1: torch.Tensor, img2: torch.Tensor) -> torch.Tensor:\n return ssim(img1, img2, self.window_size, self.max_val, self.eps, self.padding)\n", "path": "kornia/metrics/ssim.py"}]}
| 2,836 | 597 |
gh_patches_debug_17204
|
rasdani/github-patches
|
git_diff
|
pytorch__audio-934
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
tedlium test failing in windows on gpu in CircleCI
tedlium test failing in windows on gpu, see [here](https://app.circleci.com/pipelines/github/pytorch/audio/3455/workflows/dda668b7-25c3-48e9-bccc-d8a2c7fd6338/jobs/100751) from [here](https://app.circleci.com/pipelines/github/pytorch/audio/3455/workflows/dda668b7-25c3-48e9-bccc-d8a2c7fd6338). This may have been obscured by #929.
```
self = <torchaudio_unittest.datasets.tedlium_test.TestTedlium testMethod=test_tedlium_release1>
def test_tedlium_release1(self):
release = "release1"
dataset = tedlium.TEDLIUM(self.root_dir, release=release)
num_samples = 0
> for i, (data, sample_rate, transcript, talk_id, speaker_id, identifier) in enumerate(dataset):
torchaudio_unittest\datasets\tedlium_test.py:99:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
..\env\lib\site-packages\torchaudio-0.7.0a0+3250d3d-py3.8.egg\torchaudio\datasets\tedlium.py:184: in __getitem__
return self._load_tedlium_item(fileid, line, self._path)
..\env\lib\site-packages\torchaudio-0.7.0a0+3250d3d-py3.8.egg\torchaudio\datasets\tedlium.py:151: in _load_tedlium_item
waveform, sample_rate = self._load_audio(wave_path + self._ext_audio, start_time=start_time, end_time=end_time)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <torchaudio.datasets.tedlium.TEDLIUM object at 0x000002883EC0CF70>
path = 'C:\\Users\\circleci\\AppData\\Local\\Temp\\tmpzvh9jdr_\\tedlium\\TEDLIUM_release1\\train\\sph\\release1.sph'
start_time = 0, end_time = 32000, sample_rate = 16000
def _load_audio(self, path: str, start_time: float, end_time: float, sample_rate: int = 16000) -> [Tensor, int]:
"""Default load function used in TEDLIUM dataset, you can overwrite this function to customize functionality
and load individual sentences from a full ted audio talk file.
Args:
path (str): Path to audio file
start_time (int, optional): Time in seconds where the sample sentence stars
end_time (int, optional): Time in seconds where the sample sentence finishes
Returns:
[Tensor, int]: Audio tensor representation and sample rate
"""
start_time = int(float(start_time) * sample_rate)
end_time = int(float(end_time) * sample_rate)
if torchaudio.get_audio_backend() == "sox_io":
return torchaudio.load(path, frame_offset=start_time, num_frames=end_time - start_time)
> return torchaudio.load(path)[:, start_time:end_time]
E TypeError: tuple indices must be integers or slices, not tuple
..\env\lib\site-packages\torchaudio-0.7.0a0+3250d3d-py3.8.egg\torchaudio\datasets\tedlium.py:171: TypeError
```
Thoughts?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torchaudio/datasets/tedlium.py`
Content:
```
1 import os
2 from typing import Tuple
3
4 import torchaudio
5 from torch import Tensor
6 from torch.utils.data import Dataset
7 from torchaudio.datasets.utils import (
8 download_url,
9 extract_archive,
10 )
11
12
13 _RELEASE_CONFIGS = {
14 "release1": {
15 "folder_in_archive": "TEDLIUM_release1",
16 "url": "http://www.openslr.org/resources/7/TEDLIUM_release1.tar.gz",
17 "checksum": "30301975fd8c5cac4040c261c0852f57cfa8adbbad2ce78e77e4986957445f27",
18 "data_path": "",
19 "subset": "train",
20 "supported_subsets": ["train", "test", "dev"],
21 "dict": "TEDLIUM.150K.dic",
22 },
23 "release2": {
24 "folder_in_archive": "TEDLIUM_release2",
25 "url": "http://www.openslr.org/resources/19/TEDLIUM_release2.tar.gz",
26 "checksum": "93281b5fcaaae5c88671c9d000b443cb3c7ea3499ad12010b3934ca41a7b9c58",
27 "data_path": "",
28 "subset": "train",
29 "supported_subsets": ["train", "test", "dev"],
30 "dict": "TEDLIUM.152k.dic",
31 },
32 "release3": {
33 "folder_in_archive": "TEDLIUM_release-3",
34 "url": "http://www.openslr.org/resources/51/TEDLIUM_release-3.tgz",
35 "checksum": "ad1e454d14d1ad550bc2564c462d87c7a7ec83d4dc2b9210f22ab4973b9eccdb",
36 "data_path": "data/",
37 "subset": None,
38 "supported_subsets": [None],
39 "dict": "TEDLIUM.152k.dic",
40 },
41 }
42
43
44 class TEDLIUM(Dataset):
45 """
46 Create a Dataset for Tedlium. It supports releases 1,2 and 3.
47
48 Args:
49 root (str): Path to the directory where the dataset is found or downloaded.
50 release (str, optional): Release version.
51 Allowed values are ``"release1"``, ``"release2"`` or ``"release3"``.
52 (default: ``"release1"``).
53 subset (str, optional): The subset of dataset to use. Valid options are ``"train"``, ``"dev"``,
54 and ``"test"`` for releases 1&2, ``None`` for release3. Defaults to ``"train"`` or ``None``.
55 download (bool, optional):
56 Whether to download the dataset if it is not found at root path. (default: ``False``).
57 """
58 def __init__(
59 self, root: str, release: str = "release1", subset: str = None, download: bool = False, audio_ext=".sph"
60 ) -> None:
61 self._ext_audio = audio_ext
62 if release in _RELEASE_CONFIGS.keys():
63 folder_in_archive = _RELEASE_CONFIGS[release]["folder_in_archive"]
64 url = _RELEASE_CONFIGS[release]["url"]
65 subset = subset if subset else _RELEASE_CONFIGS[release]["subset"]
66 else:
67 # Raise warning
68 raise RuntimeError(
69 "The release {} does not match any of the supported tedlium releases{} ".format(
70 release, _RELEASE_CONFIGS.keys(),
71 )
72 )
73 if subset not in _RELEASE_CONFIGS[release]["supported_subsets"]:
74 # Raise warning
75 raise RuntimeError(
76 "The subset {} does not match any of the supported tedlium subsets{} ".format(
77 subset, _RELEASE_CONFIGS[release]["supported_subsets"],
78 )
79 )
80
81 basename = os.path.basename(url)
82 archive = os.path.join(root, basename)
83
84 basename = basename.split(".")[0]
85
86 self._path = os.path.join(root, folder_in_archive, _RELEASE_CONFIGS[release]["data_path"])
87 if subset in ["train", "dev", "test"]:
88 self._path = os.path.join(self._path, subset)
89
90 if download:
91 if not os.path.isdir(self._path):
92 if not os.path.isfile(archive):
93 checksum = _RELEASE_CONFIGS[release]["checksum"]
94 download_url(url, root, hash_value=checksum)
95 extract_archive(archive)
96
97 # Create list for all samples
98 self._filelist = []
99 stm_path = os.path.join(self._path, "stm")
100 for file in sorted(os.listdir(stm_path)):
101 if file.endswith(".stm"):
102 stm_path = os.path.join(self._path, "stm", file)
103 with open(stm_path) as f:
104 l = len(f.readlines())
105 file = file.replace(".stm", "")
106 self._filelist.extend((file, line) for line in range(l))
107 # Create dict path for later read
108 self._dict_path = os.path.join(root, folder_in_archive, _RELEASE_CONFIGS[release]["dict"])
109 self._phoneme_dict = None
110
111 def _load_tedlium_item(self, fileid: str, line: int, path: str) -> Tuple[Tensor, int, str, int, int, int]:
112 """Loads a TEDLIUM dataset sample given a file name and corresponding sentence name.
113
114 Args:
115 fileid (str): File id to identify both text and audio files corresponding to the sample
116 line (int): Line identifier for the sample inside the text file
117 path (str): Dataset root path
118
119 Returns:
120 tuple: ``(waveform, sample_rate, transcript, talk_id, speaker_id, identifier)``
121 """
122 transcript_path = os.path.join(path, "stm", fileid)
123 with open(transcript_path + ".stm") as f:
124 transcript = f.readlines()[line]
125 talk_id, _, speaker_id, start_time, end_time, identifier, transcript = transcript.split(" ", 6)
126
127 wave_path = os.path.join(path, "sph", fileid)
128 waveform, sample_rate = self._load_audio(wave_path + self._ext_audio, start_time=start_time, end_time=end_time)
129
130 return (waveform, sample_rate, transcript, talk_id, speaker_id, identifier)
131
132 def _load_audio(self, path: str, start_time: float, end_time: float, sample_rate: int = 16000) -> [Tensor, int]:
133 """Default load function used in TEDLIUM dataset, you can overwrite this function to customize functionality
134 and load individual sentences from a full ted audio talk file.
135
136 Args:
137 path (str): Path to audio file
138 start_time (int, optional): Time in seconds where the sample sentence stars
139 end_time (int, optional): Time in seconds where the sample sentence finishes
140
141 Returns:
142 [Tensor, int]: Audio tensor representation and sample rate
143 """
144 start_time = int(float(start_time) * sample_rate)
145 end_time = int(float(end_time) * sample_rate)
146 if torchaudio.get_audio_backend() == "sox_io":
147 return torchaudio.load(path, frame_offset=start_time, num_frames=end_time - start_time)
148 return torchaudio.load(path)[:, start_time:end_time]
149
150 def __getitem__(self, n: int) -> Tuple[Tensor, int, str, int, int, int]:
151 """Load the n-th sample from the dataset.
152
153 Args:
154 n (int): The index of the sample to be loaded
155
156 Returns:
157 tuple: ``(waveform, sample_rate, transcript, talk_id, speaker_id, identifier)``
158 """
159 fileid, line = self._filelist[n]
160 return self._load_tedlium_item(fileid, line, self._path)
161
162 def __len__(self) -> int:
163 """TEDLIUM dataset custom function overwritting len default behaviour.
164
165 Returns:
166 int: TEDLIUM dataset length
167 """
168 return len(self._filelist)
169
170 @property
171 def phoneme_dict(self):
172 """dict[str, tuple[str]]: Phonemes. Mapping from word to tuple of phonemes.
173 Note that some words have empty phonemes.
174 """
175 # Read phoneme dictionary
176 if not self._phoneme_dict:
177 self._phoneme_dict = {}
178 with open(self._dict_path, "r", encoding="utf-8") as f:
179 for line in f.readlines():
180 content = line.strip().split()
181 self._phoneme_dict[content[0]] = tuple(content[1:]) # content[1:] can be empty list
182 return self._phoneme_dict.copy()
183
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/torchaudio/datasets/tedlium.py b/torchaudio/datasets/tedlium.py
--- a/torchaudio/datasets/tedlium.py
+++ b/torchaudio/datasets/tedlium.py
@@ -143,9 +143,14 @@
"""
start_time = int(float(start_time) * sample_rate)
end_time = int(float(end_time) * sample_rate)
- if torchaudio.get_audio_backend() == "sox_io":
- return torchaudio.load(path, frame_offset=start_time, num_frames=end_time - start_time)
- return torchaudio.load(path)[:, start_time:end_time]
+
+ backend = torchaudio.get_audio_backend()
+ if backend == "sox" or (backend == "soundfile" and torchaudio.USE_SOUNDFILE_LEGACY_INTERFACE):
+ kwargs = {"offset": start_time, "num_frames": end_time - start_time}
+ else:
+ kwargs = {"frame_offset": start_time, "num_frames": end_time - start_time}
+
+ return torchaudio.load(path, **kwargs)
def __getitem__(self, n: int) -> Tuple[Tensor, int, str, int, int, int]:
"""Load the n-th sample from the dataset.
|
{"golden_diff": "diff --git a/torchaudio/datasets/tedlium.py b/torchaudio/datasets/tedlium.py\n--- a/torchaudio/datasets/tedlium.py\n+++ b/torchaudio/datasets/tedlium.py\n@@ -143,9 +143,14 @@\n \"\"\"\n start_time = int(float(start_time) * sample_rate)\n end_time = int(float(end_time) * sample_rate)\n- if torchaudio.get_audio_backend() == \"sox_io\":\n- return torchaudio.load(path, frame_offset=start_time, num_frames=end_time - start_time)\n- return torchaudio.load(path)[:, start_time:end_time]\n+\n+ backend = torchaudio.get_audio_backend()\n+ if backend == \"sox\" or (backend == \"soundfile\" and torchaudio.USE_SOUNDFILE_LEGACY_INTERFACE):\n+ kwargs = {\"offset\": start_time, \"num_frames\": end_time - start_time}\n+ else:\n+ kwargs = {\"frame_offset\": start_time, \"num_frames\": end_time - start_time}\n+\n+ return torchaudio.load(path, **kwargs)\n \n def __getitem__(self, n: int) -> Tuple[Tensor, int, str, int, int, int]:\n \"\"\"Load the n-th sample from the dataset.\n", "issue": "tedlium test failing in windows on gpu in CircleCI\ntedlium test failing in windows on gpu, see [here](https://app.circleci.com/pipelines/github/pytorch/audio/3455/workflows/dda668b7-25c3-48e9-bccc-d8a2c7fd6338/jobs/100751) from [here](https://app.circleci.com/pipelines/github/pytorch/audio/3455/workflows/dda668b7-25c3-48e9-bccc-d8a2c7fd6338). This may have been obscured by #929.\r\n\r\n```\r\nself = <torchaudio_unittest.datasets.tedlium_test.TestTedlium testMethod=test_tedlium_release1>\r\n\r\n def test_tedlium_release1(self):\r\n release = \"release1\"\r\n dataset = tedlium.TEDLIUM(self.root_dir, release=release)\r\n num_samples = 0\r\n> for i, (data, sample_rate, transcript, talk_id, speaker_id, identifier) in enumerate(dataset):\r\n\r\ntorchaudio_unittest\\datasets\\tedlium_test.py:99: \r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\r\n..\\env\\lib\\site-packages\\torchaudio-0.7.0a0+3250d3d-py3.8.egg\\torchaudio\\datasets\\tedlium.py:184: in __getitem__\r\n return self._load_tedlium_item(fileid, line, self._path)\r\n..\\env\\lib\\site-packages\\torchaudio-0.7.0a0+3250d3d-py3.8.egg\\torchaudio\\datasets\\tedlium.py:151: in _load_tedlium_item\r\n waveform, sample_rate = self._load_audio(wave_path + self._ext_audio, start_time=start_time, end_time=end_time)\r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\r\n\r\nself = <torchaudio.datasets.tedlium.TEDLIUM object at 0x000002883EC0CF70>\r\npath = 'C:\\\\Users\\\\circleci\\\\AppData\\\\Local\\\\Temp\\\\tmpzvh9jdr_\\\\tedlium\\\\TEDLIUM_release1\\\\train\\\\sph\\\\release1.sph'\r\nstart_time = 0, end_time = 32000, sample_rate = 16000\r\n\r\n def _load_audio(self, path: str, start_time: float, end_time: float, sample_rate: int = 16000) -> [Tensor, int]:\r\n \"\"\"Default load function used in TEDLIUM dataset, you can overwrite this function to customize functionality\r\n and load individual sentences from a full ted audio talk file.\r\n \r\n Args:\r\n path (str): Path to audio file\r\n start_time (int, optional): Time in seconds where the sample sentence stars\r\n end_time (int, optional): Time in seconds where the sample sentence finishes\r\n \r\n Returns:\r\n [Tensor, int]: Audio tensor representation and sample rate\r\n \"\"\"\r\n start_time = int(float(start_time) * sample_rate)\r\n end_time = int(float(end_time) * sample_rate)\r\n if torchaudio.get_audio_backend() == \"sox_io\":\r\n return torchaudio.load(path, frame_offset=start_time, num_frames=end_time - start_time)\r\n> return torchaudio.load(path)[:, start_time:end_time]\r\nE TypeError: tuple indices must be integers or slices, not tuple\r\n\r\n..\\env\\lib\\site-packages\\torchaudio-0.7.0a0+3250d3d-py3.8.egg\\torchaudio\\datasets\\tedlium.py:171: TypeError\r\n```\r\n\r\nThoughts?\n", "before_files": [{"content": "import os\nfrom typing import Tuple\n\nimport torchaudio\nfrom torch import Tensor\nfrom torch.utils.data import Dataset\nfrom torchaudio.datasets.utils import (\n download_url,\n extract_archive,\n)\n\n\n_RELEASE_CONFIGS = {\n \"release1\": {\n \"folder_in_archive\": \"TEDLIUM_release1\",\n \"url\": \"http://www.openslr.org/resources/7/TEDLIUM_release1.tar.gz\",\n \"checksum\": \"30301975fd8c5cac4040c261c0852f57cfa8adbbad2ce78e77e4986957445f27\",\n \"data_path\": \"\",\n \"subset\": \"train\",\n \"supported_subsets\": [\"train\", \"test\", \"dev\"],\n \"dict\": \"TEDLIUM.150K.dic\",\n },\n \"release2\": {\n \"folder_in_archive\": \"TEDLIUM_release2\",\n \"url\": \"http://www.openslr.org/resources/19/TEDLIUM_release2.tar.gz\",\n \"checksum\": \"93281b5fcaaae5c88671c9d000b443cb3c7ea3499ad12010b3934ca41a7b9c58\",\n \"data_path\": \"\",\n \"subset\": \"train\",\n \"supported_subsets\": [\"train\", \"test\", \"dev\"],\n \"dict\": \"TEDLIUM.152k.dic\",\n },\n \"release3\": {\n \"folder_in_archive\": \"TEDLIUM_release-3\",\n \"url\": \"http://www.openslr.org/resources/51/TEDLIUM_release-3.tgz\",\n \"checksum\": \"ad1e454d14d1ad550bc2564c462d87c7a7ec83d4dc2b9210f22ab4973b9eccdb\",\n \"data_path\": \"data/\",\n \"subset\": None,\n \"supported_subsets\": [None],\n \"dict\": \"TEDLIUM.152k.dic\",\n },\n}\n\n\nclass TEDLIUM(Dataset):\n \"\"\"\n Create a Dataset for Tedlium. It supports releases 1,2 and 3.\n\n Args:\n root (str): Path to the directory where the dataset is found or downloaded.\n release (str, optional): Release version.\n Allowed values are ``\"release1\"``, ``\"release2\"`` or ``\"release3\"``.\n (default: ``\"release1\"``).\n subset (str, optional): The subset of dataset to use. Valid options are ``\"train\"``, ``\"dev\"``,\n and ``\"test\"`` for releases 1&2, ``None`` for release3. Defaults to ``\"train\"`` or ``None``.\n download (bool, optional):\n Whether to download the dataset if it is not found at root path. (default: ``False``).\n \"\"\"\n def __init__(\n self, root: str, release: str = \"release1\", subset: str = None, download: bool = False, audio_ext=\".sph\"\n ) -> None:\n self._ext_audio = audio_ext\n if release in _RELEASE_CONFIGS.keys():\n folder_in_archive = _RELEASE_CONFIGS[release][\"folder_in_archive\"]\n url = _RELEASE_CONFIGS[release][\"url\"]\n subset = subset if subset else _RELEASE_CONFIGS[release][\"subset\"]\n else:\n # Raise warning\n raise RuntimeError(\n \"The release {} does not match any of the supported tedlium releases{} \".format(\n release, _RELEASE_CONFIGS.keys(),\n )\n )\n if subset not in _RELEASE_CONFIGS[release][\"supported_subsets\"]:\n # Raise warning\n raise RuntimeError(\n \"The subset {} does not match any of the supported tedlium subsets{} \".format(\n subset, _RELEASE_CONFIGS[release][\"supported_subsets\"],\n )\n )\n\n basename = os.path.basename(url)\n archive = os.path.join(root, basename)\n\n basename = basename.split(\".\")[0]\n\n self._path = os.path.join(root, folder_in_archive, _RELEASE_CONFIGS[release][\"data_path\"])\n if subset in [\"train\", \"dev\", \"test\"]:\n self._path = os.path.join(self._path, subset)\n\n if download:\n if not os.path.isdir(self._path):\n if not os.path.isfile(archive):\n checksum = _RELEASE_CONFIGS[release][\"checksum\"]\n download_url(url, root, hash_value=checksum)\n extract_archive(archive)\n\n # Create list for all samples\n self._filelist = []\n stm_path = os.path.join(self._path, \"stm\")\n for file in sorted(os.listdir(stm_path)):\n if file.endswith(\".stm\"):\n stm_path = os.path.join(self._path, \"stm\", file)\n with open(stm_path) as f:\n l = len(f.readlines())\n file = file.replace(\".stm\", \"\")\n self._filelist.extend((file, line) for line in range(l))\n # Create dict path for later read\n self._dict_path = os.path.join(root, folder_in_archive, _RELEASE_CONFIGS[release][\"dict\"])\n self._phoneme_dict = None\n\n def _load_tedlium_item(self, fileid: str, line: int, path: str) -> Tuple[Tensor, int, str, int, int, int]:\n \"\"\"Loads a TEDLIUM dataset sample given a file name and corresponding sentence name.\n\n Args:\n fileid (str): File id to identify both text and audio files corresponding to the sample\n line (int): Line identifier for the sample inside the text file\n path (str): Dataset root path\n\n Returns:\n tuple: ``(waveform, sample_rate, transcript, talk_id, speaker_id, identifier)``\n \"\"\"\n transcript_path = os.path.join(path, \"stm\", fileid)\n with open(transcript_path + \".stm\") as f:\n transcript = f.readlines()[line]\n talk_id, _, speaker_id, start_time, end_time, identifier, transcript = transcript.split(\" \", 6)\n\n wave_path = os.path.join(path, \"sph\", fileid)\n waveform, sample_rate = self._load_audio(wave_path + self._ext_audio, start_time=start_time, end_time=end_time)\n\n return (waveform, sample_rate, transcript, talk_id, speaker_id, identifier)\n\n def _load_audio(self, path: str, start_time: float, end_time: float, sample_rate: int = 16000) -> [Tensor, int]:\n \"\"\"Default load function used in TEDLIUM dataset, you can overwrite this function to customize functionality\n and load individual sentences from a full ted audio talk file.\n\n Args:\n path (str): Path to audio file\n start_time (int, optional): Time in seconds where the sample sentence stars\n end_time (int, optional): Time in seconds where the sample sentence finishes\n\n Returns:\n [Tensor, int]: Audio tensor representation and sample rate\n \"\"\"\n start_time = int(float(start_time) * sample_rate)\n end_time = int(float(end_time) * sample_rate)\n if torchaudio.get_audio_backend() == \"sox_io\":\n return torchaudio.load(path, frame_offset=start_time, num_frames=end_time - start_time)\n return torchaudio.load(path)[:, start_time:end_time]\n\n def __getitem__(self, n: int) -> Tuple[Tensor, int, str, int, int, int]:\n \"\"\"Load the n-th sample from the dataset.\n\n Args:\n n (int): The index of the sample to be loaded\n\n Returns:\n tuple: ``(waveform, sample_rate, transcript, talk_id, speaker_id, identifier)``\n \"\"\"\n fileid, line = self._filelist[n]\n return self._load_tedlium_item(fileid, line, self._path)\n\n def __len__(self) -> int:\n \"\"\"TEDLIUM dataset custom function overwritting len default behaviour.\n\n Returns:\n int: TEDLIUM dataset length\n \"\"\"\n return len(self._filelist)\n\n @property\n def phoneme_dict(self):\n \"\"\"dict[str, tuple[str]]: Phonemes. Mapping from word to tuple of phonemes.\n Note that some words have empty phonemes.\n \"\"\"\n # Read phoneme dictionary\n if not self._phoneme_dict:\n self._phoneme_dict = {}\n with open(self._dict_path, \"r\", encoding=\"utf-8\") as f:\n for line in f.readlines():\n content = line.strip().split()\n self._phoneme_dict[content[0]] = tuple(content[1:]) # content[1:] can be empty list\n return self._phoneme_dict.copy()\n", "path": "torchaudio/datasets/tedlium.py"}], "after_files": [{"content": "import os\nfrom typing import Tuple\n\nimport torchaudio\nfrom torch import Tensor\nfrom torch.utils.data import Dataset\nfrom torchaudio.datasets.utils import (\n download_url,\n extract_archive,\n)\n\n\n_RELEASE_CONFIGS = {\n \"release1\": {\n \"folder_in_archive\": \"TEDLIUM_release1\",\n \"url\": \"http://www.openslr.org/resources/7/TEDLIUM_release1.tar.gz\",\n \"checksum\": \"30301975fd8c5cac4040c261c0852f57cfa8adbbad2ce78e77e4986957445f27\",\n \"data_path\": \"\",\n \"subset\": \"train\",\n \"supported_subsets\": [\"train\", \"test\", \"dev\"],\n \"dict\": \"TEDLIUM.150K.dic\",\n },\n \"release2\": {\n \"folder_in_archive\": \"TEDLIUM_release2\",\n \"url\": \"http://www.openslr.org/resources/19/TEDLIUM_release2.tar.gz\",\n \"checksum\": \"93281b5fcaaae5c88671c9d000b443cb3c7ea3499ad12010b3934ca41a7b9c58\",\n \"data_path\": \"\",\n \"subset\": \"train\",\n \"supported_subsets\": [\"train\", \"test\", \"dev\"],\n \"dict\": \"TEDLIUM.152k.dic\",\n },\n \"release3\": {\n \"folder_in_archive\": \"TEDLIUM_release-3\",\n \"url\": \"http://www.openslr.org/resources/51/TEDLIUM_release-3.tgz\",\n \"checksum\": \"ad1e454d14d1ad550bc2564c462d87c7a7ec83d4dc2b9210f22ab4973b9eccdb\",\n \"data_path\": \"data/\",\n \"subset\": None,\n \"supported_subsets\": [None],\n \"dict\": \"TEDLIUM.152k.dic\",\n },\n}\n\n\nclass TEDLIUM(Dataset):\n \"\"\"\n Create a Dataset for Tedlium. It supports releases 1,2 and 3.\n\n Args:\n root (str): Path to the directory where the dataset is found or downloaded.\n release (str, optional): Release version.\n Allowed values are ``\"release1\"``, ``\"release2\"`` or ``\"release3\"``.\n (default: ``\"release1\"``).\n subset (str, optional): The subset of dataset to use. Valid options are ``\"train\"``, ``\"dev\"``,\n and ``\"test\"`` for releases 1&2, ``None`` for release3. Defaults to ``\"train\"`` or ``None``.\n download (bool, optional):\n Whether to download the dataset if it is not found at root path. (default: ``False``).\n \"\"\"\n def __init__(\n self, root: str, release: str = \"release1\", subset: str = None, download: bool = False, audio_ext=\".sph\"\n ) -> None:\n self._ext_audio = audio_ext\n if release in _RELEASE_CONFIGS.keys():\n folder_in_archive = _RELEASE_CONFIGS[release][\"folder_in_archive\"]\n url = _RELEASE_CONFIGS[release][\"url\"]\n subset = subset if subset else _RELEASE_CONFIGS[release][\"subset\"]\n else:\n # Raise warning\n raise RuntimeError(\n \"The release {} does not match any of the supported tedlium releases{} \".format(\n release, _RELEASE_CONFIGS.keys(),\n )\n )\n if subset not in _RELEASE_CONFIGS[release][\"supported_subsets\"]:\n # Raise warning\n raise RuntimeError(\n \"The subset {} does not match any of the supported tedlium subsets{} \".format(\n subset, _RELEASE_CONFIGS[release][\"supported_subsets\"],\n )\n )\n\n basename = os.path.basename(url)\n archive = os.path.join(root, basename)\n\n basename = basename.split(\".\")[0]\n\n self._path = os.path.join(root, folder_in_archive, _RELEASE_CONFIGS[release][\"data_path\"])\n if subset in [\"train\", \"dev\", \"test\"]:\n self._path = os.path.join(self._path, subset)\n\n if download:\n if not os.path.isdir(self._path):\n if not os.path.isfile(archive):\n checksum = _RELEASE_CONFIGS[release][\"checksum\"]\n download_url(url, root, hash_value=checksum)\n extract_archive(archive)\n\n # Create list for all samples\n self._filelist = []\n stm_path = os.path.join(self._path, \"stm\")\n for file in sorted(os.listdir(stm_path)):\n if file.endswith(\".stm\"):\n stm_path = os.path.join(self._path, \"stm\", file)\n with open(stm_path) as f:\n l = len(f.readlines())\n file = file.replace(\".stm\", \"\")\n self._filelist.extend((file, line) for line in range(l))\n # Create dict path for later read\n self._dict_path = os.path.join(root, folder_in_archive, _RELEASE_CONFIGS[release][\"dict\"])\n self._phoneme_dict = None\n\n def _load_tedlium_item(self, fileid: str, line: int, path: str) -> Tuple[Tensor, int, str, int, int, int]:\n \"\"\"Loads a TEDLIUM dataset sample given a file name and corresponding sentence name.\n\n Args:\n fileid (str): File id to identify both text and audio files corresponding to the sample\n line (int): Line identifier for the sample inside the text file\n path (str): Dataset root path\n\n Returns:\n tuple: ``(waveform, sample_rate, transcript, talk_id, speaker_id, identifier)``\n \"\"\"\n transcript_path = os.path.join(path, \"stm\", fileid)\n with open(transcript_path + \".stm\") as f:\n transcript = f.readlines()[line]\n talk_id, _, speaker_id, start_time, end_time, identifier, transcript = transcript.split(\" \", 6)\n\n wave_path = os.path.join(path, \"sph\", fileid)\n waveform, sample_rate = self._load_audio(wave_path + self._ext_audio, start_time=start_time, end_time=end_time)\n\n return (waveform, sample_rate, transcript, talk_id, speaker_id, identifier)\n\n def _load_audio(self, path: str, start_time: float, end_time: float, sample_rate: int = 16000) -> [Tensor, int]:\n \"\"\"Default load function used in TEDLIUM dataset, you can overwrite this function to customize functionality\n and load individual sentences from a full ted audio talk file.\n\n Args:\n path (str): Path to audio file\n start_time (int, optional): Time in seconds where the sample sentence stars\n end_time (int, optional): Time in seconds where the sample sentence finishes\n\n Returns:\n [Tensor, int]: Audio tensor representation and sample rate\n \"\"\"\n start_time = int(float(start_time) * sample_rate)\n end_time = int(float(end_time) * sample_rate)\n\n backend = torchaudio.get_audio_backend()\n if backend == \"sox\" or (backend == \"soundfile\" and torchaudio.USE_SOUNDFILE_LEGACY_INTERFACE):\n kwargs = {\"offset\": start_time, \"num_frames\": end_time - start_time}\n else:\n kwargs = {\"frame_offset\": start_time, \"num_frames\": end_time - start_time}\n\n return torchaudio.load(path, **kwargs)\n\n def __getitem__(self, n: int) -> Tuple[Tensor, int, str, int, int, int]:\n \"\"\"Load the n-th sample from the dataset.\n\n Args:\n n (int): The index of the sample to be loaded\n\n Returns:\n tuple: ``(waveform, sample_rate, transcript, talk_id, speaker_id, identifier)``\n \"\"\"\n fileid, line = self._filelist[n]\n return self._load_tedlium_item(fileid, line, self._path)\n\n def __len__(self) -> int:\n \"\"\"TEDLIUM dataset custom function overwritting len default behaviour.\n\n Returns:\n int: TEDLIUM dataset length\n \"\"\"\n return len(self._filelist)\n\n @property\n def phoneme_dict(self):\n \"\"\"dict[str, tuple[str]]: Phonemes. Mapping from word to tuple of phonemes.\n Note that some words have empty phonemes.\n \"\"\"\n # Read phoneme dictionary\n if not self._phoneme_dict:\n self._phoneme_dict = {}\n with open(self._dict_path, \"r\", encoding=\"utf-8\") as f:\n for line in f.readlines():\n content = line.strip().split()\n self._phoneme_dict[content[0]] = tuple(content[1:]) # content[1:] can be empty list\n return self._phoneme_dict.copy()\n", "path": "torchaudio/datasets/tedlium.py"}]}
| 3,578 | 291 |
gh_patches_debug_9946
|
rasdani/github-patches
|
git_diff
|
google__flax-365
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Syntax warning due to comparison of literals using is in Python 3.8
### Problem you have encountered:
Syntax warning due to comparison of literals using is in Python 3.8 to use != .
### Steps to reproduce:
```
find . -iname '*.py' | grep -v example | grep -v doc | xargs -P4 -I{} python3.8 -Wall -m py_compile {}
./flax/optim/weight_norm.py:150: SyntaxWarning: "is not" with a literal. Did you mean "!="?
if decay is not 0:
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `flax/optim/weight_norm.py`
Content:
```
1 # Copyright 2020 The Flax Authors.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # Lint as: python3
16 from typing import Any
17
18 from .. import struct
19
20 import jax
21 import jax.numpy as jnp
22
23 import numpy as onp
24
25 from .base import OptimizerDef
26
27
28 @struct.dataclass
29 class _WeightNormHyperParams:
30 inner: Any
31 wn_decay: onp.ndarray
32 wn_eps: onp.ndarray
33
34
35 @struct.dataclass
36 class _WeightNormParamState:
37 direction_state: Any
38 scale_state: Any
39 mult: onp.ndarray
40
41
42 class WeightNorm(OptimizerDef):
43 """Adds weight normalization to an optimizer def.
44
45 See https://arxiv.org/abs/1602.07868
46 """
47
48 def __init__(self, wrapped_optimizer, wn_decay=0, wn_eps=1e-8):
49 """Constructor for a WeightNorm optimizer.
50
51 Weight vectors are decomposed as w = g * v/||v||_2, for scalar
52 scale parameter g, and raw weight vector v. The original optimizer is then
53 applied to the (g,v) parameterization and the updated parameters are
54 transformed back to w-space, i.e.
55 w,state --> (g,v) --(original optimizer)--> (g',v') --> w',state'
56
57 We assume the output axis of any kernel matrix is the last one,
58 as per the Tensorflow convention.
59
60 Args:
61 wrapped_optimizer: another OptimizerDef
62 wn_decay: apply l2 decay to the unnoralized weight vector
63 wn_eps: additive constant for stability of
64 the normalization (default: 1e-8).
65 """
66 hps = _WeightNormHyperParams(
67 wrapped_optimizer.hyper_params, wn_decay, wn_eps)
68 super().__init__(hps)
69 self.wrapped_optimizer = wrapped_optimizer
70
71 def update_hyper_params(self, **hyper_param_overrides):
72 decay = hyper_param_overrides.pop('wn_decay', self.hyper_params.wn_decay)
73 eps = hyper_param_overrides.pop('wn_eps', self.hyper_params.wn_eps)
74 inner = self.wrapped_optimizer.update_hyper_params(
75 **hyper_param_overrides)
76 return self.hyper_params.replace(inner=inner, wn_decay=decay, wn_eps=eps)
77
78 def init_state(self, params):
79 leaves, treedef = jax.tree_flatten(params)
80 directions, scales = zip(*(self._split_param(p) for p in leaves))
81 directions = treedef.unflatten(directions)
82 scales = treedef.unflatten(scales)
83 wn_params = {'direction': directions, 'scale': scales}
84 state = self.wrapped_optimizer.init_state(wn_params)
85 direction_state = state.param_states['direction']
86 scale_state = state.param_states['scale']
87 param_states = jax.tree_multimap(
88 lambda _, *args: _WeightNormParamState(*args),
89 params, direction_state, scale_state, scales)
90 return state.replace(param_states=param_states)
91
92 def apply_gradient(self, hyper_params, params, state, grads):
93 p_leaves, treedef = jax.tree_flatten(params)
94 s_leaves = treedef.flatten_up_to(state.param_states)
95 g_leaves = treedef.flatten_up_to(grads)
96 split_grads = zip(*(self._split_grad(p, s, g, hyper_params.wn_decay)
97 for p, s, g in zip(p_leaves, s_leaves, g_leaves)))
98 d_p, d_s, d_g, s_p, s_s, s_g = [
99 jax.tree_unflatten(treedef, x) for x in split_grads]
100 wn_params = {'direction': d_p, 'scale': s_p}
101 wn_state = {'direction': d_s, 'scale': s_s}
102 wn_grads = {'direction': d_g, 'scale': s_g}
103 new_wn_params, new_state = self.wrapped_optimizer.apply_gradient(
104 hyper_params.inner, wn_params,
105 state.replace(param_states=wn_state), wn_grads)
106
107 directions = treedef.flatten_up_to(new_wn_params['direction'])
108 scales = treedef.flatten_up_to(new_wn_params['scale'])
109 new_params, mults = zip(*(self._merge_param(d, s, hyper_params.wn_eps)
110 for d, s in zip(directions, scales)))
111 new_params = jax.tree_unflatten(treedef, new_params)
112 mults = jax.tree_unflatten(treedef, mults)
113
114 direction_state = new_state.param_states['direction']
115 scale_state = new_state.param_states['scale']
116 param_states = jax.tree_multimap(
117 lambda _, *args: _WeightNormParamState(*args),
118 params, direction_state, scale_state, mults)
119 return new_params, new_state.replace(param_states=param_states)
120
121 def _split_param(self, param):
122 if param.size > param.shape[-1]:
123 scale = jnp.sqrt(jnp.square(param).sum(
124 tuple(range(param.ndim-1)), keepdims=True))
125 direction = param / scale
126 return direction, scale
127 else:
128 return param, ()
129
130 def _merge_param(self, direction, scale, eps):
131 if direction.size > direction.shape[-1]:
132 norm = jnp.sqrt(jnp.square(direction).sum(
133 tuple(range(direction.ndim - 1)), keepdims=True))
134 mult = scale / (eps + norm)
135 param = direction * mult
136 return param, mult
137 else:
138 return direction, ()
139
140 def _split_grad(self, param, state, grad, decay):
141 """Split the gradient for the direction and scale."""
142 if param.size > param.shape[-1]:
143 red_dims = tuple(range(param.ndim-1))
144 direction = param / state.mult
145 norm = jnp.sqrt(jnp.square(param).sum(red_dims, keepdims=True))
146 scale = norm * jnp.sign(state.mult)
147 scale_grad = jnp.sum(
148 grad * direction, axis=red_dims, keepdims=True)
149 direction_grad = state.mult * (grad - scale_grad * direction)
150 if decay is not 0:
151 direction_grad = direction_grad + decay * direction
152 direction_info = direction, state.direction_state, direction_grad
153 scale_info = scale, state.scale_state, scale_grad
154 return direction_info + scale_info
155 else:
156 return (param, state.direction_state, grad, (), (), ())
157
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/flax/optim/weight_norm.py b/flax/optim/weight_norm.py
--- a/flax/optim/weight_norm.py
+++ b/flax/optim/weight_norm.py
@@ -147,7 +147,7 @@
scale_grad = jnp.sum(
grad * direction, axis=red_dims, keepdims=True)
direction_grad = state.mult * (grad - scale_grad * direction)
- if decay is not 0:
+ if decay != 0:
direction_grad = direction_grad + decay * direction
direction_info = direction, state.direction_state, direction_grad
scale_info = scale, state.scale_state, scale_grad
|
{"golden_diff": "diff --git a/flax/optim/weight_norm.py b/flax/optim/weight_norm.py\n--- a/flax/optim/weight_norm.py\n+++ b/flax/optim/weight_norm.py\n@@ -147,7 +147,7 @@\n scale_grad = jnp.sum(\n grad * direction, axis=red_dims, keepdims=True)\n direction_grad = state.mult * (grad - scale_grad * direction)\n- if decay is not 0:\n+ if decay != 0:\n direction_grad = direction_grad + decay * direction\n direction_info = direction, state.direction_state, direction_grad\n scale_info = scale, state.scale_state, scale_grad\n", "issue": "Syntax warning due to comparison of literals using is in Python 3.8\n### Problem you have encountered:\r\n\r\nSyntax warning due to comparison of literals using is in Python 3.8 to use != .\r\n\r\n### Steps to reproduce:\r\n\r\n```\r\nfind . -iname '*.py' | grep -v example | grep -v doc | xargs -P4 -I{} python3.8 -Wall -m py_compile {}\r\n./flax/optim/weight_norm.py:150: SyntaxWarning: \"is not\" with a literal. Did you mean \"!=\"?\r\n if decay is not 0:\r\n```\n", "before_files": [{"content": "# Copyright 2020 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Lint as: python3\nfrom typing import Any\n\nfrom .. import struct\n\nimport jax\nimport jax.numpy as jnp\n\nimport numpy as onp\n\nfrom .base import OptimizerDef\n\n\[email protected]\nclass _WeightNormHyperParams:\n inner: Any\n wn_decay: onp.ndarray\n wn_eps: onp.ndarray\n\n\[email protected]\nclass _WeightNormParamState:\n direction_state: Any\n scale_state: Any\n mult: onp.ndarray\n\n\nclass WeightNorm(OptimizerDef):\n \"\"\"Adds weight normalization to an optimizer def.\n\n See https://arxiv.org/abs/1602.07868\n \"\"\"\n\n def __init__(self, wrapped_optimizer, wn_decay=0, wn_eps=1e-8):\n \"\"\"Constructor for a WeightNorm optimizer.\n\n Weight vectors are decomposed as w = g * v/||v||_2, for scalar\n scale parameter g, and raw weight vector v. The original optimizer is then\n applied to the (g,v) parameterization and the updated parameters are\n transformed back to w-space, i.e.\n w,state --> (g,v) --(original optimizer)--> (g',v') --> w',state'\n\n We assume the output axis of any kernel matrix is the last one,\n as per the Tensorflow convention.\n\n Args:\n wrapped_optimizer: another OptimizerDef\n wn_decay: apply l2 decay to the unnoralized weight vector\n wn_eps: additive constant for stability of\n the normalization (default: 1e-8).\n \"\"\"\n hps = _WeightNormHyperParams(\n wrapped_optimizer.hyper_params, wn_decay, wn_eps)\n super().__init__(hps)\n self.wrapped_optimizer = wrapped_optimizer\n\n def update_hyper_params(self, **hyper_param_overrides):\n decay = hyper_param_overrides.pop('wn_decay', self.hyper_params.wn_decay)\n eps = hyper_param_overrides.pop('wn_eps', self.hyper_params.wn_eps)\n inner = self.wrapped_optimizer.update_hyper_params(\n **hyper_param_overrides)\n return self.hyper_params.replace(inner=inner, wn_decay=decay, wn_eps=eps)\n\n def init_state(self, params):\n leaves, treedef = jax.tree_flatten(params)\n directions, scales = zip(*(self._split_param(p) for p in leaves))\n directions = treedef.unflatten(directions)\n scales = treedef.unflatten(scales)\n wn_params = {'direction': directions, 'scale': scales}\n state = self.wrapped_optimizer.init_state(wn_params)\n direction_state = state.param_states['direction']\n scale_state = state.param_states['scale']\n param_states = jax.tree_multimap(\n lambda _, *args: _WeightNormParamState(*args),\n params, direction_state, scale_state, scales)\n return state.replace(param_states=param_states)\n\n def apply_gradient(self, hyper_params, params, state, grads):\n p_leaves, treedef = jax.tree_flatten(params)\n s_leaves = treedef.flatten_up_to(state.param_states)\n g_leaves = treedef.flatten_up_to(grads)\n split_grads = zip(*(self._split_grad(p, s, g, hyper_params.wn_decay)\n for p, s, g in zip(p_leaves, s_leaves, g_leaves)))\n d_p, d_s, d_g, s_p, s_s, s_g = [\n jax.tree_unflatten(treedef, x) for x in split_grads]\n wn_params = {'direction': d_p, 'scale': s_p}\n wn_state = {'direction': d_s, 'scale': s_s}\n wn_grads = {'direction': d_g, 'scale': s_g}\n new_wn_params, new_state = self.wrapped_optimizer.apply_gradient(\n hyper_params.inner, wn_params,\n state.replace(param_states=wn_state), wn_grads)\n\n directions = treedef.flatten_up_to(new_wn_params['direction'])\n scales = treedef.flatten_up_to(new_wn_params['scale'])\n new_params, mults = zip(*(self._merge_param(d, s, hyper_params.wn_eps)\n for d, s in zip(directions, scales)))\n new_params = jax.tree_unflatten(treedef, new_params)\n mults = jax.tree_unflatten(treedef, mults)\n\n direction_state = new_state.param_states['direction']\n scale_state = new_state.param_states['scale']\n param_states = jax.tree_multimap(\n lambda _, *args: _WeightNormParamState(*args),\n params, direction_state, scale_state, mults)\n return new_params, new_state.replace(param_states=param_states)\n\n def _split_param(self, param):\n if param.size > param.shape[-1]:\n scale = jnp.sqrt(jnp.square(param).sum(\n tuple(range(param.ndim-1)), keepdims=True))\n direction = param / scale\n return direction, scale\n else:\n return param, ()\n\n def _merge_param(self, direction, scale, eps):\n if direction.size > direction.shape[-1]:\n norm = jnp.sqrt(jnp.square(direction).sum(\n tuple(range(direction.ndim - 1)), keepdims=True))\n mult = scale / (eps + norm)\n param = direction * mult\n return param, mult\n else:\n return direction, ()\n\n def _split_grad(self, param, state, grad, decay):\n \"\"\"Split the gradient for the direction and scale.\"\"\"\n if param.size > param.shape[-1]:\n red_dims = tuple(range(param.ndim-1))\n direction = param / state.mult\n norm = jnp.sqrt(jnp.square(param).sum(red_dims, keepdims=True))\n scale = norm * jnp.sign(state.mult)\n scale_grad = jnp.sum(\n grad * direction, axis=red_dims, keepdims=True)\n direction_grad = state.mult * (grad - scale_grad * direction)\n if decay is not 0:\n direction_grad = direction_grad + decay * direction\n direction_info = direction, state.direction_state, direction_grad\n scale_info = scale, state.scale_state, scale_grad\n return direction_info + scale_info\n else:\n return (param, state.direction_state, grad, (), (), ())\n", "path": "flax/optim/weight_norm.py"}], "after_files": [{"content": "# Copyright 2020 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Lint as: python3\nfrom typing import Any\n\nfrom .. import struct\n\nimport jax\nimport jax.numpy as jnp\n\nimport numpy as onp\n\nfrom .base import OptimizerDef\n\n\[email protected]\nclass _WeightNormHyperParams:\n inner: Any\n wn_decay: onp.ndarray\n wn_eps: onp.ndarray\n\n\[email protected]\nclass _WeightNormParamState:\n direction_state: Any\n scale_state: Any\n mult: onp.ndarray\n\n\nclass WeightNorm(OptimizerDef):\n \"\"\"Adds weight normalization to an optimizer def.\n\n See https://arxiv.org/abs/1602.07868\n \"\"\"\n\n def __init__(self, wrapped_optimizer, wn_decay=0, wn_eps=1e-8):\n \"\"\"Constructor for a WeightNorm optimizer.\n\n Weight vectors are decomposed as w = g * v/||v||_2, for scalar\n scale parameter g, and raw weight vector v. The original optimizer is then\n applied to the (g,v) parameterization and the updated parameters are\n transformed back to w-space, i.e.\n w,state --> (g,v) --(original optimizer)--> (g',v') --> w',state'\n\n We assume the output axis of any kernel matrix is the last one,\n as per the Tensorflow convention.\n\n Args:\n wrapped_optimizer: another OptimizerDef\n wn_decay: apply l2 decay to the unnoralized weight vector\n wn_eps: additive constant for stability of\n the normalization (default: 1e-8).\n \"\"\"\n hps = _WeightNormHyperParams(\n wrapped_optimizer.hyper_params, wn_decay, wn_eps)\n super().__init__(hps)\n self.wrapped_optimizer = wrapped_optimizer\n\n def update_hyper_params(self, **hyper_param_overrides):\n decay = hyper_param_overrides.pop('wn_decay', self.hyper_params.wn_decay)\n eps = hyper_param_overrides.pop('wn_eps', self.hyper_params.wn_eps)\n inner = self.wrapped_optimizer.update_hyper_params(\n **hyper_param_overrides)\n return self.hyper_params.replace(inner=inner, wn_decay=decay, wn_eps=eps)\n\n def init_state(self, params):\n leaves, treedef = jax.tree_flatten(params)\n directions, scales = zip(*(self._split_param(p) for p in leaves))\n directions = treedef.unflatten(directions)\n scales = treedef.unflatten(scales)\n wn_params = {'direction': directions, 'scale': scales}\n state = self.wrapped_optimizer.init_state(wn_params)\n direction_state = state.param_states['direction']\n scale_state = state.param_states['scale']\n param_states = jax.tree_multimap(\n lambda _, *args: _WeightNormParamState(*args),\n params, direction_state, scale_state, scales)\n return state.replace(param_states=param_states)\n\n def apply_gradient(self, hyper_params, params, state, grads):\n p_leaves, treedef = jax.tree_flatten(params)\n s_leaves = treedef.flatten_up_to(state.param_states)\n g_leaves = treedef.flatten_up_to(grads)\n split_grads = zip(*(self._split_grad(p, s, g, hyper_params.wn_decay)\n for p, s, g in zip(p_leaves, s_leaves, g_leaves)))\n d_p, d_s, d_g, s_p, s_s, s_g = [\n jax.tree_unflatten(treedef, x) for x in split_grads]\n wn_params = {'direction': d_p, 'scale': s_p}\n wn_state = {'direction': d_s, 'scale': s_s}\n wn_grads = {'direction': d_g, 'scale': s_g}\n new_wn_params, new_state = self.wrapped_optimizer.apply_gradient(\n hyper_params.inner, wn_params,\n state.replace(param_states=wn_state), wn_grads)\n\n directions = treedef.flatten_up_to(new_wn_params['direction'])\n scales = treedef.flatten_up_to(new_wn_params['scale'])\n new_params, mults = zip(*(self._merge_param(d, s, hyper_params.wn_eps)\n for d, s in zip(directions, scales)))\n new_params = jax.tree_unflatten(treedef, new_params)\n mults = jax.tree_unflatten(treedef, mults)\n\n direction_state = new_state.param_states['direction']\n scale_state = new_state.param_states['scale']\n param_states = jax.tree_multimap(\n lambda _, *args: _WeightNormParamState(*args),\n params, direction_state, scale_state, mults)\n return new_params, new_state.replace(param_states=param_states)\n\n def _split_param(self, param):\n if param.size > param.shape[-1]:\n scale = jnp.sqrt(jnp.square(param).sum(\n tuple(range(param.ndim-1)), keepdims=True))\n direction = param / scale\n return direction, scale\n else:\n return param, ()\n\n def _merge_param(self, direction, scale, eps):\n if direction.size > direction.shape[-1]:\n norm = jnp.sqrt(jnp.square(direction).sum(\n tuple(range(direction.ndim - 1)), keepdims=True))\n mult = scale / (eps + norm)\n param = direction * mult\n return param, mult\n else:\n return direction, ()\n\n def _split_grad(self, param, state, grad, decay):\n \"\"\"Split the gradient for the direction and scale.\"\"\"\n if param.size > param.shape[-1]:\n red_dims = tuple(range(param.ndim-1))\n direction = param / state.mult\n norm = jnp.sqrt(jnp.square(param).sum(red_dims, keepdims=True))\n scale = norm * jnp.sign(state.mult)\n scale_grad = jnp.sum(\n grad * direction, axis=red_dims, keepdims=True)\n direction_grad = state.mult * (grad - scale_grad * direction)\n if decay != 0:\n direction_grad = direction_grad + decay * direction\n direction_info = direction, state.direction_state, direction_grad\n scale_info = scale, state.scale_state, scale_grad\n return direction_info + scale_info\n else:\n return (param, state.direction_state, grad, (), (), ())\n", "path": "flax/optim/weight_norm.py"}]}
| 2,268 | 149 |
gh_patches_debug_14013
|
rasdani/github-patches
|
git_diff
|
mitmproxy__mitmproxy-2991
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Keybinding ‘M’ erroneously highlighted
##### Steps to reproduce the problem:
1. run `mitmproxy`
2. type `O` to open the options editor
3. move the cursor down to `default_contentview` and press Enter
4. type `4` to select `json`
5. type `q` to return to the flows list
6. the status bar now says “[M:json]” with the ‘M’ highlighted, as if it were a shortcut
7. however, typing `M` invokes “Toggle viewing marked flows” instead
##### Any other comments? What have you tried so far?
This is similar to #2953.
##### System information
Mitmproxy: 3.0.3 binary
Python: 3.5.2
OpenSSL: OpenSSL 1.1.0g 2 Nov 2017
Platform: Linux-4.4.0-116-generic-x86_64-with-debian-stretch-sid
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mitmproxy/tools/console/statusbar.py`
Content:
```
1 import os.path
2
3 import urwid
4
5 from mitmproxy.tools.console import common
6 from mitmproxy.tools.console import signals
7 from mitmproxy.tools.console import commandexecutor
8 import mitmproxy.tools.console.master # noqa
9 from mitmproxy.tools.console.commander import commander
10
11
12 class PromptPath:
13 def __init__(self, callback, args):
14 self.callback, self.args = callback, args
15
16 def __call__(self, pth):
17 if not pth:
18 return
19 pth = os.path.expanduser(pth)
20 try:
21 return self.callback(pth, *self.args)
22 except IOError as v:
23 signals.status_message.send(message=v.strerror)
24
25
26 class PromptStub:
27 def __init__(self, callback, args):
28 self.callback, self.args = callback, args
29
30 def __call__(self, txt):
31 return self.callback(txt, *self.args)
32
33
34 class ActionBar(urwid.WidgetWrap):
35
36 def __init__(self, master):
37 self.master = master
38 urwid.WidgetWrap.__init__(self, None)
39 self.clear()
40 signals.status_message.connect(self.sig_message)
41 signals.status_prompt.connect(self.sig_prompt)
42 signals.status_prompt_onekey.connect(self.sig_prompt_onekey)
43 signals.status_prompt_command.connect(self.sig_prompt_command)
44
45 self.prompting = None
46
47 self.onekey = False
48
49 def sig_message(self, sender, message, expire=1):
50 if self.prompting:
51 return
52 cols, _ = self.master.ui.get_cols_rows()
53 w = urwid.Text(self.shorten_message(message, cols))
54 self._w = w
55 if expire:
56 def cb(*args):
57 if w == self._w:
58 self.clear()
59 signals.call_in.send(seconds=expire, callback=cb)
60
61 def prep_prompt(self, p):
62 return p.strip() + ": "
63
64 def shorten_message(self, msg, max_width):
65 """
66 Shorten message so that it fits into a single line in the statusbar.
67 """
68 if isinstance(msg, tuple):
69 disp_attr, msg_text = msg
70 elif isinstance(msg, str):
71 disp_attr, msg_text = None, msg
72 else:
73 return msg
74 msg_end = "\u2026" # unicode ellipsis for the end of shortened message
75 prompt = "(more in eventlog)"
76
77 msg_lines = msg_text.split("\n")
78 first_line = msg_lines[0]
79 if len(msg_lines) > 1:
80 # First line of messages with a few lines must end with prompt.
81 line_length = len(first_line) + len(prompt)
82 else:
83 line_length = len(first_line)
84
85 if line_length > max_width:
86 shortening_index = max(0, max_width - len(prompt) - len(msg_end))
87 first_line = first_line[:shortening_index] + msg_end
88 else:
89 if len(msg_lines) == 1:
90 prompt = ""
91
92 return [(disp_attr, first_line), ("warn", prompt)]
93
94 def sig_prompt(self, sender, prompt, text, callback, args=()):
95 signals.focus.send(self, section="footer")
96 self._w = urwid.Edit(self.prep_prompt(prompt), text or "")
97 self.prompting = PromptStub(callback, args)
98
99 def sig_prompt_command(self, sender, partial=""):
100 signals.focus.send(self, section="footer")
101 self._w = commander.CommandEdit(self.master, partial)
102 self.prompting = commandexecutor.CommandExecutor(self.master)
103
104 def sig_prompt_onekey(self, sender, prompt, keys, callback, args=()):
105 """
106 Keys are a set of (word, key) tuples. The appropriate key in the
107 word is highlighted.
108 """
109 signals.focus.send(self, section="footer")
110 prompt = [prompt, " ("]
111 mkup = []
112 for i, e in enumerate(keys):
113 mkup.extend(common.highlight_key(e[0], e[1]))
114 if i < len(keys) - 1:
115 mkup.append(",")
116 prompt.extend(mkup)
117 prompt.append(")? ")
118 self.onekey = set(i[1] for i in keys)
119 self._w = urwid.Edit(prompt, "")
120 self.prompting = PromptStub(callback, args)
121
122 def selectable(self):
123 return True
124
125 def keypress(self, size, k):
126 if self.prompting:
127 if k == "esc":
128 self.prompt_done()
129 elif self.onekey:
130 if k == "enter":
131 self.prompt_done()
132 elif k in self.onekey:
133 self.prompt_execute(k)
134 elif k == "enter":
135 self.prompt_execute(self._w.get_edit_text())
136 else:
137 if common.is_keypress(k):
138 self._w.keypress(size, k)
139 else:
140 return k
141
142 def clear(self):
143 self._w = urwid.Text("")
144 self.prompting = None
145
146 def prompt_done(self):
147 self.prompting = None
148 self.onekey = False
149 signals.status_message.send(message="")
150 signals.focus.send(self, section="body")
151
152 def prompt_execute(self, txt):
153 p = self.prompting
154 self.prompt_done()
155 msg = p(txt)
156 if msg:
157 signals.status_message.send(message=msg, expire=1)
158
159
160 class StatusBar(urwid.WidgetWrap):
161 keyctx = ""
162
163 def __init__(
164 self, master: "mitmproxy.tools.console.master.ConsoleMaster"
165 ) -> None:
166 self.master = master
167 self.ib = urwid.WidgetWrap(urwid.Text(""))
168 self.ab = ActionBar(self.master)
169 super().__init__(urwid.Pile([self.ib, self.ab]))
170 signals.update_settings.connect(self.sig_update)
171 signals.flowlist_change.connect(self.sig_update)
172 master.options.changed.connect(self.sig_update)
173 master.view.focus.sig_change.connect(self.sig_update)
174 master.view.sig_view_add.connect(self.sig_update)
175 self.redraw()
176
177 def sig_update(self, sender, flow=None, updated=None):
178 self.redraw()
179
180 def keypress(self, *args, **kwargs):
181 return self.ab.keypress(*args, **kwargs)
182
183 def get_status(self):
184 r = []
185
186 sreplay = self.master.addons.get("serverplayback")
187 creplay = self.master.addons.get("clientplayback")
188
189 if len(self.master.options.setheaders):
190 r.append("[")
191 r.append(("heading_key", "H"))
192 r.append("eaders]")
193 if len(self.master.options.replacements):
194 r.append("[")
195 r.append(("heading_key", "R"))
196 r.append("eplacing]")
197 if creplay.count():
198 r.append("[")
199 r.append(("heading_key", "cplayback"))
200 r.append(":%s]" % creplay.count())
201 if sreplay.count():
202 r.append("[")
203 r.append(("heading_key", "splayback"))
204 r.append(":%s]" % sreplay.count())
205 if self.master.options.ignore_hosts:
206 r.append("[")
207 r.append(("heading_key", "I"))
208 r.append("gnore:%d]" % len(self.master.options.ignore_hosts))
209 if self.master.options.tcp_hosts:
210 r.append("[")
211 r.append(("heading_key", "T"))
212 r.append("CP:%d]" % len(self.master.options.tcp_hosts))
213 if self.master.options.intercept:
214 r.append("[")
215 if not self.master.options.intercept_active:
216 r.append("X")
217 r.append(("heading_key", "i"))
218 r.append(":%s]" % self.master.options.intercept)
219 if self.master.options.view_filter:
220 r.append("[")
221 r.append(("heading_key", "f"))
222 r.append(":%s]" % self.master.options.view_filter)
223 if self.master.options.stickycookie:
224 r.append("[")
225 r.append(("heading_key", "t"))
226 r.append(":%s]" % self.master.options.stickycookie)
227 if self.master.options.stickyauth:
228 r.append("[")
229 r.append(("heading_key", "u"))
230 r.append(":%s]" % self.master.options.stickyauth)
231 if self.master.options.console_default_contentview != "auto":
232 r.append("[")
233 r.append(("heading_key", "M"))
234 r.append(":%s]" % self.master.options.console_default_contentview)
235 if self.master.options.has_changed("view_order"):
236 r.append("[")
237 r.append(("heading_key", "o"))
238 r.append(":%s]" % self.master.options.view_order)
239
240 opts = []
241 if self.master.options.anticache:
242 opts.append("anticache")
243 if self.master.options.anticomp:
244 opts.append("anticomp")
245 if self.master.options.showhost:
246 opts.append("showhost")
247 if not self.master.options.server_replay_refresh:
248 opts.append("norefresh")
249 if self.master.options.server_replay_kill_extra:
250 opts.append("killextra")
251 if not self.master.options.upstream_cert:
252 opts.append("no-upstream-cert")
253 if self.master.options.console_focus_follow:
254 opts.append("following")
255 if self.master.options.stream_large_bodies:
256 opts.append(self.master.options.stream_large_bodies)
257
258 if opts:
259 r.append("[%s]" % (":".join(opts)))
260
261 if self.master.options.mode != "regular":
262 r.append("[%s]" % self.master.options.mode)
263 if self.master.options.scripts:
264 r.append("[scripts:%s]" % len(self.master.options.scripts))
265
266 if self.master.options.save_stream_file:
267 r.append("[W:%s]" % self.master.options.save_stream_file)
268
269 return r
270
271 def redraw(self):
272 fc = len(self.master.view)
273 if self.master.view.focus.flow is None:
274 offset = 0
275 else:
276 offset = self.master.view.focus.index + 1
277
278 if self.master.options.view_order_reversed:
279 arrow = common.SYMBOL_UP
280 else:
281 arrow = common.SYMBOL_DOWN
282
283 marked = ""
284 if self.master.view.show_marked:
285 marked = "M"
286
287 t = [
288 ('heading', ("%s %s [%s/%s]" % (arrow, marked, offset, fc)).ljust(11)),
289 ]
290
291 if self.master.options.server:
292 host = self.master.options.listen_host
293 if host == "0.0.0.0" or host == "":
294 host = "*"
295 boundaddr = "[%s:%s]" % (host, self.master.options.listen_port)
296 else:
297 boundaddr = ""
298 t.extend(self.get_status())
299 status = urwid.AttrWrap(urwid.Columns([
300 urwid.Text(t),
301 urwid.Text(boundaddr, align="right"),
302 ]), "heading")
303 self.ib._w = status
304
305 def selectable(self):
306 return True
307
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mitmproxy/tools/console/statusbar.py b/mitmproxy/tools/console/statusbar.py
--- a/mitmproxy/tools/console/statusbar.py
+++ b/mitmproxy/tools/console/statusbar.py
@@ -228,10 +228,8 @@
r.append("[")
r.append(("heading_key", "u"))
r.append(":%s]" % self.master.options.stickyauth)
- if self.master.options.console_default_contentview != "auto":
- r.append("[")
- r.append(("heading_key", "M"))
- r.append(":%s]" % self.master.options.console_default_contentview)
+ if self.master.options.console_default_contentview != 'auto':
+ r.append("[contentview:%s]" % (self.master.options.console_default_contentview))
if self.master.options.has_changed("view_order"):
r.append("[")
r.append(("heading_key", "o"))
|
{"golden_diff": "diff --git a/mitmproxy/tools/console/statusbar.py b/mitmproxy/tools/console/statusbar.py\n--- a/mitmproxy/tools/console/statusbar.py\n+++ b/mitmproxy/tools/console/statusbar.py\n@@ -228,10 +228,8 @@\n r.append(\"[\")\n r.append((\"heading_key\", \"u\"))\n r.append(\":%s]\" % self.master.options.stickyauth)\n- if self.master.options.console_default_contentview != \"auto\":\n- r.append(\"[\")\n- r.append((\"heading_key\", \"M\"))\n- r.append(\":%s]\" % self.master.options.console_default_contentview)\n+ if self.master.options.console_default_contentview != 'auto':\n+ r.append(\"[contentview:%s]\" % (self.master.options.console_default_contentview))\n if self.master.options.has_changed(\"view_order\"):\n r.append(\"[\")\n r.append((\"heading_key\", \"o\"))\n", "issue": "Keybinding \u2018M\u2019 erroneously highlighted\n##### Steps to reproduce the problem:\r\n\r\n1. run `mitmproxy`\r\n2. type `O` to open the options editor\r\n3. move the cursor down to `default_contentview` and press Enter\r\n4. type `4` to select `json`\r\n5. type `q` to return to the flows list\r\n6. the status bar now says \u201c[M:json]\u201d with the \u2018M\u2019 highlighted, as if it were a shortcut\r\n7. however, typing `M` invokes \u201cToggle viewing marked flows\u201d instead\r\n\r\n##### Any other comments? What have you tried so far?\r\n\r\nThis is similar to #2953.\r\n\r\n##### System information\r\n\r\nMitmproxy: 3.0.3 binary\r\nPython: 3.5.2\r\nOpenSSL: OpenSSL 1.1.0g 2 Nov 2017\r\nPlatform: Linux-4.4.0-116-generic-x86_64-with-debian-stretch-sid\r\n\n", "before_files": [{"content": "import os.path\n\nimport urwid\n\nfrom mitmproxy.tools.console import common\nfrom mitmproxy.tools.console import signals\nfrom mitmproxy.tools.console import commandexecutor\nimport mitmproxy.tools.console.master # noqa\nfrom mitmproxy.tools.console.commander import commander\n\n\nclass PromptPath:\n def __init__(self, callback, args):\n self.callback, self.args = callback, args\n\n def __call__(self, pth):\n if not pth:\n return\n pth = os.path.expanduser(pth)\n try:\n return self.callback(pth, *self.args)\n except IOError as v:\n signals.status_message.send(message=v.strerror)\n\n\nclass PromptStub:\n def __init__(self, callback, args):\n self.callback, self.args = callback, args\n\n def __call__(self, txt):\n return self.callback(txt, *self.args)\n\n\nclass ActionBar(urwid.WidgetWrap):\n\n def __init__(self, master):\n self.master = master\n urwid.WidgetWrap.__init__(self, None)\n self.clear()\n signals.status_message.connect(self.sig_message)\n signals.status_prompt.connect(self.sig_prompt)\n signals.status_prompt_onekey.connect(self.sig_prompt_onekey)\n signals.status_prompt_command.connect(self.sig_prompt_command)\n\n self.prompting = None\n\n self.onekey = False\n\n def sig_message(self, sender, message, expire=1):\n if self.prompting:\n return\n cols, _ = self.master.ui.get_cols_rows()\n w = urwid.Text(self.shorten_message(message, cols))\n self._w = w\n if expire:\n def cb(*args):\n if w == self._w:\n self.clear()\n signals.call_in.send(seconds=expire, callback=cb)\n\n def prep_prompt(self, p):\n return p.strip() + \": \"\n\n def shorten_message(self, msg, max_width):\n \"\"\"\n Shorten message so that it fits into a single line in the statusbar.\n \"\"\"\n if isinstance(msg, tuple):\n disp_attr, msg_text = msg\n elif isinstance(msg, str):\n disp_attr, msg_text = None, msg\n else:\n return msg\n msg_end = \"\\u2026\" # unicode ellipsis for the end of shortened message\n prompt = \"(more in eventlog)\"\n\n msg_lines = msg_text.split(\"\\n\")\n first_line = msg_lines[0]\n if len(msg_lines) > 1:\n # First line of messages with a few lines must end with prompt.\n line_length = len(first_line) + len(prompt)\n else:\n line_length = len(first_line)\n\n if line_length > max_width:\n shortening_index = max(0, max_width - len(prompt) - len(msg_end))\n first_line = first_line[:shortening_index] + msg_end\n else:\n if len(msg_lines) == 1:\n prompt = \"\"\n\n return [(disp_attr, first_line), (\"warn\", prompt)]\n\n def sig_prompt(self, sender, prompt, text, callback, args=()):\n signals.focus.send(self, section=\"footer\")\n self._w = urwid.Edit(self.prep_prompt(prompt), text or \"\")\n self.prompting = PromptStub(callback, args)\n\n def sig_prompt_command(self, sender, partial=\"\"):\n signals.focus.send(self, section=\"footer\")\n self._w = commander.CommandEdit(self.master, partial)\n self.prompting = commandexecutor.CommandExecutor(self.master)\n\n def sig_prompt_onekey(self, sender, prompt, keys, callback, args=()):\n \"\"\"\n Keys are a set of (word, key) tuples. The appropriate key in the\n word is highlighted.\n \"\"\"\n signals.focus.send(self, section=\"footer\")\n prompt = [prompt, \" (\"]\n mkup = []\n for i, e in enumerate(keys):\n mkup.extend(common.highlight_key(e[0], e[1]))\n if i < len(keys) - 1:\n mkup.append(\",\")\n prompt.extend(mkup)\n prompt.append(\")? \")\n self.onekey = set(i[1] for i in keys)\n self._w = urwid.Edit(prompt, \"\")\n self.prompting = PromptStub(callback, args)\n\n def selectable(self):\n return True\n\n def keypress(self, size, k):\n if self.prompting:\n if k == \"esc\":\n self.prompt_done()\n elif self.onekey:\n if k == \"enter\":\n self.prompt_done()\n elif k in self.onekey:\n self.prompt_execute(k)\n elif k == \"enter\":\n self.prompt_execute(self._w.get_edit_text())\n else:\n if common.is_keypress(k):\n self._w.keypress(size, k)\n else:\n return k\n\n def clear(self):\n self._w = urwid.Text(\"\")\n self.prompting = None\n\n def prompt_done(self):\n self.prompting = None\n self.onekey = False\n signals.status_message.send(message=\"\")\n signals.focus.send(self, section=\"body\")\n\n def prompt_execute(self, txt):\n p = self.prompting\n self.prompt_done()\n msg = p(txt)\n if msg:\n signals.status_message.send(message=msg, expire=1)\n\n\nclass StatusBar(urwid.WidgetWrap):\n keyctx = \"\"\n\n def __init__(\n self, master: \"mitmproxy.tools.console.master.ConsoleMaster\"\n ) -> None:\n self.master = master\n self.ib = urwid.WidgetWrap(urwid.Text(\"\"))\n self.ab = ActionBar(self.master)\n super().__init__(urwid.Pile([self.ib, self.ab]))\n signals.update_settings.connect(self.sig_update)\n signals.flowlist_change.connect(self.sig_update)\n master.options.changed.connect(self.sig_update)\n master.view.focus.sig_change.connect(self.sig_update)\n master.view.sig_view_add.connect(self.sig_update)\n self.redraw()\n\n def sig_update(self, sender, flow=None, updated=None):\n self.redraw()\n\n def keypress(self, *args, **kwargs):\n return self.ab.keypress(*args, **kwargs)\n\n def get_status(self):\n r = []\n\n sreplay = self.master.addons.get(\"serverplayback\")\n creplay = self.master.addons.get(\"clientplayback\")\n\n if len(self.master.options.setheaders):\n r.append(\"[\")\n r.append((\"heading_key\", \"H\"))\n r.append(\"eaders]\")\n if len(self.master.options.replacements):\n r.append(\"[\")\n r.append((\"heading_key\", \"R\"))\n r.append(\"eplacing]\")\n if creplay.count():\n r.append(\"[\")\n r.append((\"heading_key\", \"cplayback\"))\n r.append(\":%s]\" % creplay.count())\n if sreplay.count():\n r.append(\"[\")\n r.append((\"heading_key\", \"splayback\"))\n r.append(\":%s]\" % sreplay.count())\n if self.master.options.ignore_hosts:\n r.append(\"[\")\n r.append((\"heading_key\", \"I\"))\n r.append(\"gnore:%d]\" % len(self.master.options.ignore_hosts))\n if self.master.options.tcp_hosts:\n r.append(\"[\")\n r.append((\"heading_key\", \"T\"))\n r.append(\"CP:%d]\" % len(self.master.options.tcp_hosts))\n if self.master.options.intercept:\n r.append(\"[\")\n if not self.master.options.intercept_active:\n r.append(\"X\")\n r.append((\"heading_key\", \"i\"))\n r.append(\":%s]\" % self.master.options.intercept)\n if self.master.options.view_filter:\n r.append(\"[\")\n r.append((\"heading_key\", \"f\"))\n r.append(\":%s]\" % self.master.options.view_filter)\n if self.master.options.stickycookie:\n r.append(\"[\")\n r.append((\"heading_key\", \"t\"))\n r.append(\":%s]\" % self.master.options.stickycookie)\n if self.master.options.stickyauth:\n r.append(\"[\")\n r.append((\"heading_key\", \"u\"))\n r.append(\":%s]\" % self.master.options.stickyauth)\n if self.master.options.console_default_contentview != \"auto\":\n r.append(\"[\")\n r.append((\"heading_key\", \"M\"))\n r.append(\":%s]\" % self.master.options.console_default_contentview)\n if self.master.options.has_changed(\"view_order\"):\n r.append(\"[\")\n r.append((\"heading_key\", \"o\"))\n r.append(\":%s]\" % self.master.options.view_order)\n\n opts = []\n if self.master.options.anticache:\n opts.append(\"anticache\")\n if self.master.options.anticomp:\n opts.append(\"anticomp\")\n if self.master.options.showhost:\n opts.append(\"showhost\")\n if not self.master.options.server_replay_refresh:\n opts.append(\"norefresh\")\n if self.master.options.server_replay_kill_extra:\n opts.append(\"killextra\")\n if not self.master.options.upstream_cert:\n opts.append(\"no-upstream-cert\")\n if self.master.options.console_focus_follow:\n opts.append(\"following\")\n if self.master.options.stream_large_bodies:\n opts.append(self.master.options.stream_large_bodies)\n\n if opts:\n r.append(\"[%s]\" % (\":\".join(opts)))\n\n if self.master.options.mode != \"regular\":\n r.append(\"[%s]\" % self.master.options.mode)\n if self.master.options.scripts:\n r.append(\"[scripts:%s]\" % len(self.master.options.scripts))\n\n if self.master.options.save_stream_file:\n r.append(\"[W:%s]\" % self.master.options.save_stream_file)\n\n return r\n\n def redraw(self):\n fc = len(self.master.view)\n if self.master.view.focus.flow is None:\n offset = 0\n else:\n offset = self.master.view.focus.index + 1\n\n if self.master.options.view_order_reversed:\n arrow = common.SYMBOL_UP\n else:\n arrow = common.SYMBOL_DOWN\n\n marked = \"\"\n if self.master.view.show_marked:\n marked = \"M\"\n\n t = [\n ('heading', (\"%s %s [%s/%s]\" % (arrow, marked, offset, fc)).ljust(11)),\n ]\n\n if self.master.options.server:\n host = self.master.options.listen_host\n if host == \"0.0.0.0\" or host == \"\":\n host = \"*\"\n boundaddr = \"[%s:%s]\" % (host, self.master.options.listen_port)\n else:\n boundaddr = \"\"\n t.extend(self.get_status())\n status = urwid.AttrWrap(urwid.Columns([\n urwid.Text(t),\n urwid.Text(boundaddr, align=\"right\"),\n ]), \"heading\")\n self.ib._w = status\n\n def selectable(self):\n return True\n", "path": "mitmproxy/tools/console/statusbar.py"}], "after_files": [{"content": "import os.path\n\nimport urwid\n\nfrom mitmproxy.tools.console import common\nfrom mitmproxy.tools.console import signals\nfrom mitmproxy.tools.console import commandexecutor\nimport mitmproxy.tools.console.master # noqa\nfrom mitmproxy.tools.console.commander import commander\n\n\nclass PromptPath:\n def __init__(self, callback, args):\n self.callback, self.args = callback, args\n\n def __call__(self, pth):\n if not pth:\n return\n pth = os.path.expanduser(pth)\n try:\n return self.callback(pth, *self.args)\n except IOError as v:\n signals.status_message.send(message=v.strerror)\n\n\nclass PromptStub:\n def __init__(self, callback, args):\n self.callback, self.args = callback, args\n\n def __call__(self, txt):\n return self.callback(txt, *self.args)\n\n\nclass ActionBar(urwid.WidgetWrap):\n\n def __init__(self, master):\n self.master = master\n urwid.WidgetWrap.__init__(self, None)\n self.clear()\n signals.status_message.connect(self.sig_message)\n signals.status_prompt.connect(self.sig_prompt)\n signals.status_prompt_onekey.connect(self.sig_prompt_onekey)\n signals.status_prompt_command.connect(self.sig_prompt_command)\n\n self.prompting = None\n\n self.onekey = False\n\n def sig_message(self, sender, message, expire=1):\n if self.prompting:\n return\n cols, _ = self.master.ui.get_cols_rows()\n w = urwid.Text(self.shorten_message(message, cols))\n self._w = w\n if expire:\n def cb(*args):\n if w == self._w:\n self.clear()\n signals.call_in.send(seconds=expire, callback=cb)\n\n def prep_prompt(self, p):\n return p.strip() + \": \"\n\n def shorten_message(self, msg, max_width):\n \"\"\"\n Shorten message so that it fits into a single line in the statusbar.\n \"\"\"\n if isinstance(msg, tuple):\n disp_attr, msg_text = msg\n elif isinstance(msg, str):\n disp_attr, msg_text = None, msg\n else:\n return msg\n msg_end = \"\\u2026\" # unicode ellipsis for the end of shortened message\n prompt = \"(more in eventlog)\"\n\n msg_lines = msg_text.split(\"\\n\")\n first_line = msg_lines[0]\n if len(msg_lines) > 1:\n # First line of messages with a few lines must end with prompt.\n line_length = len(first_line) + len(prompt)\n else:\n line_length = len(first_line)\n\n if line_length > max_width:\n shortening_index = max(0, max_width - len(prompt) - len(msg_end))\n first_line = first_line[:shortening_index] + msg_end\n else:\n if len(msg_lines) == 1:\n prompt = \"\"\n\n return [(disp_attr, first_line), (\"warn\", prompt)]\n\n def sig_prompt(self, sender, prompt, text, callback, args=()):\n signals.focus.send(self, section=\"footer\")\n self._w = urwid.Edit(self.prep_prompt(prompt), text or \"\")\n self.prompting = PromptStub(callback, args)\n\n def sig_prompt_command(self, sender, partial=\"\"):\n signals.focus.send(self, section=\"footer\")\n self._w = commander.CommandEdit(self.master, partial)\n self.prompting = commandexecutor.CommandExecutor(self.master)\n\n def sig_prompt_onekey(self, sender, prompt, keys, callback, args=()):\n \"\"\"\n Keys are a set of (word, key) tuples. The appropriate key in the\n word is highlighted.\n \"\"\"\n signals.focus.send(self, section=\"footer\")\n prompt = [prompt, \" (\"]\n mkup = []\n for i, e in enumerate(keys):\n mkup.extend(common.highlight_key(e[0], e[1]))\n if i < len(keys) - 1:\n mkup.append(\",\")\n prompt.extend(mkup)\n prompt.append(\")? \")\n self.onekey = set(i[1] for i in keys)\n self._w = urwid.Edit(prompt, \"\")\n self.prompting = PromptStub(callback, args)\n\n def selectable(self):\n return True\n\n def keypress(self, size, k):\n if self.prompting:\n if k == \"esc\":\n self.prompt_done()\n elif self.onekey:\n if k == \"enter\":\n self.prompt_done()\n elif k in self.onekey:\n self.prompt_execute(k)\n elif k == \"enter\":\n self.prompt_execute(self._w.get_edit_text())\n else:\n if common.is_keypress(k):\n self._w.keypress(size, k)\n else:\n return k\n\n def clear(self):\n self._w = urwid.Text(\"\")\n self.prompting = None\n\n def prompt_done(self):\n self.prompting = None\n self.onekey = False\n signals.status_message.send(message=\"\")\n signals.focus.send(self, section=\"body\")\n\n def prompt_execute(self, txt):\n p = self.prompting\n self.prompt_done()\n msg = p(txt)\n if msg:\n signals.status_message.send(message=msg, expire=1)\n\n\nclass StatusBar(urwid.WidgetWrap):\n keyctx = \"\"\n\n def __init__(\n self, master: \"mitmproxy.tools.console.master.ConsoleMaster\"\n ) -> None:\n self.master = master\n self.ib = urwid.WidgetWrap(urwid.Text(\"\"))\n self.ab = ActionBar(self.master)\n super().__init__(urwid.Pile([self.ib, self.ab]))\n signals.update_settings.connect(self.sig_update)\n signals.flowlist_change.connect(self.sig_update)\n master.options.changed.connect(self.sig_update)\n master.view.focus.sig_change.connect(self.sig_update)\n master.view.sig_view_add.connect(self.sig_update)\n self.redraw()\n\n def sig_update(self, sender, flow=None, updated=None):\n self.redraw()\n\n def keypress(self, *args, **kwargs):\n return self.ab.keypress(*args, **kwargs)\n\n def get_status(self):\n r = []\n\n sreplay = self.master.addons.get(\"serverplayback\")\n creplay = self.master.addons.get(\"clientplayback\")\n\n if len(self.master.options.setheaders):\n r.append(\"[\")\n r.append((\"heading_key\", \"H\"))\n r.append(\"eaders]\")\n if len(self.master.options.replacements):\n r.append(\"[\")\n r.append((\"heading_key\", \"R\"))\n r.append(\"eplacing]\")\n if creplay.count():\n r.append(\"[\")\n r.append((\"heading_key\", \"cplayback\"))\n r.append(\":%s]\" % creplay.count())\n if sreplay.count():\n r.append(\"[\")\n r.append((\"heading_key\", \"splayback\"))\n r.append(\":%s]\" % sreplay.count())\n if self.master.options.ignore_hosts:\n r.append(\"[\")\n r.append((\"heading_key\", \"I\"))\n r.append(\"gnore:%d]\" % len(self.master.options.ignore_hosts))\n if self.master.options.tcp_hosts:\n r.append(\"[\")\n r.append((\"heading_key\", \"T\"))\n r.append(\"CP:%d]\" % len(self.master.options.tcp_hosts))\n if self.master.options.intercept:\n r.append(\"[\")\n if not self.master.options.intercept_active:\n r.append(\"X\")\n r.append((\"heading_key\", \"i\"))\n r.append(\":%s]\" % self.master.options.intercept)\n if self.master.options.view_filter:\n r.append(\"[\")\n r.append((\"heading_key\", \"f\"))\n r.append(\":%s]\" % self.master.options.view_filter)\n if self.master.options.stickycookie:\n r.append(\"[\")\n r.append((\"heading_key\", \"t\"))\n r.append(\":%s]\" % self.master.options.stickycookie)\n if self.master.options.stickyauth:\n r.append(\"[\")\n r.append((\"heading_key\", \"u\"))\n r.append(\":%s]\" % self.master.options.stickyauth)\n if self.master.options.console_default_contentview != 'auto':\n r.append(\"[contentview:%s]\" % (self.master.options.console_default_contentview))\n if self.master.options.has_changed(\"view_order\"):\n r.append(\"[\")\n r.append((\"heading_key\", \"o\"))\n r.append(\":%s]\" % self.master.options.view_order)\n\n opts = []\n if self.master.options.anticache:\n opts.append(\"anticache\")\n if self.master.options.anticomp:\n opts.append(\"anticomp\")\n if self.master.options.showhost:\n opts.append(\"showhost\")\n if not self.master.options.server_replay_refresh:\n opts.append(\"norefresh\")\n if self.master.options.server_replay_kill_extra:\n opts.append(\"killextra\")\n if not self.master.options.upstream_cert:\n opts.append(\"no-upstream-cert\")\n if self.master.options.console_focus_follow:\n opts.append(\"following\")\n if self.master.options.stream_large_bodies:\n opts.append(self.master.options.stream_large_bodies)\n\n if opts:\n r.append(\"[%s]\" % (\":\".join(opts)))\n\n if self.master.options.mode != \"regular\":\n r.append(\"[%s]\" % self.master.options.mode)\n if self.master.options.scripts:\n r.append(\"[scripts:%s]\" % len(self.master.options.scripts))\n\n if self.master.options.save_stream_file:\n r.append(\"[W:%s]\" % self.master.options.save_stream_file)\n\n return r\n\n def redraw(self):\n fc = len(self.master.view)\n if self.master.view.focus.flow is None:\n offset = 0\n else:\n offset = self.master.view.focus.index + 1\n\n if self.master.options.view_order_reversed:\n arrow = common.SYMBOL_UP\n else:\n arrow = common.SYMBOL_DOWN\n\n marked = \"\"\n if self.master.view.show_marked:\n marked = \"M\"\n\n t = [\n ('heading', (\"%s %s [%s/%s]\" % (arrow, marked, offset, fc)).ljust(11)),\n ]\n\n if self.master.options.server:\n host = self.master.options.listen_host\n if host == \"0.0.0.0\" or host == \"\":\n host = \"*\"\n boundaddr = \"[%s:%s]\" % (host, self.master.options.listen_port)\n else:\n boundaddr = \"\"\n t.extend(self.get_status())\n status = urwid.AttrWrap(urwid.Columns([\n urwid.Text(t),\n urwid.Text(boundaddr, align=\"right\"),\n ]), \"heading\")\n self.ib._w = status\n\n def selectable(self):\n return True\n", "path": "mitmproxy/tools/console/statusbar.py"}]}
| 3,625 | 197 |
gh_patches_debug_1134
|
rasdani/github-patches
|
git_diff
|
openstates__openstates-scrapers-2982
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
OR failing since at least 2019-06-09
OR has been failing since 2019-06-09
Based on automated runs it appears that OR has not run successfully in 2 days (2019-06-09).
```
loaded Open States pupa settings...
or (scrape, import)
bills: {}
votes: {}
08:01:13 CRITICAL pupa: Session(s) 2019-2020 Interim were reported by Oregon.get_session_list() but were not found in Oregon.legislative_sessions or Oregon.ignored_scraped_sessions.
```
Visit http://bobsled.openstates.org for more info.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `openstates/or/__init__.py`
Content:
```
1 from pupa.scrape import Jurisdiction, Organization
2 from .people import ORPersonScraper
3 # from .committees import ORCommitteeScraper
4 from .bills import ORBillScraper
5 from .votes import ORVoteScraper
6
7
8 class Oregon(Jurisdiction):
9 division_id = "ocd-division/country:us/state:or"
10 classification = "government"
11 name = "Oregon"
12 url = "https://olis.leg.state.or.us"
13 scrapers = {
14 'people': ORPersonScraper,
15 # 'committees': ORCommitteeScraper,
16 'bills': ORBillScraper,
17 'votes': ORVoteScraper
18 }
19 legislative_sessions = [
20 {
21 "_scraped_name": "2007 Regular Session",
22 "identifier": "2007 Regular Session",
23 "name": "2007 Regular Session"
24 },
25 {
26 "_scraped_name": "2008 Special Session",
27 "identifier": "2008 Special Session",
28 "name": "2008 Special Session"
29 },
30 {
31 "_scraped_name": "2009 Regular Session",
32 "identifier": "2009 Regular Session",
33 "name": "2009 Regular Session"
34 },
35 {
36 "_scraped_name": "2010 Special Session",
37 "identifier": "2012 Special Session",
38 "name": "2010 Special Session"
39 },
40 {
41 "_scraped_name": "2011 Regular Session",
42 "identifier": "2011 Regular Session",
43 "name": "2011 Regular Session"
44 },
45 {
46 "_scraped_name": "2012 Regular Session",
47 "identifier": "2012 Regular Session",
48 "name": "2012 Regular Session"
49 },
50 {
51 "_scraped_name": "2012 Special Session",
52 "identifier": "2012 Special Session",
53 "name": "2012 Speical Session"
54 },
55 {
56 "_scraped_name": "2013 Regular Session",
57 "identifier": "2013 Regular Session",
58 "name": "2013 Regular Session"
59 },
60 {
61 "_scraped_name": "2013 Special Session",
62 "identifier": "2013 Special Session",
63 "name": "2013 Special Session"
64 },
65 {
66 "_scraped_name": "2014 Regular Session",
67 "identifier": "2014 Regular Session",
68 "name": "2014 Regular Session"
69 },
70 {
71 "_scraped_name": "2015 Regular Session",
72 "identifier": "2015 Regular Session",
73 "name": "2015 Regular Session"
74 },
75 {
76 "_scraped_name": "2016 Regular Session",
77 "identifier": "2016 Regular Session",
78 "name": "2016 Regular Session"
79 },
80 {
81 "_scraped_name": "2017 Regular Session",
82 "end_date": "2017-07-10",
83 "identifier": "2017 Regular Session",
84 "name": "2017 Regular Session",
85 "start_date": "2017-02-01"
86 },
87 {
88 "_scraped_name": "2018 Regular Session",
89 "identifier": "2018 Regular Session",
90 "name": "2018 Regular Session",
91 "start_date": "2018-02-05",
92 "end_date": "2018-03-09",
93 },
94 {
95 "_scraped_name": "2018 1st Special Session",
96 "identifier": "2018 Special Session",
97 "name": "2018 Special Session",
98 "start_date": "2018-05-21",
99 "end_date": "2018-05-21",
100 },
101 {
102 "_scraped_name": "2019 Regular Session",
103 "identifier": "2019 Regular Session",
104 "name": "2019 Regular Session",
105 "start_date": "2019-01-22",
106 "end_date": "2019-06-30",
107 },
108 ]
109 ignored_scraped_sessions = [
110 "Today",
111 "2017-2018 Interim",
112 "2015-2016 Interim",
113 "2013 1st Special Session",
114 "2012 1st Special Session",
115 "2013 - 2014 Interim",
116 "2011 - 2012 Interim",
117 "2009 - 2010 Interim",
118 "2007 - 2008 Interim"
119 ]
120
121 def get_organizations(self):
122 legislature_name = "Oregon Legislative Assembly"
123
124 legislature = Organization(name=legislature_name,
125 classification="legislature")
126 upper = Organization('Senate', classification='upper',
127 parent_id=legislature._id)
128 lower = Organization('House', classification='lower',
129 parent_id=legislature._id)
130
131 yield legislature
132 yield upper
133 yield lower
134
135 def get_session_list(self):
136 from .apiclient import OregonLegislatorODataClient
137 sessions = OregonLegislatorODataClient(None).all_sessions()
138 sessions = [s['SessionName'] for s in sessions]
139 return sessions
140
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/openstates/or/__init__.py b/openstates/or/__init__.py
--- a/openstates/or/__init__.py
+++ b/openstates/or/__init__.py
@@ -108,6 +108,7 @@
]
ignored_scraped_sessions = [
"Today",
+ "2019-2020 Interim",
"2017-2018 Interim",
"2015-2016 Interim",
"2013 1st Special Session",
|
{"golden_diff": "diff --git a/openstates/or/__init__.py b/openstates/or/__init__.py\n--- a/openstates/or/__init__.py\n+++ b/openstates/or/__init__.py\n@@ -108,6 +108,7 @@\n ]\n ignored_scraped_sessions = [\n \"Today\",\n+ \"2019-2020 Interim\",\n \"2017-2018 Interim\",\n \"2015-2016 Interim\",\n \"2013 1st Special Session\",\n", "issue": "OR failing since at least 2019-06-09\nOR has been failing since 2019-06-09\n\nBased on automated runs it appears that OR has not run successfully in 2 days (2019-06-09).\n\n\n```\n loaded Open States pupa settings...\nor (scrape, import)\n bills: {}\n votes: {}\n08:01:13 CRITICAL pupa: Session(s) 2019-2020 Interim were reported by Oregon.get_session_list() but were not found in Oregon.legislative_sessions or Oregon.ignored_scraped_sessions.\n```\n\nVisit http://bobsled.openstates.org for more info.\n\n", "before_files": [{"content": "from pupa.scrape import Jurisdiction, Organization\nfrom .people import ORPersonScraper\n# from .committees import ORCommitteeScraper\nfrom .bills import ORBillScraper\nfrom .votes import ORVoteScraper\n\n\nclass Oregon(Jurisdiction):\n division_id = \"ocd-division/country:us/state:or\"\n classification = \"government\"\n name = \"Oregon\"\n url = \"https://olis.leg.state.or.us\"\n scrapers = {\n 'people': ORPersonScraper,\n # 'committees': ORCommitteeScraper,\n 'bills': ORBillScraper,\n 'votes': ORVoteScraper\n }\n legislative_sessions = [\n {\n \"_scraped_name\": \"2007 Regular Session\",\n \"identifier\": \"2007 Regular Session\",\n \"name\": \"2007 Regular Session\"\n },\n {\n \"_scraped_name\": \"2008 Special Session\",\n \"identifier\": \"2008 Special Session\",\n \"name\": \"2008 Special Session\"\n },\n {\n \"_scraped_name\": \"2009 Regular Session\",\n \"identifier\": \"2009 Regular Session\",\n \"name\": \"2009 Regular Session\"\n },\n {\n \"_scraped_name\": \"2010 Special Session\",\n \"identifier\": \"2012 Special Session\",\n \"name\": \"2010 Special Session\"\n },\n {\n \"_scraped_name\": \"2011 Regular Session\",\n \"identifier\": \"2011 Regular Session\",\n \"name\": \"2011 Regular Session\"\n },\n {\n \"_scraped_name\": \"2012 Regular Session\",\n \"identifier\": \"2012 Regular Session\",\n \"name\": \"2012 Regular Session\"\n },\n {\n \"_scraped_name\": \"2012 Special Session\",\n \"identifier\": \"2012 Special Session\",\n \"name\": \"2012 Speical Session\"\n },\n {\n \"_scraped_name\": \"2013 Regular Session\",\n \"identifier\": \"2013 Regular Session\",\n \"name\": \"2013 Regular Session\"\n },\n {\n \"_scraped_name\": \"2013 Special Session\",\n \"identifier\": \"2013 Special Session\",\n \"name\": \"2013 Special Session\"\n },\n {\n \"_scraped_name\": \"2014 Regular Session\",\n \"identifier\": \"2014 Regular Session\",\n \"name\": \"2014 Regular Session\"\n },\n {\n \"_scraped_name\": \"2015 Regular Session\",\n \"identifier\": \"2015 Regular Session\",\n \"name\": \"2015 Regular Session\"\n },\n {\n \"_scraped_name\": \"2016 Regular Session\",\n \"identifier\": \"2016 Regular Session\",\n \"name\": \"2016 Regular Session\"\n },\n {\n \"_scraped_name\": \"2017 Regular Session\",\n \"end_date\": \"2017-07-10\",\n \"identifier\": \"2017 Regular Session\",\n \"name\": \"2017 Regular Session\",\n \"start_date\": \"2017-02-01\"\n },\n {\n \"_scraped_name\": \"2018 Regular Session\",\n \"identifier\": \"2018 Regular Session\",\n \"name\": \"2018 Regular Session\",\n \"start_date\": \"2018-02-05\",\n \"end_date\": \"2018-03-09\",\n },\n {\n \"_scraped_name\": \"2018 1st Special Session\",\n \"identifier\": \"2018 Special Session\",\n \"name\": \"2018 Special Session\",\n \"start_date\": \"2018-05-21\",\n \"end_date\": \"2018-05-21\",\n },\n {\n \"_scraped_name\": \"2019 Regular Session\",\n \"identifier\": \"2019 Regular Session\",\n \"name\": \"2019 Regular Session\",\n \"start_date\": \"2019-01-22\",\n \"end_date\": \"2019-06-30\",\n },\n ]\n ignored_scraped_sessions = [\n \"Today\",\n \"2017-2018 Interim\",\n \"2015-2016 Interim\",\n \"2013 1st Special Session\",\n \"2012 1st Special Session\",\n \"2013 - 2014 Interim\",\n \"2011 - 2012 Interim\",\n \"2009 - 2010 Interim\",\n \"2007 - 2008 Interim\"\n ]\n\n def get_organizations(self):\n legislature_name = \"Oregon Legislative Assembly\"\n\n legislature = Organization(name=legislature_name,\n classification=\"legislature\")\n upper = Organization('Senate', classification='upper',\n parent_id=legislature._id)\n lower = Organization('House', classification='lower',\n parent_id=legislature._id)\n\n yield legislature\n yield upper\n yield lower\n\n def get_session_list(self):\n from .apiclient import OregonLegislatorODataClient\n sessions = OregonLegislatorODataClient(None).all_sessions()\n sessions = [s['SessionName'] for s in sessions]\n return sessions\n", "path": "openstates/or/__init__.py"}], "after_files": [{"content": "from pupa.scrape import Jurisdiction, Organization\nfrom .people import ORPersonScraper\n# from .committees import ORCommitteeScraper\nfrom .bills import ORBillScraper\nfrom .votes import ORVoteScraper\n\n\nclass Oregon(Jurisdiction):\n division_id = \"ocd-division/country:us/state:or\"\n classification = \"government\"\n name = \"Oregon\"\n url = \"https://olis.leg.state.or.us\"\n scrapers = {\n 'people': ORPersonScraper,\n # 'committees': ORCommitteeScraper,\n 'bills': ORBillScraper,\n 'votes': ORVoteScraper\n }\n legislative_sessions = [\n {\n \"_scraped_name\": \"2007 Regular Session\",\n \"identifier\": \"2007 Regular Session\",\n \"name\": \"2007 Regular Session\"\n },\n {\n \"_scraped_name\": \"2008 Special Session\",\n \"identifier\": \"2008 Special Session\",\n \"name\": \"2008 Special Session\"\n },\n {\n \"_scraped_name\": \"2009 Regular Session\",\n \"identifier\": \"2009 Regular Session\",\n \"name\": \"2009 Regular Session\"\n },\n {\n \"_scraped_name\": \"2010 Special Session\",\n \"identifier\": \"2012 Special Session\",\n \"name\": \"2010 Special Session\"\n },\n {\n \"_scraped_name\": \"2011 Regular Session\",\n \"identifier\": \"2011 Regular Session\",\n \"name\": \"2011 Regular Session\"\n },\n {\n \"_scraped_name\": \"2012 Regular Session\",\n \"identifier\": \"2012 Regular Session\",\n \"name\": \"2012 Regular Session\"\n },\n {\n \"_scraped_name\": \"2012 Special Session\",\n \"identifier\": \"2012 Special Session\",\n \"name\": \"2012 Speical Session\"\n },\n {\n \"_scraped_name\": \"2013 Regular Session\",\n \"identifier\": \"2013 Regular Session\",\n \"name\": \"2013 Regular Session\"\n },\n {\n \"_scraped_name\": \"2013 Special Session\",\n \"identifier\": \"2013 Special Session\",\n \"name\": \"2013 Special Session\"\n },\n {\n \"_scraped_name\": \"2014 Regular Session\",\n \"identifier\": \"2014 Regular Session\",\n \"name\": \"2014 Regular Session\"\n },\n {\n \"_scraped_name\": \"2015 Regular Session\",\n \"identifier\": \"2015 Regular Session\",\n \"name\": \"2015 Regular Session\"\n },\n {\n \"_scraped_name\": \"2016 Regular Session\",\n \"identifier\": \"2016 Regular Session\",\n \"name\": \"2016 Regular Session\"\n },\n {\n \"_scraped_name\": \"2017 Regular Session\",\n \"end_date\": \"2017-07-10\",\n \"identifier\": \"2017 Regular Session\",\n \"name\": \"2017 Regular Session\",\n \"start_date\": \"2017-02-01\"\n },\n {\n \"_scraped_name\": \"2018 Regular Session\",\n \"identifier\": \"2018 Regular Session\",\n \"name\": \"2018 Regular Session\",\n \"start_date\": \"2018-02-05\",\n \"end_date\": \"2018-03-09\",\n },\n {\n \"_scraped_name\": \"2018 1st Special Session\",\n \"identifier\": \"2018 Special Session\",\n \"name\": \"2018 Special Session\",\n \"start_date\": \"2018-05-21\",\n \"end_date\": \"2018-05-21\",\n },\n {\n \"_scraped_name\": \"2019 Regular Session\",\n \"identifier\": \"2019 Regular Session\",\n \"name\": \"2019 Regular Session\",\n \"start_date\": \"2019-01-22\",\n \"end_date\": \"2019-06-30\",\n },\n ]\n ignored_scraped_sessions = [\n \"Today\",\n \"2019-2020 Interim\",\n \"2017-2018 Interim\",\n \"2015-2016 Interim\",\n \"2013 1st Special Session\",\n \"2012 1st Special Session\",\n \"2013 - 2014 Interim\",\n \"2011 - 2012 Interim\",\n \"2009 - 2010 Interim\",\n \"2007 - 2008 Interim\"\n ]\n\n def get_organizations(self):\n legislature_name = \"Oregon Legislative Assembly\"\n\n legislature = Organization(name=legislature_name,\n classification=\"legislature\")\n upper = Organization('Senate', classification='upper',\n parent_id=legislature._id)\n lower = Organization('House', classification='lower',\n parent_id=legislature._id)\n\n yield legislature\n yield upper\n yield lower\n\n def get_session_list(self):\n from .apiclient import OregonLegislatorODataClient\n sessions = OregonLegislatorODataClient(None).all_sessions()\n sessions = [s['SessionName'] for s in sessions]\n return sessions\n", "path": "openstates/or/__init__.py"}]}
| 1,987 | 123 |
gh_patches_debug_16048
|
rasdani/github-patches
|
git_diff
|
pwndbg__pwndbg-473
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
TypeError exception raised on up/down commands.
### Description
When running the command `up` or `down` with an integer argument, the following exception is raised:
```
Traceback (most recent call last):
File "/home/david/.pwndbg/pwndbg/commands/__init__.py", line 109, in __call__
return self.function(*args, **kwargs)
File "/home/david/.pwndbg/pwndbg/commands/__init__.py", line 200, in _OnlyWhenRunning
return function(*a, **kw)
File "/home/david/.pwndbg/pwndbg/commands/ida.py", line 46, in up
for i in range(n):
TypeError: 'str' object cannot be interpreted as an integer
```
### Steps to reproduce
Open any binary and attempt to do `up 2` during debugging.
### My setup
pwndbg> version
Gdb: 7.12.0.20161007-git
Python: 3.6.5rc1 (default, Mar 14 2018, 06:54:23) [GCC 7.3.0]
Pwndbg: 1.0.0 build: f69b81e
Capstone: 4.0.1024
Unicorn: 1.0.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pwndbg/commands/ida.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 from __future__ import absolute_import
4 from __future__ import division
5 from __future__ import print_function
6 from __future__ import unicode_literals
7
8 import bz2
9 import datetime
10 import os
11
12 import gdb
13
14 import pwndbg.commands
15 import pwndbg.commands.context
16 import pwndbg.ida
17 import pwndbg.regs
18 from pwndbg.gdbutils.functions import GdbFunction
19
20
21 @pwndbg.commands.ParsedCommand
22 @pwndbg.commands.OnlyWhenRunning
23 @pwndbg.events.stop
24 @pwndbg.ida.withIDA
25 def j(*args):
26 """
27 Synchronize IDA's cursor with GDB
28 """
29 try:
30 pc = int(gdb.selected_frame().pc())
31 pwndbg.ida.Jump(pc)
32 except Exception:
33 pass
34
35
36
37 @pwndbg.commands.Command
38 @pwndbg.commands.OnlyWhenRunning
39 def up(n=1):
40 """
41 Select and print stack frame that called this one.
42 An argument says how many frames up to go.
43 """
44 f = gdb.selected_frame()
45
46 for i in range(n):
47 o = f.older()
48 if o:
49 o.select()
50
51 bt = pwndbg.commands.context.context_backtrace(with_banner=False)
52 print('\n'.join(bt))
53
54 j()
55
56
57 @pwndbg.commands.Command
58 @pwndbg.commands.OnlyWhenRunning
59 def down(n=1):
60 """
61 Select and print stack frame called by this one.
62 An argument says how many frames down to go.
63 """
64 f = gdb.selected_frame()
65
66 for i in range(n):
67 o = f.newer()
68 if o:
69 o.select()
70
71 bt = pwndbg.commands.context.context_backtrace(with_banner=False)
72 print('\n'.join(bt))
73
74 j()
75
76
77 @pwndbg.commands.Command
78 @pwndbg.ida.withIDA
79 def save_ida():
80 """Save the IDA database"""
81 if not pwndbg.ida.available():
82 return
83
84 path = pwndbg.ida.GetIdbPath()
85
86 # Need to handle emulated paths for Wine
87 if path.startswith('Z:'):
88 path = path[2:].replace('\\', '/')
89 pwndbg.ida.SaveBase(path)
90
91 basename = os.path.basename(path)
92 dirname = os.path.dirname(path)
93 backups = os.path.join(dirname, 'ida-backup')
94
95 if not os.path.isdir(backups):
96 os.mkdir(backups)
97
98 basename, ext = os.path.splitext(basename)
99 basename += '-%s' % datetime.datetime.now().isoformat()
100 basename += ext
101
102 # Windows doesn't like colons in paths
103 basename = basename.replace(':', '_')
104
105 full_path = os.path.join(backups, basename)
106
107 pwndbg.ida.SaveBase(full_path)
108
109 data = open(full_path, 'rb').read()
110
111 # Compress!
112 full_path_compressed = full_path + '.bz2'
113 bz2.BZ2File(full_path_compressed, 'w').write(data)
114
115 # Remove old version
116 os.unlink(full_path)
117
118 save_ida()
119
120
121 @GdbFunction()
122 def ida(name):
123
124 """Evaluate ida.LocByName() on the supplied value."""
125 name = name.string()
126 result = pwndbg.ida.LocByName(name)
127
128 if 0xffffe000 <= result <= 0xffffffff or 0xffffffffffffe000 <= result <= 0xffffffffffffffff:
129 raise ValueError("ida.LocByName(%r) == BADADDR" % name)
130
131 return result
132
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pwndbg/commands/ida.py b/pwndbg/commands/ida.py
--- a/pwndbg/commands/ida.py
+++ b/pwndbg/commands/ida.py
@@ -43,10 +43,10 @@
"""
f = gdb.selected_frame()
- for i in range(n):
- o = f.older()
- if o:
- o.select()
+ for i in range(int(n)):
+ if f.older():
+ f = f.older()
+ f.select()
bt = pwndbg.commands.context.context_backtrace(with_banner=False)
print('\n'.join(bt))
@@ -63,10 +63,10 @@
"""
f = gdb.selected_frame()
- for i in range(n):
- o = f.newer()
- if o:
- o.select()
+ for i in range(int(n)):
+ if f.newer():
+ f = f.newer()
+ f.select()
bt = pwndbg.commands.context.context_backtrace(with_banner=False)
print('\n'.join(bt))
|
{"golden_diff": "diff --git a/pwndbg/commands/ida.py b/pwndbg/commands/ida.py\n--- a/pwndbg/commands/ida.py\n+++ b/pwndbg/commands/ida.py\n@@ -43,10 +43,10 @@\n \"\"\"\n f = gdb.selected_frame()\n \n- for i in range(n):\n- o = f.older()\n- if o:\n- o.select()\n+ for i in range(int(n)):\n+ if f.older():\n+ f = f.older()\n+ f.select()\n \n bt = pwndbg.commands.context.context_backtrace(with_banner=False)\n print('\\n'.join(bt))\n@@ -63,10 +63,10 @@\n \"\"\"\n f = gdb.selected_frame()\n \n- for i in range(n):\n- o = f.newer()\n- if o:\n- o.select()\n+ for i in range(int(n)):\n+ if f.newer():\n+ f = f.newer()\n+ f.select()\n \n bt = pwndbg.commands.context.context_backtrace(with_banner=False)\n print('\\n'.join(bt))\n", "issue": "TypeError exception raised on up/down commands.\n### Description\r\n\r\nWhen running the command `up` or `down` with an integer argument, the following exception is raised:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"/home/david/.pwndbg/pwndbg/commands/__init__.py\", line 109, in __call__\r\n return self.function(*args, **kwargs)\r\n File \"/home/david/.pwndbg/pwndbg/commands/__init__.py\", line 200, in _OnlyWhenRunning\r\n return function(*a, **kw)\r\n File \"/home/david/.pwndbg/pwndbg/commands/ida.py\", line 46, in up\r\n for i in range(n):\r\nTypeError: 'str' object cannot be interpreted as an integer\r\n```\r\n\r\n### Steps to reproduce\r\n\r\nOpen any binary and attempt to do `up 2` during debugging.\r\n\r\n### My setup\r\n\r\npwndbg> version\r\nGdb: 7.12.0.20161007-git\r\nPython: 3.6.5rc1 (default, Mar 14 2018, 06:54:23) [GCC 7.3.0]\r\nPwndbg: 1.0.0 build: f69b81e\r\nCapstone: 4.0.1024\r\nUnicorn: 1.0.1\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport bz2\nimport datetime\nimport os\n\nimport gdb\n\nimport pwndbg.commands\nimport pwndbg.commands.context\nimport pwndbg.ida\nimport pwndbg.regs\nfrom pwndbg.gdbutils.functions import GdbFunction\n\n\[email protected]\[email protected]\[email protected]\[email protected]\ndef j(*args):\n \"\"\"\n Synchronize IDA's cursor with GDB\n \"\"\"\n try:\n pc = int(gdb.selected_frame().pc())\n pwndbg.ida.Jump(pc)\n except Exception:\n pass\n\n\n\[email protected]\[email protected]\ndef up(n=1):\n \"\"\"\n Select and print stack frame that called this one.\n An argument says how many frames up to go.\n \"\"\"\n f = gdb.selected_frame()\n\n for i in range(n):\n o = f.older()\n if o:\n o.select()\n\n bt = pwndbg.commands.context.context_backtrace(with_banner=False)\n print('\\n'.join(bt))\n\n j()\n\n\[email protected]\[email protected]\ndef down(n=1):\n \"\"\"\n Select and print stack frame called by this one.\n An argument says how many frames down to go.\n \"\"\"\n f = gdb.selected_frame()\n\n for i in range(n):\n o = f.newer()\n if o:\n o.select()\n\n bt = pwndbg.commands.context.context_backtrace(with_banner=False)\n print('\\n'.join(bt))\n\n j()\n\n\[email protected]\[email protected]\ndef save_ida():\n \"\"\"Save the IDA database\"\"\"\n if not pwndbg.ida.available():\n return\n\n path = pwndbg.ida.GetIdbPath()\n\n # Need to handle emulated paths for Wine\n if path.startswith('Z:'):\n path = path[2:].replace('\\\\', '/')\n pwndbg.ida.SaveBase(path)\n\n basename = os.path.basename(path)\n dirname = os.path.dirname(path)\n backups = os.path.join(dirname, 'ida-backup')\n\n if not os.path.isdir(backups):\n os.mkdir(backups)\n\n basename, ext = os.path.splitext(basename)\n basename += '-%s' % datetime.datetime.now().isoformat()\n basename += ext\n\n # Windows doesn't like colons in paths\n basename = basename.replace(':', '_')\n\n full_path = os.path.join(backups, basename)\n\n pwndbg.ida.SaveBase(full_path)\n\n data = open(full_path, 'rb').read()\n\n # Compress!\n full_path_compressed = full_path + '.bz2'\n bz2.BZ2File(full_path_compressed, 'w').write(data)\n\n # Remove old version\n os.unlink(full_path)\n\nsave_ida()\n\n\n@GdbFunction()\ndef ida(name):\n\n \"\"\"Evaluate ida.LocByName() on the supplied value.\"\"\"\n name = name.string()\n result = pwndbg.ida.LocByName(name)\n\n if 0xffffe000 <= result <= 0xffffffff or 0xffffffffffffe000 <= result <= 0xffffffffffffffff:\n raise ValueError(\"ida.LocByName(%r) == BADADDR\" % name)\n\n return result\n", "path": "pwndbg/commands/ida.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport bz2\nimport datetime\nimport os\n\nimport gdb\n\nimport pwndbg.commands\nimport pwndbg.commands.context\nimport pwndbg.ida\nimport pwndbg.regs\nfrom pwndbg.gdbutils.functions import GdbFunction\n\n\[email protected]\[email protected]\[email protected]\[email protected]\ndef j(*args):\n \"\"\"\n Synchronize IDA's cursor with GDB\n \"\"\"\n try:\n pc = int(gdb.selected_frame().pc())\n pwndbg.ida.Jump(pc)\n except Exception:\n pass\n\n\n\[email protected]\[email protected]\ndef up(n=1):\n \"\"\"\n Select and print stack frame that called this one.\n An argument says how many frames up to go.\n \"\"\"\n f = gdb.selected_frame()\n\n for i in range(int(n)):\n if f.older():\n f = f.older()\n f.select()\n\n bt = pwndbg.commands.context.context_backtrace(with_banner=False)\n print('\\n'.join(bt))\n\n j()\n\n\[email protected]\[email protected]\ndef down(n=1):\n \"\"\"\n Select and print stack frame called by this one.\n An argument says how many frames down to go.\n \"\"\"\n f = gdb.selected_frame()\n\n for i in range(int(n)):\n if f.newer():\n f = f.newer()\n f.select()\n\n bt = pwndbg.commands.context.context_backtrace(with_banner=False)\n print('\\n'.join(bt))\n\n j()\n\n\[email protected]\[email protected]\ndef save_ida():\n \"\"\"Save the IDA database\"\"\"\n if not pwndbg.ida.available():\n return\n\n path = pwndbg.ida.GetIdbPath()\n\n # Need to handle emulated paths for Wine\n if path.startswith('Z:'):\n path = path[2:].replace('\\\\', '/')\n pwndbg.ida.SaveBase(path)\n\n basename = os.path.basename(path)\n dirname = os.path.dirname(path)\n backups = os.path.join(dirname, 'ida-backup')\n\n if not os.path.isdir(backups):\n os.mkdir(backups)\n\n basename, ext = os.path.splitext(basename)\n basename += '-%s' % datetime.datetime.now().isoformat()\n basename += ext\n\n # Windows doesn't like colons in paths\n basename = basename.replace(':', '_')\n\n full_path = os.path.join(backups, basename)\n\n pwndbg.ida.SaveBase(full_path)\n\n data = open(full_path, 'rb').read()\n\n # Compress!\n full_path_compressed = full_path + '.bz2'\n bz2.BZ2File(full_path_compressed, 'w').write(data)\n\n # Remove old version\n os.unlink(full_path)\n\nsave_ida()\n\n\n@GdbFunction()\ndef ida(name):\n\n \"\"\"Evaluate ida.LocByName() on the supplied value.\"\"\"\n name = name.string()\n result = pwndbg.ida.LocByName(name)\n\n if 0xffffe000 <= result <= 0xffffffff or 0xffffffffffffe000 <= result <= 0xffffffffffffffff:\n raise ValueError(\"ida.LocByName(%r) == BADADDR\" % name)\n\n return result\n", "path": "pwndbg/commands/ida.py"}]}
| 1,666 | 250 |
gh_patches_debug_14033
|
rasdani/github-patches
|
git_diff
|
facebookresearch__ParlAI-3405
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
BST dataset: where is 'mode' for each utterances?
Hi, I have a question about BST dataset (https://arxiv.org/pdf/2004.08449.pdf).
<img width="971" alt="스크린샷 2021-01-03 오후 9 48 39" src="https://user-images.githubusercontent.com/15323600/103478953-7afeea80-4e0d-11eb-9a1d-c5b87b237d48.png">
While the paper and Fig. 1 tell us that each utterance are annotated with the 'mode' (e.g., _PB,K,E,S_),
I cannot find it in raw BST dataset (i.e., `train.json, valid.json, test.json`) unfortunately :(
Without it, one cannot even reproduce **MT Two-Stage model** in the paper because the model is based on a dialog manager that classifies the mode given utterance.
Is the 'mode' only used for private perhaps?
Thanks.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parlai/tasks/blended_skill_talk/build.py`
Content:
```
1 #!/usr/bin/env python3
2
3 # Copyright (c) Facebook, Inc. and its affiliates.
4 # This source code is licensed under the MIT license found in the
5 # LICENSE file in the root directory of this source tree.
6 # Download and build the data if it does not exist.
7
8 import json
9 import os
10
11 from parlai.core import build_data
12 from parlai.utils.io import PathManager
13
14
15 RESOURCES = [
16 build_data.DownloadableFile(
17 'http://parl.ai/downloads/blended_skill_talk/blended_skill_talk.tar.gz',
18 'blended_skill_talk.tar.gz',
19 '5fbed0068ee89e2d43b93c3ecb341e784617033efa5e8e911a219d4eda6134a6',
20 ),
21 build_data.DownloadableFile(
22 'http://parl.ai/downloads/blended_skill_talk/personas_list.txt',
23 'persona_list.txt',
24 '59a51adedc78e806a380f16477de3740cefe3494d20f8a2a733841bedaaa3ee5',
25 zipped=False,
26 ),
27 build_data.DownloadableFile(
28 'http://parl.ai/downloads/blended_skill_talk/topic_to_persona_list.txt',
29 'topic_to_persona_list.txt',
30 '47cdb6cbee0516ca7400be35fa07761339b86c6c026425bf5dba00e5534e8182',
31 zipped=False,
32 ),
33 build_data.DownloadableFile(
34 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__train__both_sides.json',
35 'ed_persona_topicifier__train__both_sides.json',
36 'ff2ea7c5fcb0449890d57a629cc3e8794ab95ac6db1057bf58d540c2b576e4cc',
37 zipped=False,
38 ),
39 build_data.DownloadableFile(
40 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__train__experiencer_only.json',
41 'ed_persona_topicifier__train__experiencer_only.json',
42 '751f0ba2f421a11eee2bfc896d60ab70d669093c3a5f6cb30e8d202133a90ec7',
43 zipped=False,
44 ),
45 build_data.DownloadableFile(
46 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__valid__experiencer_only.json',
47 'ed_persona_topicifier__valid__experiencer_only.json',
48 '15d5412f5990a8a9c892305009d8597a737322aafe878b03ec71143703b25ba0',
49 zipped=False,
50 ),
51 build_data.DownloadableFile(
52 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__test__experiencer_only.json',
53 'ed_persona_topicifier__test__experiencer_only.json',
54 '2604e977787be0b5edc54561f7ce8a54c40758d235a3fee262fe20fe36b8cd15',
55 zipped=False,
56 ),
57 build_data.DownloadableFile(
58 'http://parl.ai/downloads/blended_skill_talk/safe_personas_2.txt',
59 'safe_personas.txt',
60 '2ee292aa0006ea002e9b23d4f7326fe9e17514ce5793d31fd8d679035d4366a7',
61 zipped=False,
62 ),
63 ]
64
65
66 def build(opt):
67 dpath = os.path.join(opt['datapath'], 'blended_skill_talk')
68 version = 'v1.4'
69
70 if not build_data.built(dpath, version_string=version):
71 print('[building data: ' + dpath + ']')
72 if build_data.built(dpath):
73 # An older version exists, so remove these outdated files
74 build_data.remove_dir(dpath)
75 build_data.make_dir(dpath)
76
77 # Download the data
78 for downloadable_file in RESOURCES:
79 downloadable_file.download_file(dpath)
80
81 # Format it for use with ParlAIDialogTeacher
82 _create_parlai_format(dpath)
83
84 # Mark the data as built
85 build_data.mark_done(dpath, version_string=version)
86
87
88 def _create_parlai_format(dpath: str):
89 """
90 Copy data into the format read by ParlAIDialogTeacher.
91
92 'text' will be from the free Turker, who speaks first, and 'label' will be from the
93 guided Turker.
94 """
95
96 datatypes = ['train', 'valid', 'test']
97 for datatype in datatypes:
98
99 load_path = os.path.join(dpath, f'{datatype}.json')
100 save_path = os.path.join(dpath, f'{datatype}.txt')
101
102 print(f'Loading {load_path}.')
103 with PathManager.open(load_path, 'r', encoding='utf8') as f_read:
104 data = json.load(f_read)
105
106 print(f'Saving to {save_path}')
107 with PathManager.open(save_path, 'w', encoding='utf8') as f_write:
108 for episode in data:
109 assert (
110 len(episode['dialog'])
111 == len(episode['suggestions'])
112 == len(episode['chosen_suggestions'])
113 )
114 num_entries = len(episode['dialog']) // 2
115 for entry_idx in range(num_entries):
116 line = _get_line(
117 episode=episode, num_entries=num_entries, entry_idx=entry_idx
118 )
119 f_write.write(f'{line} \n')
120
121
122 def _get_line(episode: dict, num_entries: int, entry_idx: int) -> str:
123 """
124 Return the line to print in the reformatted file.
125 """
126 episode_done = entry_idx == num_entries - 1
127
128 # Compile original context
129 if entry_idx == 0:
130 # Add those pieces of context that appear in the datasets that this one was
131 # based on. Specifically:
132 # - Your persona, but not your partner's persona (from ConvAI2)
133 # - Topic (from Wizard of Wikipedia)
134 # - **Not** the situation (from EmpatheticDialogues)
135 persona_pieces = [
136 f"your persona: {episode['personas'][1][0]}",
137 f"your persona: {episode['personas'][1][1]}",
138 ]
139 if episode['context_dataset'] == 'wizard_of_wikipedia':
140 additional_context_pieces = [episode['additional_context']]
141 else:
142 additional_context_pieces = []
143 previous_utterance_pieces = [
144 episode['free_turker_utterance'],
145 episode['guided_turker_utterance'],
146 ]
147 original_context = (
148 '\n'.join(
149 persona_pieces + additional_context_pieces + previous_utterance_pieces
150 )
151 + '\n'
152 )
153 else:
154 original_context = ''
155
156 # Gather messages and suggestions
157 free_message = episode['dialog'][2 * entry_idx][1]
158 guided_message = episode['dialog'][2 * entry_idx + 1][1]
159 single_task_suggestions = {
160 task: episode['suggestions'][2 * entry_idx + 1][task]
161 for task in ['convai2', 'empathetic_dialogues', 'wizard_of_wikipedia']
162 }
163 guided_chosen_suggestion = episode['chosen_suggestions'][2 * entry_idx + 1]
164
165 # Compile into text string
166 parts = {
167 'text': original_context + free_message,
168 'labels': guided_message,
169 'context_dataset': episode['context_dataset'],
170 'free_message': free_message,
171 **single_task_suggestions,
172 'guided_chosen_suggestion': guided_chosen_suggestion,
173 }
174 assert all([isinstance(part, str) for part in parts.values()])
175 line = '\t'.join([f'{key}:{_escape(value)}' for key, value in parts.items()])
176
177 # Add episode_done
178 if episode_done:
179 line += '\tepisode_done:True'
180
181 # Add label_candidates
182 if 'label_candidates' in episode:
183 label_candidates = episode['label_candidates'][entry_idx]
184 # Note that episode['dialog'] is indexed by utterance (from either Turker) and
185 # episode['label_candidates'] is indexed by guided Turker response
186 assert all([isinstance(cand, str) for cand in label_candidates])
187 escaped_label_candidates = [_escape(cand) for cand in label_candidates]
188 line += '\tlabel_candidates:' + '|'.join(escaped_label_candidates)
189 return line
190
191
192 def _escape(value: str) -> str:
193 return value.replace('\t', '\\t').replace('\n', '\\n').replace('|', '__PIPE__')
194
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/parlai/tasks/blended_skill_talk/build.py b/parlai/tasks/blended_skill_talk/build.py
--- a/parlai/tasks/blended_skill_talk/build.py
+++ b/parlai/tasks/blended_skill_talk/build.py
@@ -60,12 +60,18 @@
'2ee292aa0006ea002e9b23d4f7326fe9e17514ce5793d31fd8d679035d4366a7',
zipped=False,
),
+ build_data.DownloadableFile(
+ 'http://parl.ai/downloads/blended_skill_talk/human_annotations.json',
+ 'human_annotations.json',
+ 'fe76a989d4fdcfa4a5beb214054069ed0abf45f3530eaeb8df38e75f7bb14795',
+ zipped=False,
+ ),
]
def build(opt):
dpath = os.path.join(opt['datapath'], 'blended_skill_talk')
- version = 'v1.4'
+ version = 'v1.5'
if not build_data.built(dpath, version_string=version):
print('[building data: ' + dpath + ']')
|
{"golden_diff": "diff --git a/parlai/tasks/blended_skill_talk/build.py b/parlai/tasks/blended_skill_talk/build.py\n--- a/parlai/tasks/blended_skill_talk/build.py\n+++ b/parlai/tasks/blended_skill_talk/build.py\n@@ -60,12 +60,18 @@\n '2ee292aa0006ea002e9b23d4f7326fe9e17514ce5793d31fd8d679035d4366a7',\n zipped=False,\n ),\n+ build_data.DownloadableFile(\n+ 'http://parl.ai/downloads/blended_skill_talk/human_annotations.json',\n+ 'human_annotations.json',\n+ 'fe76a989d4fdcfa4a5beb214054069ed0abf45f3530eaeb8df38e75f7bb14795',\n+ zipped=False,\n+ ),\n ]\n \n \n def build(opt):\n dpath = os.path.join(opt['datapath'], 'blended_skill_talk')\n- version = 'v1.4'\n+ version = 'v1.5'\n \n if not build_data.built(dpath, version_string=version):\n print('[building data: ' + dpath + ']')\n", "issue": "BST dataset: where is 'mode' for each utterances?\nHi, I have a question about BST dataset (https://arxiv.org/pdf/2004.08449.pdf).\r\n<img width=\"971\" alt=\"\u1109\u1173\u110f\u1173\u1105\u1175\u11ab\u1109\u1163\u11ba 2021-01-03 \u110b\u1169\u1112\u116e 9 48 39\" src=\"https://user-images.githubusercontent.com/15323600/103478953-7afeea80-4e0d-11eb-9a1d-c5b87b237d48.png\">\r\n\r\nWhile the paper and Fig. 1 tell us that each utterance are annotated with the 'mode' (e.g., _PB,K,E,S_),\r\n I cannot find it in raw BST dataset (i.e., `train.json, valid.json, test.json`) unfortunately :(\r\n\r\nWithout it, one cannot even reproduce **MT Two-Stage model** in the paper because the model is based on a dialog manager that classifies the mode given utterance.\r\n\r\nIs the 'mode' only used for private perhaps?\r\n\r\nThanks.\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n# Download and build the data if it does not exist.\n\nimport json\nimport os\n\nfrom parlai.core import build_data\nfrom parlai.utils.io import PathManager\n\n\nRESOURCES = [\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/blended_skill_talk.tar.gz',\n 'blended_skill_talk.tar.gz',\n '5fbed0068ee89e2d43b93c3ecb341e784617033efa5e8e911a219d4eda6134a6',\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/personas_list.txt',\n 'persona_list.txt',\n '59a51adedc78e806a380f16477de3740cefe3494d20f8a2a733841bedaaa3ee5',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/topic_to_persona_list.txt',\n 'topic_to_persona_list.txt',\n '47cdb6cbee0516ca7400be35fa07761339b86c6c026425bf5dba00e5534e8182',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__train__both_sides.json',\n 'ed_persona_topicifier__train__both_sides.json',\n 'ff2ea7c5fcb0449890d57a629cc3e8794ab95ac6db1057bf58d540c2b576e4cc',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__train__experiencer_only.json',\n 'ed_persona_topicifier__train__experiencer_only.json',\n '751f0ba2f421a11eee2bfc896d60ab70d669093c3a5f6cb30e8d202133a90ec7',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__valid__experiencer_only.json',\n 'ed_persona_topicifier__valid__experiencer_only.json',\n '15d5412f5990a8a9c892305009d8597a737322aafe878b03ec71143703b25ba0',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__test__experiencer_only.json',\n 'ed_persona_topicifier__test__experiencer_only.json',\n '2604e977787be0b5edc54561f7ce8a54c40758d235a3fee262fe20fe36b8cd15',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/safe_personas_2.txt',\n 'safe_personas.txt',\n '2ee292aa0006ea002e9b23d4f7326fe9e17514ce5793d31fd8d679035d4366a7',\n zipped=False,\n ),\n]\n\n\ndef build(opt):\n dpath = os.path.join(opt['datapath'], 'blended_skill_talk')\n version = 'v1.4'\n\n if not build_data.built(dpath, version_string=version):\n print('[building data: ' + dpath + ']')\n if build_data.built(dpath):\n # An older version exists, so remove these outdated files\n build_data.remove_dir(dpath)\n build_data.make_dir(dpath)\n\n # Download the data\n for downloadable_file in RESOURCES:\n downloadable_file.download_file(dpath)\n\n # Format it for use with ParlAIDialogTeacher\n _create_parlai_format(dpath)\n\n # Mark the data as built\n build_data.mark_done(dpath, version_string=version)\n\n\ndef _create_parlai_format(dpath: str):\n \"\"\"\n Copy data into the format read by ParlAIDialogTeacher.\n\n 'text' will be from the free Turker, who speaks first, and 'label' will be from the\n guided Turker.\n \"\"\"\n\n datatypes = ['train', 'valid', 'test']\n for datatype in datatypes:\n\n load_path = os.path.join(dpath, f'{datatype}.json')\n save_path = os.path.join(dpath, f'{datatype}.txt')\n\n print(f'Loading {load_path}.')\n with PathManager.open(load_path, 'r', encoding='utf8') as f_read:\n data = json.load(f_read)\n\n print(f'Saving to {save_path}')\n with PathManager.open(save_path, 'w', encoding='utf8') as f_write:\n for episode in data:\n assert (\n len(episode['dialog'])\n == len(episode['suggestions'])\n == len(episode['chosen_suggestions'])\n )\n num_entries = len(episode['dialog']) // 2\n for entry_idx in range(num_entries):\n line = _get_line(\n episode=episode, num_entries=num_entries, entry_idx=entry_idx\n )\n f_write.write(f'{line} \\n')\n\n\ndef _get_line(episode: dict, num_entries: int, entry_idx: int) -> str:\n \"\"\"\n Return the line to print in the reformatted file.\n \"\"\"\n episode_done = entry_idx == num_entries - 1\n\n # Compile original context\n if entry_idx == 0:\n # Add those pieces of context that appear in the datasets that this one was\n # based on. Specifically:\n # - Your persona, but not your partner's persona (from ConvAI2)\n # - Topic (from Wizard of Wikipedia)\n # - **Not** the situation (from EmpatheticDialogues)\n persona_pieces = [\n f\"your persona: {episode['personas'][1][0]}\",\n f\"your persona: {episode['personas'][1][1]}\",\n ]\n if episode['context_dataset'] == 'wizard_of_wikipedia':\n additional_context_pieces = [episode['additional_context']]\n else:\n additional_context_pieces = []\n previous_utterance_pieces = [\n episode['free_turker_utterance'],\n episode['guided_turker_utterance'],\n ]\n original_context = (\n '\\n'.join(\n persona_pieces + additional_context_pieces + previous_utterance_pieces\n )\n + '\\n'\n )\n else:\n original_context = ''\n\n # Gather messages and suggestions\n free_message = episode['dialog'][2 * entry_idx][1]\n guided_message = episode['dialog'][2 * entry_idx + 1][1]\n single_task_suggestions = {\n task: episode['suggestions'][2 * entry_idx + 1][task]\n for task in ['convai2', 'empathetic_dialogues', 'wizard_of_wikipedia']\n }\n guided_chosen_suggestion = episode['chosen_suggestions'][2 * entry_idx + 1]\n\n # Compile into text string\n parts = {\n 'text': original_context + free_message,\n 'labels': guided_message,\n 'context_dataset': episode['context_dataset'],\n 'free_message': free_message,\n **single_task_suggestions,\n 'guided_chosen_suggestion': guided_chosen_suggestion,\n }\n assert all([isinstance(part, str) for part in parts.values()])\n line = '\\t'.join([f'{key}:{_escape(value)}' for key, value in parts.items()])\n\n # Add episode_done\n if episode_done:\n line += '\\tepisode_done:True'\n\n # Add label_candidates\n if 'label_candidates' in episode:\n label_candidates = episode['label_candidates'][entry_idx]\n # Note that episode['dialog'] is indexed by utterance (from either Turker) and\n # episode['label_candidates'] is indexed by guided Turker response\n assert all([isinstance(cand, str) for cand in label_candidates])\n escaped_label_candidates = [_escape(cand) for cand in label_candidates]\n line += '\\tlabel_candidates:' + '|'.join(escaped_label_candidates)\n return line\n\n\ndef _escape(value: str) -> str:\n return value.replace('\\t', '\\\\t').replace('\\n', '\\\\n').replace('|', '__PIPE__')\n", "path": "parlai/tasks/blended_skill_talk/build.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n# Download and build the data if it does not exist.\n\nimport json\nimport os\n\nfrom parlai.core import build_data\nfrom parlai.utils.io import PathManager\n\n\nRESOURCES = [\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/blended_skill_talk.tar.gz',\n 'blended_skill_talk.tar.gz',\n '5fbed0068ee89e2d43b93c3ecb341e784617033efa5e8e911a219d4eda6134a6',\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/personas_list.txt',\n 'persona_list.txt',\n '59a51adedc78e806a380f16477de3740cefe3494d20f8a2a733841bedaaa3ee5',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/topic_to_persona_list.txt',\n 'topic_to_persona_list.txt',\n '47cdb6cbee0516ca7400be35fa07761339b86c6c026425bf5dba00e5534e8182',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__train__both_sides.json',\n 'ed_persona_topicifier__train__both_sides.json',\n 'ff2ea7c5fcb0449890d57a629cc3e8794ab95ac6db1057bf58d540c2b576e4cc',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__train__experiencer_only.json',\n 'ed_persona_topicifier__train__experiencer_only.json',\n '751f0ba2f421a11eee2bfc896d60ab70d669093c3a5f6cb30e8d202133a90ec7',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__valid__experiencer_only.json',\n 'ed_persona_topicifier__valid__experiencer_only.json',\n '15d5412f5990a8a9c892305009d8597a737322aafe878b03ec71143703b25ba0',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/ed_persona_topicifier__test__experiencer_only.json',\n 'ed_persona_topicifier__test__experiencer_only.json',\n '2604e977787be0b5edc54561f7ce8a54c40758d235a3fee262fe20fe36b8cd15',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/safe_personas_2.txt',\n 'safe_personas.txt',\n '2ee292aa0006ea002e9b23d4f7326fe9e17514ce5793d31fd8d679035d4366a7',\n zipped=False,\n ),\n build_data.DownloadableFile(\n 'http://parl.ai/downloads/blended_skill_talk/human_annotations.json',\n 'human_annotations.json',\n 'fe76a989d4fdcfa4a5beb214054069ed0abf45f3530eaeb8df38e75f7bb14795',\n zipped=False,\n ),\n]\n\n\ndef build(opt):\n dpath = os.path.join(opt['datapath'], 'blended_skill_talk')\n version = 'v1.5'\n\n if not build_data.built(dpath, version_string=version):\n print('[building data: ' + dpath + ']')\n if build_data.built(dpath):\n # An older version exists, so remove these outdated files\n build_data.remove_dir(dpath)\n build_data.make_dir(dpath)\n\n # Download the data\n for downloadable_file in RESOURCES:\n downloadable_file.download_file(dpath)\n\n # Format it for use with ParlAIDialogTeacher\n _create_parlai_format(dpath)\n\n # Mark the data as built\n build_data.mark_done(dpath, version_string=version)\n\n\ndef _create_parlai_format(dpath: str):\n \"\"\"\n Copy data into the format read by ParlAIDialogTeacher.\n\n 'text' will be from the free Turker, who speaks first, and 'label' will be from the\n guided Turker.\n \"\"\"\n\n datatypes = ['train', 'valid', 'test']\n for datatype in datatypes:\n\n load_path = os.path.join(dpath, f'{datatype}.json')\n save_path = os.path.join(dpath, f'{datatype}.txt')\n\n print(f'Loading {load_path}.')\n with PathManager.open(load_path, 'r', encoding='utf8') as f_read:\n data = json.load(f_read)\n\n print(f'Saving to {save_path}')\n with PathManager.open(save_path, 'w', encoding='utf8') as f_write:\n for episode in data:\n assert (\n len(episode['dialog'])\n == len(episode['suggestions'])\n == len(episode['chosen_suggestions'])\n )\n num_entries = len(episode['dialog']) // 2\n for entry_idx in range(num_entries):\n line = _get_line(\n episode=episode, num_entries=num_entries, entry_idx=entry_idx\n )\n f_write.write(f'{line} \\n')\n\n\ndef _get_line(episode: dict, num_entries: int, entry_idx: int) -> str:\n \"\"\"\n Return the line to print in the reformatted file.\n \"\"\"\n episode_done = entry_idx == num_entries - 1\n\n # Compile original context\n if entry_idx == 0:\n # Add those pieces of context that appear in the datasets that this one was\n # based on. Specifically:\n # - Your persona, but not your partner's persona (from ConvAI2)\n # - Topic (from Wizard of Wikipedia)\n # - **Not** the situation (from EmpatheticDialogues)\n persona_pieces = [\n f\"your persona: {episode['personas'][1][0]}\",\n f\"your persona: {episode['personas'][1][1]}\",\n ]\n if episode['context_dataset'] == 'wizard_of_wikipedia':\n additional_context_pieces = [episode['additional_context']]\n else:\n additional_context_pieces = []\n previous_utterance_pieces = [\n episode['free_turker_utterance'],\n episode['guided_turker_utterance'],\n ]\n original_context = (\n '\\n'.join(\n persona_pieces + additional_context_pieces + previous_utterance_pieces\n )\n + '\\n'\n )\n else:\n original_context = ''\n\n # Gather messages and suggestions\n free_message = episode['dialog'][2 * entry_idx][1]\n guided_message = episode['dialog'][2 * entry_idx + 1][1]\n single_task_suggestions = {\n task: episode['suggestions'][2 * entry_idx + 1][task]\n for task in ['convai2', 'empathetic_dialogues', 'wizard_of_wikipedia']\n }\n guided_chosen_suggestion = episode['chosen_suggestions'][2 * entry_idx + 1]\n\n # Compile into text string\n parts = {\n 'text': original_context + free_message,\n 'labels': guided_message,\n 'context_dataset': episode['context_dataset'],\n 'free_message': free_message,\n **single_task_suggestions,\n 'guided_chosen_suggestion': guided_chosen_suggestion,\n }\n assert all([isinstance(part, str) for part in parts.values()])\n line = '\\t'.join([f'{key}:{_escape(value)}' for key, value in parts.items()])\n\n # Add episode_done\n if episode_done:\n line += '\\tepisode_done:True'\n\n # Add label_candidates\n if 'label_candidates' in episode:\n label_candidates = episode['label_candidates'][entry_idx]\n # Note that episode['dialog'] is indexed by utterance (from either Turker) and\n # episode['label_candidates'] is indexed by guided Turker response\n assert all([isinstance(cand, str) for cand in label_candidates])\n escaped_label_candidates = [_escape(cand) for cand in label_candidates]\n line += '\\tlabel_candidates:' + '|'.join(escaped_label_candidates)\n return line\n\n\ndef _escape(value: str) -> str:\n return value.replace('\\t', '\\\\t').replace('\\n', '\\\\n').replace('|', '__PIPE__')\n", "path": "parlai/tasks/blended_skill_talk/build.py"}]}
| 3,066 | 315 |
gh_patches_debug_5634
|
rasdani/github-patches
|
git_diff
|
networkx__networkx-768
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Pajek exporter should not write first line "*network NetworkX"
Currently, the first line of a pajek file created by networkx looks like:
```
*network NetworkX
```
Many programs that import pajek files crash, because they expect the first line to be
```
*vertices 762
```
Given that the `*network NetworkX` serves no purpose, could we modify the pajek writer to not create this first line?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `networkx/readwrite/pajek.py`
Content:
```
1 """
2 *****
3 Pajek
4 *****
5 Read graphs in Pajek format.
6
7 This implementation handles directed and undirected graphs including
8 those with self loops and parallel edges.
9
10 Format
11 ------
12 See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm
13 for format information.
14 """
15 # Copyright (C) 2008-2011 by
16 # Aric Hagberg <[email protected]>
17 # Dan Schult <[email protected]>
18 # Pieter Swart <[email protected]>
19 # All rights reserved.
20 # BSD license.
21 import networkx as nx
22 from networkx.utils import is_string_like, open_file, make_str
23 __author__ = """Aric Hagberg ([email protected])"""
24 __all__ = ['read_pajek', 'parse_pajek', 'generate_pajek', 'write_pajek']
25
26 def generate_pajek(G):
27 """Generate lines in Pajek graph format.
28
29 Parameters
30 ----------
31 G : graph
32 A Networkx graph
33
34 References
35 ----------
36 See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm
37 for format information.
38 """
39 if G.name=='':
40 name='NetworkX'
41 else:
42 name=G.name
43 yield '*network %s'%name
44
45 # write nodes with attributes
46 yield '*vertices %s'%(G.order())
47 nodes = G.nodes()
48 # make dictionary mapping nodes to integers
49 nodenumber=dict(zip(nodes,range(1,len(nodes)+1)))
50 for n in nodes:
51 na=G.node.get(n,{})
52 x=na.get('x',0.0)
53 y=na.get('y',0.0)
54 id=int(na.get('id',nodenumber[n]))
55 nodenumber[n]=id
56 shape=na.get('shape','ellipse')
57 s=' '.join(map(make_qstr,(id,n,x,y,shape)))
58 for k,v in na.items():
59 s+=' %s %s'%(make_qstr(k),make_qstr(v))
60 yield s
61
62 # write edges with attributes
63 if G.is_directed():
64 yield '*arcs'
65 else:
66 yield '*edges'
67 for u,v,edgedata in G.edges(data=True):
68 d=edgedata.copy()
69 value=d.pop('weight',1.0) # use 1 as default edge value
70 s=' '.join(map(make_qstr,(nodenumber[u],nodenumber[v],value)))
71 for k,v in d.items():
72 s+=' %s %s'%(make_qstr(k),make_qstr(v))
73 s+=' %s %s'%(k,v)
74 yield s
75
76 @open_file(1,mode='wb')
77 def write_pajek(G, path, encoding='UTF-8'):
78 """Write graph in Pajek format to path.
79
80 Parameters
81 ----------
82 G : graph
83 A Networkx graph
84 path : file or string
85 File or filename to write.
86 Filenames ending in .gz or .bz2 will be compressed.
87
88 Examples
89 --------
90 >>> G=nx.path_graph(4)
91 >>> nx.write_pajek(G, "test.net")
92
93 References
94 ----------
95 See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm
96 for format information.
97 """
98 for line in generate_pajek(G):
99 line+='\n'
100 path.write(line.encode(encoding))
101
102 @open_file(0,mode='rb')
103 def read_pajek(path,encoding='UTF-8'):
104 """Read graph in Pajek format from path.
105
106 Parameters
107 ----------
108 path : file or string
109 File or filename to write.
110 Filenames ending in .gz or .bz2 will be uncompressed.
111
112 Returns
113 -------
114 G : NetworkX MultiGraph or MultiDiGraph.
115
116 Examples
117 --------
118 >>> G=nx.path_graph(4)
119 >>> nx.write_pajek(G, "test.net")
120 >>> G=nx.read_pajek("test.net")
121
122 To create a Graph instead of a MultiGraph use
123
124 >>> G1=nx.Graph(G)
125
126 References
127 ----------
128 See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm
129 for format information.
130 """
131 lines = (line.decode(encoding) for line in path)
132 return parse_pajek(lines)
133
134 def parse_pajek(lines):
135 """Parse Pajek format graph from string or iterable.
136
137 Parameters
138 ----------
139 lines : string or iterable
140 Data in Pajek format.
141
142 Returns
143 -------
144 G : NetworkX graph
145
146 See Also
147 --------
148 read_pajek()
149
150 """
151 import shlex
152 # multigraph=False
153 if is_string_like(lines): lines=iter(lines.split('\n'))
154 lines = iter([line.rstrip('\n') for line in lines])
155 G=nx.MultiDiGraph() # are multiedges allowed in Pajek? assume yes
156 while lines:
157 try:
158 l=next(lines)
159 except: #EOF
160 break
161 if l.lower().startswith("*network"):
162 label,name=l.split()
163 G.name=name
164 if l.lower().startswith("*vertices"):
165 nodelabels={}
166 l,nnodes=l.split()
167 for i in range(int(nnodes)):
168 splitline=shlex.split(str(next(lines)))
169 id,label=splitline[0:2]
170 G.add_node(label)
171 nodelabels[id]=label
172 G.node[label]={'id':id}
173 try:
174 x,y,shape=splitline[2:5]
175 G.node[label].update({'x':float(x),
176 'y':float(y),
177 'shape':shape})
178 except:
179 pass
180 extra_attr=zip(splitline[5::2],splitline[6::2])
181 G.node[label].update(extra_attr)
182 if l.lower().startswith("*edges") or l.lower().startswith("*arcs"):
183 if l.lower().startswith("*edge"):
184 # switch from multidigraph to multigraph
185 G=nx.MultiGraph(G)
186 if l.lower().startswith("*arcs"):
187 # switch to directed with multiple arcs for each existing edge
188 G=G.to_directed()
189 for l in lines:
190 splitline=shlex.split(str(l))
191 if len(splitline)<2:
192 continue
193 ui,vi=splitline[0:2]
194 u=nodelabels.get(ui,ui)
195 v=nodelabels.get(vi,vi)
196 # parse the data attached to this edge and put in a dictionary
197 edge_data={}
198 try:
199 # there should always be a single value on the edge?
200 w=splitline[2:3]
201 edge_data.update({'weight':float(w[0])})
202 except:
203 pass
204 # if there isn't, just assign a 1
205 # edge_data.update({'value':1})
206 extra_attr=zip(splitline[3::2],splitline[4::2])
207 edge_data.update(extra_attr)
208 # if G.has_edge(u,v):
209 # multigraph=True
210 G.add_edge(u,v,**edge_data)
211 return G
212
213
214
215 def make_qstr(t):
216 """Return the string representation of t.
217 Add outer double-quotes if the string has a space.
218 """
219 if not is_string_like(t):
220 t = str(t)
221 if " " in t:
222 t=r'"%s"'%t
223 return t
224
225
226 # fixture for nose tests
227 def teardown_module(module):
228 import os
229 os.unlink('test.net')
230
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/networkx/readwrite/pajek.py b/networkx/readwrite/pajek.py
--- a/networkx/readwrite/pajek.py
+++ b/networkx/readwrite/pajek.py
@@ -37,10 +37,12 @@
for format information.
"""
if G.name=='':
- name='NetworkX'
+ name='NetworkX'
else:
- name=G.name
- yield '*network %s'%name
+ name=G.name
+ # Apparently many Pajek format readers can't process this line
+ # So we'll leave it out for now.
+ # yield '*network %s'%name
# write nodes with attributes
yield '*vertices %s'%(G.order())
|
{"golden_diff": "diff --git a/networkx/readwrite/pajek.py b/networkx/readwrite/pajek.py\n--- a/networkx/readwrite/pajek.py\n+++ b/networkx/readwrite/pajek.py\n@@ -37,10 +37,12 @@\n for format information.\n \"\"\"\n if G.name=='': \n- name='NetworkX'\n+ name='NetworkX'\n else:\n- name=G.name\n- yield '*network %s'%name\n+ name=G.name\n+ # Apparently many Pajek format readers can't process this line\n+ # So we'll leave it out for now.\n+ # yield '*network %s'%name\n \n # write nodes with attributes\n yield '*vertices %s'%(G.order())\n", "issue": "Pajek exporter should not write first line \"*network NetworkX\"\nCurrently, the first line of a pajek file created by networkx looks like:\n\n```\n*network NetworkX\n```\n\nMany programs that import pajek files crash, because they expect the first line to be \n\n```\n*vertices 762\n```\n\nGiven that the `*network NetworkX` serves no purpose, could we modify the pajek writer to not create this first line?\n\n", "before_files": [{"content": "\"\"\"\n*****\nPajek\n*****\nRead graphs in Pajek format.\n\nThis implementation handles directed and undirected graphs including\nthose with self loops and parallel edges. \n\nFormat\n------\nSee http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm\nfor format information.\n\"\"\"\n# Copyright (C) 2008-2011 by \n# Aric Hagberg <[email protected]>\n# Dan Schult <[email protected]>\n# Pieter Swart <[email protected]>\n# All rights reserved.\n# BSD license.\nimport networkx as nx\nfrom networkx.utils import is_string_like, open_file, make_str\n__author__ = \"\"\"Aric Hagberg ([email protected])\"\"\"\n__all__ = ['read_pajek', 'parse_pajek', 'generate_pajek', 'write_pajek']\n\ndef generate_pajek(G):\n \"\"\"Generate lines in Pajek graph format.\n\n Parameters\n ----------\n G : graph\n A Networkx graph\n\n References\n ----------\n See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm\n for format information.\n \"\"\"\n if G.name=='': \n name='NetworkX'\n else:\n name=G.name\n yield '*network %s'%name\n\n # write nodes with attributes\n yield '*vertices %s'%(G.order())\n nodes = G.nodes()\n # make dictionary mapping nodes to integers\n nodenumber=dict(zip(nodes,range(1,len(nodes)+1))) \n for n in nodes:\n na=G.node.get(n,{})\n x=na.get('x',0.0)\n y=na.get('y',0.0)\n id=int(na.get('id',nodenumber[n]))\n nodenumber[n]=id\n shape=na.get('shape','ellipse')\n s=' '.join(map(make_qstr,(id,n,x,y,shape)))\n for k,v in na.items():\n s+=' %s %s'%(make_qstr(k),make_qstr(v))\n yield s\n\n # write edges with attributes \n if G.is_directed():\n yield '*arcs'\n else:\n yield '*edges'\n for u,v,edgedata in G.edges(data=True):\n d=edgedata.copy()\n value=d.pop('weight',1.0) # use 1 as default edge value\n s=' '.join(map(make_qstr,(nodenumber[u],nodenumber[v],value)))\n for k,v in d.items():\n s+=' %s %s'%(make_qstr(k),make_qstr(v))\n s+=' %s %s'%(k,v)\n yield s\n\n@open_file(1,mode='wb')\ndef write_pajek(G, path, encoding='UTF-8'):\n \"\"\"Write graph in Pajek format to path.\n\n Parameters\n ----------\n G : graph\n A Networkx graph\n path : file or string\n File or filename to write. \n Filenames ending in .gz or .bz2 will be compressed.\n\n Examples\n --------\n >>> G=nx.path_graph(4)\n >>> nx.write_pajek(G, \"test.net\")\n\n References\n ----------\n See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm\n for format information.\n \"\"\"\n for line in generate_pajek(G):\n line+='\\n'\n path.write(line.encode(encoding))\n\n@open_file(0,mode='rb')\ndef read_pajek(path,encoding='UTF-8'):\n \"\"\"Read graph in Pajek format from path. \n\n Parameters\n ----------\n path : file or string\n File or filename to write. \n Filenames ending in .gz or .bz2 will be uncompressed.\n\n Returns\n -------\n G : NetworkX MultiGraph or MultiDiGraph.\n\n Examples\n --------\n >>> G=nx.path_graph(4)\n >>> nx.write_pajek(G, \"test.net\")\n >>> G=nx.read_pajek(\"test.net\")\n\n To create a Graph instead of a MultiGraph use\n\n >>> G1=nx.Graph(G)\n\n References\n ----------\n See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm\n for format information.\n \"\"\"\n lines = (line.decode(encoding) for line in path)\n return parse_pajek(lines)\n\ndef parse_pajek(lines):\n \"\"\"Parse Pajek format graph from string or iterable.\n\n Parameters\n ----------\n lines : string or iterable\n Data in Pajek format.\n\n Returns\n -------\n G : NetworkX graph\n\n See Also\n --------\n read_pajek()\n\n \"\"\"\n import shlex\n # multigraph=False\n if is_string_like(lines): lines=iter(lines.split('\\n'))\n lines = iter([line.rstrip('\\n') for line in lines])\n G=nx.MultiDiGraph() # are multiedges allowed in Pajek? assume yes\n while lines:\n try:\n l=next(lines)\n except: #EOF\n break\n if l.lower().startswith(\"*network\"):\n label,name=l.split()\n G.name=name\n if l.lower().startswith(\"*vertices\"):\n nodelabels={}\n l,nnodes=l.split()\n for i in range(int(nnodes)):\n splitline=shlex.split(str(next(lines)))\n id,label=splitline[0:2]\n G.add_node(label)\n nodelabels[id]=label\n G.node[label]={'id':id}\n try: \n x,y,shape=splitline[2:5]\n G.node[label].update({'x':float(x),\n 'y':float(y),\n 'shape':shape})\n except:\n pass\n extra_attr=zip(splitline[5::2],splitline[6::2])\n G.node[label].update(extra_attr)\n if l.lower().startswith(\"*edges\") or l.lower().startswith(\"*arcs\"):\n if l.lower().startswith(\"*edge\"):\n # switch from multidigraph to multigraph\n G=nx.MultiGraph(G)\n if l.lower().startswith(\"*arcs\"):\n # switch to directed with multiple arcs for each existing edge\n G=G.to_directed()\n for l in lines:\n splitline=shlex.split(str(l))\n if len(splitline)<2:\n continue\n ui,vi=splitline[0:2]\n u=nodelabels.get(ui,ui)\n v=nodelabels.get(vi,vi)\n # parse the data attached to this edge and put in a dictionary \n edge_data={}\n try:\n # there should always be a single value on the edge?\n w=splitline[2:3]\n edge_data.update({'weight':float(w[0])})\n except:\n pass\n # if there isn't, just assign a 1\n# edge_data.update({'value':1})\n extra_attr=zip(splitline[3::2],splitline[4::2])\n edge_data.update(extra_attr)\n # if G.has_edge(u,v):\n # multigraph=True\n G.add_edge(u,v,**edge_data)\n return G\n\n\n\ndef make_qstr(t):\n \"\"\"Return the string representation of t. \n Add outer double-quotes if the string has a space.\n \"\"\"\n if not is_string_like(t): \n t = str(t)\n if \" \" in t: \n t=r'\"%s\"'%t\n return t\n\n\n# fixture for nose tests\ndef teardown_module(module):\n import os\n os.unlink('test.net')\n", "path": "networkx/readwrite/pajek.py"}], "after_files": [{"content": "\"\"\"\n*****\nPajek\n*****\nRead graphs in Pajek format.\n\nThis implementation handles directed and undirected graphs including\nthose with self loops and parallel edges. \n\nFormat\n------\nSee http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm\nfor format information.\n\"\"\"\n# Copyright (C) 2008-2011 by \n# Aric Hagberg <[email protected]>\n# Dan Schult <[email protected]>\n# Pieter Swart <[email protected]>\n# All rights reserved.\n# BSD license.\nimport networkx as nx\nfrom networkx.utils import is_string_like, open_file, make_str\n__author__ = \"\"\"Aric Hagberg ([email protected])\"\"\"\n__all__ = ['read_pajek', 'parse_pajek', 'generate_pajek', 'write_pajek']\n\ndef generate_pajek(G):\n \"\"\"Generate lines in Pajek graph format.\n\n Parameters\n ----------\n G : graph\n A Networkx graph\n\n References\n ----------\n See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm\n for format information.\n \"\"\"\n if G.name=='': \n name='NetworkX'\n else:\n name=G.name\n # Apparently many Pajek format readers can't process this line\n # So we'll leave it out for now.\n # yield '*network %s'%name\n\n # write nodes with attributes\n yield '*vertices %s'%(G.order())\n nodes = G.nodes()\n # make dictionary mapping nodes to integers\n nodenumber=dict(zip(nodes,range(1,len(nodes)+1))) \n for n in nodes:\n na=G.node.get(n,{})\n x=na.get('x',0.0)\n y=na.get('y',0.0)\n id=int(na.get('id',nodenumber[n]))\n nodenumber[n]=id\n shape=na.get('shape','ellipse')\n s=' '.join(map(make_qstr,(id,n,x,y,shape)))\n for k,v in na.items():\n s+=' %s %s'%(make_qstr(k),make_qstr(v))\n yield s\n\n # write edges with attributes \n if G.is_directed():\n yield '*arcs'\n else:\n yield '*edges'\n for u,v,edgedata in G.edges(data=True):\n d=edgedata.copy()\n value=d.pop('weight',1.0) # use 1 as default edge value\n s=' '.join(map(make_qstr,(nodenumber[u],nodenumber[v],value)))\n for k,v in d.items():\n s+=' %s %s'%(make_qstr(k),make_qstr(v))\n s+=' %s %s'%(k,v)\n yield s\n\n@open_file(1,mode='wb')\ndef write_pajek(G, path, encoding='UTF-8'):\n \"\"\"Write graph in Pajek format to path.\n\n Parameters\n ----------\n G : graph\n A Networkx graph\n path : file or string\n File or filename to write. \n Filenames ending in .gz or .bz2 will be compressed.\n\n Examples\n --------\n >>> G=nx.path_graph(4)\n >>> nx.write_pajek(G, \"test.net\")\n\n References\n ----------\n See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm\n for format information.\n \"\"\"\n for line in generate_pajek(G):\n line+='\\n'\n path.write(line.encode(encoding))\n\n@open_file(0,mode='rb')\ndef read_pajek(path,encoding='UTF-8'):\n \"\"\"Read graph in Pajek format from path. \n\n Parameters\n ----------\n path : file or string\n File or filename to write. \n Filenames ending in .gz or .bz2 will be uncompressed.\n\n Returns\n -------\n G : NetworkX MultiGraph or MultiDiGraph.\n\n Examples\n --------\n >>> G=nx.path_graph(4)\n >>> nx.write_pajek(G, \"test.net\")\n >>> G=nx.read_pajek(\"test.net\")\n\n To create a Graph instead of a MultiGraph use\n\n >>> G1=nx.Graph(G)\n\n References\n ----------\n See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm\n for format information.\n \"\"\"\n lines = (line.decode(encoding) for line in path)\n return parse_pajek(lines)\n\ndef parse_pajek(lines):\n \"\"\"Parse Pajek format graph from string or iterable.\n\n Parameters\n ----------\n lines : string or iterable\n Data in Pajek format.\n\n Returns\n -------\n G : NetworkX graph\n\n See Also\n --------\n read_pajek()\n\n \"\"\"\n import shlex\n # multigraph=False\n if is_string_like(lines): lines=iter(lines.split('\\n'))\n lines = iter([line.rstrip('\\n') for line in lines])\n G=nx.MultiDiGraph() # are multiedges allowed in Pajek? assume yes\n while lines:\n try:\n l=next(lines)\n except: #EOF\n break\n if l.lower().startswith(\"*network\"):\n label,name=l.split()\n G.name=name\n if l.lower().startswith(\"*vertices\"):\n nodelabels={}\n l,nnodes=l.split()\n for i in range(int(nnodes)):\n splitline=shlex.split(str(next(lines)))\n id,label=splitline[0:2]\n G.add_node(label)\n nodelabels[id]=label\n G.node[label]={'id':id}\n try: \n x,y,shape=splitline[2:5]\n G.node[label].update({'x':float(x),\n 'y':float(y),\n 'shape':shape})\n except:\n pass\n extra_attr=zip(splitline[5::2],splitline[6::2])\n G.node[label].update(extra_attr)\n if l.lower().startswith(\"*edges\") or l.lower().startswith(\"*arcs\"):\n if l.lower().startswith(\"*edge\"):\n # switch from multidigraph to multigraph\n G=nx.MultiGraph(G)\n if l.lower().startswith(\"*arcs\"):\n # switch to directed with multiple arcs for each existing edge\n G=G.to_directed()\n for l in lines:\n splitline=shlex.split(str(l))\n if len(splitline)<2:\n continue\n ui,vi=splitline[0:2]\n u=nodelabels.get(ui,ui)\n v=nodelabels.get(vi,vi)\n # parse the data attached to this edge and put in a dictionary \n edge_data={}\n try:\n # there should always be a single value on the edge?\n w=splitline[2:3]\n edge_data.update({'weight':float(w[0])})\n except:\n pass\n # if there isn't, just assign a 1\n# edge_data.update({'value':1})\n extra_attr=zip(splitline[3::2],splitline[4::2])\n edge_data.update(extra_attr)\n # if G.has_edge(u,v):\n # multigraph=True\n G.add_edge(u,v,**edge_data)\n return G\n\n\n\ndef make_qstr(t):\n \"\"\"Return the string representation of t. \n Add outer double-quotes if the string has a space.\n \"\"\"\n if not is_string_like(t): \n t = str(t)\n if \" \" in t: \n t=r'\"%s\"'%t\n return t\n\n\n# fixture for nose tests\ndef teardown_module(module):\n import os\n os.unlink('test.net')\n", "path": "networkx/readwrite/pajek.py"}]}
| 2,648 | 167 |
gh_patches_debug_22156
|
rasdani/github-patches
|
git_diff
|
open-telemetry__opentelemetry-python-contrib-1510
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add readthedocs documentation for urlib instrumentation
Part of [1491](https://github.com/open-telemetry/opentelemetry-python-contrib/issues/1491)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15
16 """
17 This library allows tracing HTTP requests made by the
18 `urllib https://docs.python.org/3/library/urllib.html>`_ library.
19
20 Usage
21 -----
22
23 .. code-block:: python
24
25 from urllib import request
26 from opentelemetry.instrumentation.urllib import URLLibInstrumentor
27
28 # You can optionally pass a custom TracerProvider to
29 # URLLibInstrumentor().instrument()
30
31 URLLibInstrumentor().instrument()
32 req = request.Request('https://postman-echo.com/post', method="POST")
33 r = request.urlopen(req)
34
35 Configuration
36 -------------
37
38 Request/Response hooks
39 **********************
40
41 The urllib instrumentation supports extending tracing behavior with the help of
42 request and response hooks. These are functions that are called back by the instrumentation
43 right after a Span is created for a request and right before the span is finished processing a response respectively.
44 The hooks can be configured as follows:
45
46 ..code:: python
47
48 # `request_obj` is an instance of urllib.request.Request
49 def request_hook(span, request_obj):
50 pass
51
52 # `request_obj` is an instance of urllib.request.Request
53 # `response` is an instance of http.client.HTTPResponse
54 def response_hook(span, request_obj, response)
55 pass
56
57 URLLibInstrumentor.instrument(
58 request_hook=request_hook, response_hook=response_hook)
59 )
60
61 API
62 ---
63 """
64
65 import functools
66 import types
67 import typing
68
69 # from urllib import response
70 from http import client
71 from typing import Collection
72 from urllib.request import ( # pylint: disable=no-name-in-module,import-error
73 OpenerDirector,
74 Request,
75 )
76
77 from opentelemetry import context
78
79 # FIXME: fix the importing of this private attribute when the location of the _SUPPRESS_HTTP_INSTRUMENTATION_KEY is defined.
80 from opentelemetry.context import _SUPPRESS_HTTP_INSTRUMENTATION_KEY
81 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
82 from opentelemetry.instrumentation.urllib.package import _instruments
83 from opentelemetry.instrumentation.urllib.version import __version__
84 from opentelemetry.instrumentation.utils import (
85 _SUPPRESS_INSTRUMENTATION_KEY,
86 http_status_to_status_code,
87 )
88 from opentelemetry.propagate import inject
89 from opentelemetry.semconv.trace import SpanAttributes
90 from opentelemetry.trace import Span, SpanKind, get_tracer
91 from opentelemetry.trace.status import Status
92 from opentelemetry.util.http import remove_url_credentials
93
94 _RequestHookT = typing.Optional[typing.Callable[[Span, Request], None]]
95 _ResponseHookT = typing.Optional[
96 typing.Callable[[Span, Request, client.HTTPResponse], None]
97 ]
98
99
100 class URLLibInstrumentor(BaseInstrumentor):
101 """An instrumentor for urllib
102 See `BaseInstrumentor`
103 """
104
105 def instrumentation_dependencies(self) -> Collection[str]:
106 return _instruments
107
108 def _instrument(self, **kwargs):
109 """Instruments urllib module
110
111 Args:
112 **kwargs: Optional arguments
113 ``tracer_provider``: a TracerProvider, defaults to global
114 ``request_hook``: An optional callback invoked that is invoked right after a span is created.
115 ``response_hook``: An optional callback which is invoked right before the span is finished processing a response
116 """
117 tracer_provider = kwargs.get("tracer_provider")
118 tracer = get_tracer(__name__, __version__, tracer_provider)
119 _instrument(
120 tracer,
121 request_hook=kwargs.get("request_hook"),
122 response_hook=kwargs.get("response_hook"),
123 )
124
125 def _uninstrument(self, **kwargs):
126 _uninstrument()
127
128 def uninstrument_opener(
129 self, opener: OpenerDirector
130 ): # pylint: disable=no-self-use
131 """uninstrument_opener a specific instance of urllib.request.OpenerDirector"""
132 _uninstrument_from(opener, restore_as_bound_func=True)
133
134
135 def _instrument(
136 tracer,
137 request_hook: _RequestHookT = None,
138 response_hook: _ResponseHookT = None,
139 ):
140 """Enables tracing of all requests calls that go through
141 :code:`urllib.Client._make_request`"""
142
143 opener_open = OpenerDirector.open
144
145 @functools.wraps(opener_open)
146 def instrumented_open(opener, fullurl, data=None, timeout=None):
147
148 if isinstance(fullurl, str):
149 request_ = Request(fullurl, data)
150 else:
151 request_ = fullurl
152
153 def get_or_create_headers():
154 return getattr(request_, "headers", {})
155
156 def call_wrapped():
157 return opener_open(opener, request_, data=data, timeout=timeout)
158
159 return _instrumented_open_call(
160 opener, request_, call_wrapped, get_or_create_headers
161 )
162
163 def _instrumented_open_call(
164 _, request, call_wrapped, get_or_create_headers
165 ): # pylint: disable=too-many-locals
166 if context.get_value(
167 _SUPPRESS_INSTRUMENTATION_KEY
168 ) or context.get_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY):
169 return call_wrapped()
170
171 method = request.get_method().upper()
172 url = request.full_url
173
174 span_name = f"HTTP {method}".strip()
175
176 url = remove_url_credentials(url)
177
178 labels = {
179 SpanAttributes.HTTP_METHOD: method,
180 SpanAttributes.HTTP_URL: url,
181 }
182
183 with tracer.start_as_current_span(
184 span_name, kind=SpanKind.CLIENT, attributes=labels
185 ) as span:
186 exception = None
187 if callable(request_hook):
188 request_hook(span, request)
189
190 headers = get_or_create_headers()
191 inject(headers)
192
193 token = context.attach(
194 context.set_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY, True)
195 )
196 try:
197 result = call_wrapped() # *** PROCEED
198 except Exception as exc: # pylint: disable=W0703
199 exception = exc
200 result = getattr(exc, "file", None)
201 finally:
202 context.detach(token)
203
204 if result is not None:
205
206 code_ = result.getcode()
207 labels[SpanAttributes.HTTP_STATUS_CODE] = str(code_)
208
209 if span.is_recording() and code_ is not None:
210 span.set_attribute(SpanAttributes.HTTP_STATUS_CODE, code_)
211 span.set_status(Status(http_status_to_status_code(code_)))
212
213 ver_ = str(getattr(result, "version", ""))
214 if ver_:
215 labels[
216 SpanAttributes.HTTP_FLAVOR
217 ] = f"{ver_[:1]}.{ver_[:-1]}"
218
219 if callable(response_hook):
220 response_hook(span, request, result)
221
222 if exception is not None:
223 raise exception.with_traceback(exception.__traceback__)
224
225 return result
226
227 instrumented_open.opentelemetry_instrumentation_urllib_applied = True
228 OpenerDirector.open = instrumented_open
229
230
231 def _uninstrument():
232 """Disables instrumentation of :code:`urllib` through this module.
233
234 Note that this only works if no other module also patches urllib."""
235 _uninstrument_from(OpenerDirector)
236
237
238 def _uninstrument_from(instr_root, restore_as_bound_func=False):
239
240 instr_func_name = "open"
241 instr_func = getattr(instr_root, instr_func_name)
242 if not getattr(
243 instr_func,
244 "opentelemetry_instrumentation_urllib_applied",
245 False,
246 ):
247 return
248
249 original = instr_func.__wrapped__ # pylint:disable=no-member
250 if restore_as_bound_func:
251 original = types.MethodType(original, instr_root)
252 setattr(instr_root, instr_func_name, original)
253
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py b/instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py
--- a/instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py
+++ b/instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py
@@ -12,14 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
"""
This library allows tracing HTTP requests made by the
-`urllib https://docs.python.org/3/library/urllib.html>`_ library.
+`urllib <https://docs.python.org/3/library/urllib>`_ library.
Usage
-----
-
.. code-block:: python
from urllib import request
@@ -43,7 +41,7 @@
right after a Span is created for a request and right before the span is finished processing a response respectively.
The hooks can be configured as follows:
-..code:: python
+.. code:: python
# `request_obj` is an instance of urllib.request.Request
def request_hook(span, request_obj):
|
{"golden_diff": "diff --git a/instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py b/instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py\n--- a/instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py\n+++ b/instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py\n@@ -12,14 +12,12 @@\n # See the License for the specific language governing permissions and\n # limitations under the License.\n \n-\n \"\"\"\n This library allows tracing HTTP requests made by the\n-`urllib https://docs.python.org/3/library/urllib.html>`_ library.\n+`urllib <https://docs.python.org/3/library/urllib>`_ library.\n \n Usage\n -----\n-\n .. code-block:: python\n \n from urllib import request\n@@ -43,7 +41,7 @@\n right after a Span is created for a request and right before the span is finished processing a response respectively.\n The hooks can be configured as follows:\n \n-..code:: python\n+.. code:: python\n \n # `request_obj` is an instance of urllib.request.Request\n def request_hook(span, request_obj):\n", "issue": "Add readthedocs documentation for urlib instrumentation\nPart of [1491](https://github.com/open-telemetry/opentelemetry-python-contrib/issues/1491)\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\n\"\"\"\nThis library allows tracing HTTP requests made by the\n`urllib https://docs.python.org/3/library/urllib.html>`_ library.\n\nUsage\n-----\n\n.. code-block:: python\n\n from urllib import request\n from opentelemetry.instrumentation.urllib import URLLibInstrumentor\n\n # You can optionally pass a custom TracerProvider to\n # URLLibInstrumentor().instrument()\n\n URLLibInstrumentor().instrument()\n req = request.Request('https://postman-echo.com/post', method=\"POST\")\n r = request.urlopen(req)\n\nConfiguration\n-------------\n\nRequest/Response hooks\n**********************\n\nThe urllib instrumentation supports extending tracing behavior with the help of\nrequest and response hooks. These are functions that are called back by the instrumentation\nright after a Span is created for a request and right before the span is finished processing a response respectively.\nThe hooks can be configured as follows:\n\n..code:: python\n\n # `request_obj` is an instance of urllib.request.Request\n def request_hook(span, request_obj):\n pass\n\n # `request_obj` is an instance of urllib.request.Request\n # `response` is an instance of http.client.HTTPResponse\n def response_hook(span, request_obj, response)\n pass\n\n URLLibInstrumentor.instrument(\n request_hook=request_hook, response_hook=response_hook)\n )\n\nAPI\n---\n\"\"\"\n\nimport functools\nimport types\nimport typing\n\n# from urllib import response\nfrom http import client\nfrom typing import Collection\nfrom urllib.request import ( # pylint: disable=no-name-in-module,import-error\n OpenerDirector,\n Request,\n)\n\nfrom opentelemetry import context\n\n# FIXME: fix the importing of this private attribute when the location of the _SUPPRESS_HTTP_INSTRUMENTATION_KEY is defined.\nfrom opentelemetry.context import _SUPPRESS_HTTP_INSTRUMENTATION_KEY\nfrom opentelemetry.instrumentation.instrumentor import BaseInstrumentor\nfrom opentelemetry.instrumentation.urllib.package import _instruments\nfrom opentelemetry.instrumentation.urllib.version import __version__\nfrom opentelemetry.instrumentation.utils import (\n _SUPPRESS_INSTRUMENTATION_KEY,\n http_status_to_status_code,\n)\nfrom opentelemetry.propagate import inject\nfrom opentelemetry.semconv.trace import SpanAttributes\nfrom opentelemetry.trace import Span, SpanKind, get_tracer\nfrom opentelemetry.trace.status import Status\nfrom opentelemetry.util.http import remove_url_credentials\n\n_RequestHookT = typing.Optional[typing.Callable[[Span, Request], None]]\n_ResponseHookT = typing.Optional[\n typing.Callable[[Span, Request, client.HTTPResponse], None]\n]\n\n\nclass URLLibInstrumentor(BaseInstrumentor):\n \"\"\"An instrumentor for urllib\n See `BaseInstrumentor`\n \"\"\"\n\n def instrumentation_dependencies(self) -> Collection[str]:\n return _instruments\n\n def _instrument(self, **kwargs):\n \"\"\"Instruments urllib module\n\n Args:\n **kwargs: Optional arguments\n ``tracer_provider``: a TracerProvider, defaults to global\n ``request_hook``: An optional callback invoked that is invoked right after a span is created.\n ``response_hook``: An optional callback which is invoked right before the span is finished processing a response\n \"\"\"\n tracer_provider = kwargs.get(\"tracer_provider\")\n tracer = get_tracer(__name__, __version__, tracer_provider)\n _instrument(\n tracer,\n request_hook=kwargs.get(\"request_hook\"),\n response_hook=kwargs.get(\"response_hook\"),\n )\n\n def _uninstrument(self, **kwargs):\n _uninstrument()\n\n def uninstrument_opener(\n self, opener: OpenerDirector\n ): # pylint: disable=no-self-use\n \"\"\"uninstrument_opener a specific instance of urllib.request.OpenerDirector\"\"\"\n _uninstrument_from(opener, restore_as_bound_func=True)\n\n\ndef _instrument(\n tracer,\n request_hook: _RequestHookT = None,\n response_hook: _ResponseHookT = None,\n):\n \"\"\"Enables tracing of all requests calls that go through\n :code:`urllib.Client._make_request`\"\"\"\n\n opener_open = OpenerDirector.open\n\n @functools.wraps(opener_open)\n def instrumented_open(opener, fullurl, data=None, timeout=None):\n\n if isinstance(fullurl, str):\n request_ = Request(fullurl, data)\n else:\n request_ = fullurl\n\n def get_or_create_headers():\n return getattr(request_, \"headers\", {})\n\n def call_wrapped():\n return opener_open(opener, request_, data=data, timeout=timeout)\n\n return _instrumented_open_call(\n opener, request_, call_wrapped, get_or_create_headers\n )\n\n def _instrumented_open_call(\n _, request, call_wrapped, get_or_create_headers\n ): # pylint: disable=too-many-locals\n if context.get_value(\n _SUPPRESS_INSTRUMENTATION_KEY\n ) or context.get_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY):\n return call_wrapped()\n\n method = request.get_method().upper()\n url = request.full_url\n\n span_name = f\"HTTP {method}\".strip()\n\n url = remove_url_credentials(url)\n\n labels = {\n SpanAttributes.HTTP_METHOD: method,\n SpanAttributes.HTTP_URL: url,\n }\n\n with tracer.start_as_current_span(\n span_name, kind=SpanKind.CLIENT, attributes=labels\n ) as span:\n exception = None\n if callable(request_hook):\n request_hook(span, request)\n\n headers = get_or_create_headers()\n inject(headers)\n\n token = context.attach(\n context.set_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY, True)\n )\n try:\n result = call_wrapped() # *** PROCEED\n except Exception as exc: # pylint: disable=W0703\n exception = exc\n result = getattr(exc, \"file\", None)\n finally:\n context.detach(token)\n\n if result is not None:\n\n code_ = result.getcode()\n labels[SpanAttributes.HTTP_STATUS_CODE] = str(code_)\n\n if span.is_recording() and code_ is not None:\n span.set_attribute(SpanAttributes.HTTP_STATUS_CODE, code_)\n span.set_status(Status(http_status_to_status_code(code_)))\n\n ver_ = str(getattr(result, \"version\", \"\"))\n if ver_:\n labels[\n SpanAttributes.HTTP_FLAVOR\n ] = f\"{ver_[:1]}.{ver_[:-1]}\"\n\n if callable(response_hook):\n response_hook(span, request, result)\n\n if exception is not None:\n raise exception.with_traceback(exception.__traceback__)\n\n return result\n\n instrumented_open.opentelemetry_instrumentation_urllib_applied = True\n OpenerDirector.open = instrumented_open\n\n\ndef _uninstrument():\n \"\"\"Disables instrumentation of :code:`urllib` through this module.\n\n Note that this only works if no other module also patches urllib.\"\"\"\n _uninstrument_from(OpenerDirector)\n\n\ndef _uninstrument_from(instr_root, restore_as_bound_func=False):\n\n instr_func_name = \"open\"\n instr_func = getattr(instr_root, instr_func_name)\n if not getattr(\n instr_func,\n \"opentelemetry_instrumentation_urllib_applied\",\n False,\n ):\n return\n\n original = instr_func.__wrapped__ # pylint:disable=no-member\n if restore_as_bound_func:\n original = types.MethodType(original, instr_root)\n setattr(instr_root, instr_func_name, original)\n", "path": "instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nThis library allows tracing HTTP requests made by the\n`urllib <https://docs.python.org/3/library/urllib>`_ library.\n\nUsage\n-----\n.. code-block:: python\n\n from urllib import request\n from opentelemetry.instrumentation.urllib import URLLibInstrumentor\n\n # You can optionally pass a custom TracerProvider to\n # URLLibInstrumentor().instrument()\n\n URLLibInstrumentor().instrument()\n req = request.Request('https://postman-echo.com/post', method=\"POST\")\n r = request.urlopen(req)\n\nConfiguration\n-------------\n\nRequest/Response hooks\n**********************\n\nThe urllib instrumentation supports extending tracing behavior with the help of\nrequest and response hooks. These are functions that are called back by the instrumentation\nright after a Span is created for a request and right before the span is finished processing a response respectively.\nThe hooks can be configured as follows:\n\n.. code:: python\n\n # `request_obj` is an instance of urllib.request.Request\n def request_hook(span, request_obj):\n pass\n\n # `request_obj` is an instance of urllib.request.Request\n # `response` is an instance of http.client.HTTPResponse\n def response_hook(span, request_obj, response)\n pass\n\n URLLibInstrumentor.instrument(\n request_hook=request_hook, response_hook=response_hook)\n )\n\nAPI\n---\n\"\"\"\n\nimport functools\nimport types\nimport typing\n\n# from urllib import response\nfrom http import client\nfrom typing import Collection\nfrom urllib.request import ( # pylint: disable=no-name-in-module,import-error\n OpenerDirector,\n Request,\n)\n\nfrom opentelemetry import context\n\n# FIXME: fix the importing of this private attribute when the location of the _SUPPRESS_HTTP_INSTRUMENTATION_KEY is defined.\nfrom opentelemetry.context import _SUPPRESS_HTTP_INSTRUMENTATION_KEY\nfrom opentelemetry.instrumentation.instrumentor import BaseInstrumentor\nfrom opentelemetry.instrumentation.urllib.package import _instruments\nfrom opentelemetry.instrumentation.urllib.version import __version__\nfrom opentelemetry.instrumentation.utils import (\n _SUPPRESS_INSTRUMENTATION_KEY,\n http_status_to_status_code,\n)\nfrom opentelemetry.propagate import inject\nfrom opentelemetry.semconv.trace import SpanAttributes\nfrom opentelemetry.trace import Span, SpanKind, get_tracer\nfrom opentelemetry.trace.status import Status\nfrom opentelemetry.util.http import remove_url_credentials\n\n_RequestHookT = typing.Optional[typing.Callable[[Span, Request], None]]\n_ResponseHookT = typing.Optional[\n typing.Callable[[Span, Request, client.HTTPResponse], None]\n]\n\n\nclass URLLibInstrumentor(BaseInstrumentor):\n \"\"\"An instrumentor for urllib\n See `BaseInstrumentor`\n \"\"\"\n\n def instrumentation_dependencies(self) -> Collection[str]:\n return _instruments\n\n def _instrument(self, **kwargs):\n \"\"\"Instruments urllib module\n\n Args:\n **kwargs: Optional arguments\n ``tracer_provider``: a TracerProvider, defaults to global\n ``request_hook``: An optional callback invoked that is invoked right after a span is created.\n ``response_hook``: An optional callback which is invoked right before the span is finished processing a response\n \"\"\"\n tracer_provider = kwargs.get(\"tracer_provider\")\n tracer = get_tracer(__name__, __version__, tracer_provider)\n _instrument(\n tracer,\n request_hook=kwargs.get(\"request_hook\"),\n response_hook=kwargs.get(\"response_hook\"),\n )\n\n def _uninstrument(self, **kwargs):\n _uninstrument()\n\n def uninstrument_opener(\n self, opener: OpenerDirector\n ): # pylint: disable=no-self-use\n \"\"\"uninstrument_opener a specific instance of urllib.request.OpenerDirector\"\"\"\n _uninstrument_from(opener, restore_as_bound_func=True)\n\n\ndef _instrument(\n tracer,\n request_hook: _RequestHookT = None,\n response_hook: _ResponseHookT = None,\n):\n \"\"\"Enables tracing of all requests calls that go through\n :code:`urllib.Client._make_request`\"\"\"\n\n opener_open = OpenerDirector.open\n\n @functools.wraps(opener_open)\n def instrumented_open(opener, fullurl, data=None, timeout=None):\n\n if isinstance(fullurl, str):\n request_ = Request(fullurl, data)\n else:\n request_ = fullurl\n\n def get_or_create_headers():\n return getattr(request_, \"headers\", {})\n\n def call_wrapped():\n return opener_open(opener, request_, data=data, timeout=timeout)\n\n return _instrumented_open_call(\n opener, request_, call_wrapped, get_or_create_headers\n )\n\n def _instrumented_open_call(\n _, request, call_wrapped, get_or_create_headers\n ): # pylint: disable=too-many-locals\n if context.get_value(\n _SUPPRESS_INSTRUMENTATION_KEY\n ) or context.get_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY):\n return call_wrapped()\n\n method = request.get_method().upper()\n url = request.full_url\n\n span_name = f\"HTTP {method}\".strip()\n\n url = remove_url_credentials(url)\n\n labels = {\n SpanAttributes.HTTP_METHOD: method,\n SpanAttributes.HTTP_URL: url,\n }\n\n with tracer.start_as_current_span(\n span_name, kind=SpanKind.CLIENT, attributes=labels\n ) as span:\n exception = None\n if callable(request_hook):\n request_hook(span, request)\n\n headers = get_or_create_headers()\n inject(headers)\n\n token = context.attach(\n context.set_value(_SUPPRESS_HTTP_INSTRUMENTATION_KEY, True)\n )\n try:\n result = call_wrapped() # *** PROCEED\n except Exception as exc: # pylint: disable=W0703\n exception = exc\n result = getattr(exc, \"file\", None)\n finally:\n context.detach(token)\n\n if result is not None:\n\n code_ = result.getcode()\n labels[SpanAttributes.HTTP_STATUS_CODE] = str(code_)\n\n if span.is_recording() and code_ is not None:\n span.set_attribute(SpanAttributes.HTTP_STATUS_CODE, code_)\n span.set_status(Status(http_status_to_status_code(code_)))\n\n ver_ = str(getattr(result, \"version\", \"\"))\n if ver_:\n labels[\n SpanAttributes.HTTP_FLAVOR\n ] = f\"{ver_[:1]}.{ver_[:-1]}\"\n\n if callable(response_hook):\n response_hook(span, request, result)\n\n if exception is not None:\n raise exception.with_traceback(exception.__traceback__)\n\n return result\n\n instrumented_open.opentelemetry_instrumentation_urllib_applied = True\n OpenerDirector.open = instrumented_open\n\n\ndef _uninstrument():\n \"\"\"Disables instrumentation of :code:`urllib` through this module.\n\n Note that this only works if no other module also patches urllib.\"\"\"\n _uninstrument_from(OpenerDirector)\n\n\ndef _uninstrument_from(instr_root, restore_as_bound_func=False):\n\n instr_func_name = \"open\"\n instr_func = getattr(instr_root, instr_func_name)\n if not getattr(\n instr_func,\n \"opentelemetry_instrumentation_urllib_applied\",\n False,\n ):\n return\n\n original = instr_func.__wrapped__ # pylint:disable=no-member\n if restore_as_bound_func:\n original = types.MethodType(original, instr_root)\n setattr(instr_root, instr_func_name, original)\n", "path": "instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py"}]}
| 2,737 | 295 |
gh_patches_debug_9390
|
rasdani/github-patches
|
git_diff
|
pandas-dev__pandas-18844
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
TST: make _skip_if into pytest decorators
- [X] _skip_if_32bit (#18693)
- [X] _skip_if_no_mpl (#18427)
- [X] _skip_if_mpl_1_5 (#18682)
- [x] _skip_if_no_scipy (#18794)
- [x] _skip_if_no_lzma (#18820)
- [x] _skip_if_no_xarray (#18814)
- [X] _skip_if_windows_python_3 (#18693)
- [X] _skip_if_windows (#18693)
- [x] _skip_if_no_pathlib (#18765)
- [x] _skip_if_no_localpath (#18765)
- [x] skip_if_no_ne (#18820)
- [x] _skip_if_has_locale (#18745)
- [x] _skip_if_not_us_locale (#18745)
- [ ] _skip_if_no_mock
- [x] _skip_if_no_ipython (#18814)
- [ ] skip_if_no_package
we should move the ``_skip_if_*`` functions out of ``pandas.util.testing`` to another (private module)
then we can add [skipif decorators](http://pytest.readthedocs.io/en/reorganize-docs/new-docs/user/skipping.html)
and use like this
```
@skip_if_windows_py3
def test_.......():
```
rather than calling ``tm._skip_if_windows_py390`` in the body of the function (sometimes you also need to do that, so we leave the functions themselves as well).
this makes much more idiomatic and readable pytest code and removes the need to roll your own when using the decorator.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pandas/conftest.py`
Content:
```
1 import pytest
2
3 from distutils.version import LooseVersion
4 import numpy
5 import pandas
6 import pandas.util.testing as tm
7 import dateutil
8
9
10 def pytest_addoption(parser):
11 parser.addoption("--skip-slow", action="store_true",
12 help="skip slow tests")
13 parser.addoption("--skip-network", action="store_true",
14 help="skip network tests")
15 parser.addoption("--run-high-memory", action="store_true",
16 help="run high memory tests")
17 parser.addoption("--only-slow", action="store_true",
18 help="run only slow tests")
19
20
21 def pytest_runtest_setup(item):
22 if 'slow' in item.keywords and item.config.getoption("--skip-slow"):
23 pytest.skip("skipping due to --skip-slow")
24
25 if 'slow' not in item.keywords and item.config.getoption("--only-slow"):
26 pytest.skip("skipping due to --only-slow")
27
28 if 'network' in item.keywords and item.config.getoption("--skip-network"):
29 pytest.skip("skipping due to --skip-network")
30
31 if 'high_memory' in item.keywords and not item.config.getoption(
32 "--run-high-memory"):
33 pytest.skip(
34 "skipping high memory test since --run-high-memory was not set")
35
36
37 # Configurations for all tests and all test modules
38
39 @pytest.fixture(autouse=True)
40 def configure_tests():
41 pandas.set_option('chained_assignment', 'raise')
42
43
44 # For running doctests: make np and pd names available
45
46 @pytest.fixture(autouse=True)
47 def add_imports(doctest_namespace):
48 doctest_namespace['np'] = numpy
49 doctest_namespace['pd'] = pandas
50
51
52 @pytest.fixture(params=['bsr', 'coo', 'csc', 'csr', 'dia', 'dok', 'lil'])
53 def spmatrix(request):
54 tm._skip_if_no_scipy()
55 from scipy import sparse
56 return getattr(sparse, request.param + '_matrix')
57
58
59 @pytest.fixture
60 def ip():
61 """
62 Get an instance of IPython.InteractiveShell.
63
64 Will raise a skip if IPython is not installed.
65 """
66
67 pytest.importorskip('IPython', minversion="6.0.0")
68 from IPython.core.interactiveshell import InteractiveShell
69 return InteractiveShell()
70
71
72 is_dateutil_le_261 = pytest.mark.skipif(
73 LooseVersion(dateutil.__version__) > LooseVersion('2.6.1'),
74 reason="dateutil api change version")
75 is_dateutil_gt_261 = pytest.mark.skipif(
76 LooseVersion(dateutil.__version__) <= LooseVersion('2.6.1'),
77 reason="dateutil stable version")
78
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pandas/conftest.py b/pandas/conftest.py
--- a/pandas/conftest.py
+++ b/pandas/conftest.py
@@ -3,7 +3,6 @@
from distutils.version import LooseVersion
import numpy
import pandas
-import pandas.util.testing as tm
import dateutil
@@ -51,7 +50,6 @@
@pytest.fixture(params=['bsr', 'coo', 'csc', 'csr', 'dia', 'dok', 'lil'])
def spmatrix(request):
- tm._skip_if_no_scipy()
from scipy import sparse
return getattr(sparse, request.param + '_matrix')
|
{"golden_diff": "diff --git a/pandas/conftest.py b/pandas/conftest.py\n--- a/pandas/conftest.py\n+++ b/pandas/conftest.py\n@@ -3,7 +3,6 @@\n from distutils.version import LooseVersion\n import numpy\n import pandas\n-import pandas.util.testing as tm\n import dateutil\n \n \n@@ -51,7 +50,6 @@\n \n @pytest.fixture(params=['bsr', 'coo', 'csc', 'csr', 'dia', 'dok', 'lil'])\n def spmatrix(request):\n- tm._skip_if_no_scipy()\n from scipy import sparse\n return getattr(sparse, request.param + '_matrix')\n", "issue": "TST: make _skip_if into pytest decorators\n- [X] _skip_if_32bit (#18693)\r\n- [X] _skip_if_no_mpl (#18427)\r\n- [X] _skip_if_mpl_1_5 (#18682)\r\n- [x] _skip_if_no_scipy (#18794)\r\n- [x] _skip_if_no_lzma (#18820)\r\n- [x] _skip_if_no_xarray (#18814)\r\n- [X] _skip_if_windows_python_3 (#18693)\r\n- [X] _skip_if_windows (#18693)\r\n- [x] _skip_if_no_pathlib (#18765) \r\n- [x] _skip_if_no_localpath (#18765)\r\n- [x] skip_if_no_ne (#18820)\r\n- [x] _skip_if_has_locale (#18745) \r\n- [x] _skip_if_not_us_locale (#18745)\r\n- [ ] _skip_if_no_mock\r\n- [x] _skip_if_no_ipython (#18814)\r\n- [ ] skip_if_no_package\r\n\r\nwe should move the ``_skip_if_*`` functions out of ``pandas.util.testing`` to another (private module)\r\n\r\nthen we can add [skipif decorators](http://pytest.readthedocs.io/en/reorganize-docs/new-docs/user/skipping.html)\r\n\r\nand use like this\r\n\r\n```\r\n@skip_if_windows_py3\r\ndef test_.......():\r\n```\r\n\r\nrather than calling ``tm._skip_if_windows_py390`` in the body of the function (sometimes you also need to do that, so we leave the functions themselves as well).\r\n\r\nthis makes much more idiomatic and readable pytest code and removes the need to roll your own when using the decorator.\r\n\n", "before_files": [{"content": "import pytest\n\nfrom distutils.version import LooseVersion\nimport numpy\nimport pandas\nimport pandas.util.testing as tm\nimport dateutil\n\n\ndef pytest_addoption(parser):\n parser.addoption(\"--skip-slow\", action=\"store_true\",\n help=\"skip slow tests\")\n parser.addoption(\"--skip-network\", action=\"store_true\",\n help=\"skip network tests\")\n parser.addoption(\"--run-high-memory\", action=\"store_true\",\n help=\"run high memory tests\")\n parser.addoption(\"--only-slow\", action=\"store_true\",\n help=\"run only slow tests\")\n\n\ndef pytest_runtest_setup(item):\n if 'slow' in item.keywords and item.config.getoption(\"--skip-slow\"):\n pytest.skip(\"skipping due to --skip-slow\")\n\n if 'slow' not in item.keywords and item.config.getoption(\"--only-slow\"):\n pytest.skip(\"skipping due to --only-slow\")\n\n if 'network' in item.keywords and item.config.getoption(\"--skip-network\"):\n pytest.skip(\"skipping due to --skip-network\")\n\n if 'high_memory' in item.keywords and not item.config.getoption(\n \"--run-high-memory\"):\n pytest.skip(\n \"skipping high memory test since --run-high-memory was not set\")\n\n\n# Configurations for all tests and all test modules\n\[email protected](autouse=True)\ndef configure_tests():\n pandas.set_option('chained_assignment', 'raise')\n\n\n# For running doctests: make np and pd names available\n\[email protected](autouse=True)\ndef add_imports(doctest_namespace):\n doctest_namespace['np'] = numpy\n doctest_namespace['pd'] = pandas\n\n\[email protected](params=['bsr', 'coo', 'csc', 'csr', 'dia', 'dok', 'lil'])\ndef spmatrix(request):\n tm._skip_if_no_scipy()\n from scipy import sparse\n return getattr(sparse, request.param + '_matrix')\n\n\[email protected]\ndef ip():\n \"\"\"\n Get an instance of IPython.InteractiveShell.\n\n Will raise a skip if IPython is not installed.\n \"\"\"\n\n pytest.importorskip('IPython', minversion=\"6.0.0\")\n from IPython.core.interactiveshell import InteractiveShell\n return InteractiveShell()\n\n\nis_dateutil_le_261 = pytest.mark.skipif(\n LooseVersion(dateutil.__version__) > LooseVersion('2.6.1'),\n reason=\"dateutil api change version\")\nis_dateutil_gt_261 = pytest.mark.skipif(\n LooseVersion(dateutil.__version__) <= LooseVersion('2.6.1'),\n reason=\"dateutil stable version\")\n", "path": "pandas/conftest.py"}], "after_files": [{"content": "import pytest\n\nfrom distutils.version import LooseVersion\nimport numpy\nimport pandas\nimport dateutil\n\n\ndef pytest_addoption(parser):\n parser.addoption(\"--skip-slow\", action=\"store_true\",\n help=\"skip slow tests\")\n parser.addoption(\"--skip-network\", action=\"store_true\",\n help=\"skip network tests\")\n parser.addoption(\"--run-high-memory\", action=\"store_true\",\n help=\"run high memory tests\")\n parser.addoption(\"--only-slow\", action=\"store_true\",\n help=\"run only slow tests\")\n\n\ndef pytest_runtest_setup(item):\n if 'slow' in item.keywords and item.config.getoption(\"--skip-slow\"):\n pytest.skip(\"skipping due to --skip-slow\")\n\n if 'slow' not in item.keywords and item.config.getoption(\"--only-slow\"):\n pytest.skip(\"skipping due to --only-slow\")\n\n if 'network' in item.keywords and item.config.getoption(\"--skip-network\"):\n pytest.skip(\"skipping due to --skip-network\")\n\n if 'high_memory' in item.keywords and not item.config.getoption(\n \"--run-high-memory\"):\n pytest.skip(\n \"skipping high memory test since --run-high-memory was not set\")\n\n\n# Configurations for all tests and all test modules\n\[email protected](autouse=True)\ndef configure_tests():\n pandas.set_option('chained_assignment', 'raise')\n\n\n# For running doctests: make np and pd names available\n\[email protected](autouse=True)\ndef add_imports(doctest_namespace):\n doctest_namespace['np'] = numpy\n doctest_namespace['pd'] = pandas\n\n\[email protected](params=['bsr', 'coo', 'csc', 'csr', 'dia', 'dok', 'lil'])\ndef spmatrix(request):\n from scipy import sparse\n return getattr(sparse, request.param + '_matrix')\n\n\[email protected]\ndef ip():\n \"\"\"\n Get an instance of IPython.InteractiveShell.\n\n Will raise a skip if IPython is not installed.\n \"\"\"\n\n pytest.importorskip('IPython', minversion=\"6.0.0\")\n from IPython.core.interactiveshell import InteractiveShell\n return InteractiveShell()\n\n\nis_dateutil_le_261 = pytest.mark.skipif(\n LooseVersion(dateutil.__version__) > LooseVersion('2.6.1'),\n reason=\"dateutil api change version\")\nis_dateutil_gt_261 = pytest.mark.skipif(\n LooseVersion(dateutil.__version__) <= LooseVersion('2.6.1'),\n reason=\"dateutil stable version\")\n", "path": "pandas/conftest.py"}]}
| 1,398 | 148 |
gh_patches_debug_50931
|
rasdani/github-patches
|
git_diff
|
apache__airflow-26806
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pdb no longer works with airflow test command since 2.3.3
Converted back to issue as I've reproduced it and traced the issue back to https://github.com/apache/airflow/pull/24362
### Discussed in https://github.com/apache/airflow/discussions/26352
<div type='discussions-op-text'>
<sup>Originally posted by **GuruComposer** September 12, 2022</sup>
### Apache Airflow version
2.3.4
### What happened
I used to be able to use ipdb to debug DAGs by running `airflow tasks test <dag_name> <dag_id>`, and hitting an ipdb breakpoint (ipdb.set_trace()).
This no longer works. I get a strange type error:
``` File "/usr/local/lib/python3.9/bdb.py", line 88, in trace_dispatch
return self.dispatch_line(frame)
File "/usr/local/lib/python3.9/bdb.py", line 112, in dispatch_line
self.user_line(frame)
File "/usr/local/lib/python3.9/pdb.py", line 262, in user_line
self.interaction(frame, None)
File "/home/astro/.local/lib/python3.9/site-packages/IPython/core/debugger.py", line 336, in interaction
OldPdb.interaction(self, frame, traceback)
File "/usr/local/lib/python3.9/pdb.py", line 357, in interaction
self._cmdloop()
File "/usr/local/lib/python3.9/pdb.py", line 322, in _cmdloop
self.cmdloop()
File "/usr/local/lib/python3.9/cmd.py", line 126, in cmdloop
line = input(self.prompt)
TypeError: an integer is required (got type NoneType)```
### What you think should happen instead
I should get the ipdb shell.
### How to reproduce
1. Add ipdb breakpoint anywhere in airflow task.
import ipdb; ipdb.set_trace()
2. Run that task:
Run `airflow tasks test <dag_name> <dag_id>`, and
### Operating System
Debian GNU/Linux
### Versions of Apache Airflow Providers
2.3.4
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `airflow/utils/log/secrets_masker.py`
Content:
```
1 # Licensed to the Apache Software Foundation (ASF) under one
2 # or more contributor license agreements. See the NOTICE file
3 # distributed with this work for additional information
4 # regarding copyright ownership. The ASF licenses this file
5 # to you under the Apache License, Version 2.0 (the
6 # "License"); you may not use this file except in compliance
7 # with the License. You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing,
12 # software distributed under the License is distributed on an
13 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 # KIND, either express or implied. See the License for the
15 # specific language governing permissions and limitations
16 # under the License.
17 """Mask sensitive information from logs"""
18 from __future__ import annotations
19
20 import collections
21 import logging
22 import re
23 import sys
24 from typing import Any, Dict, Iterable, List, TextIO, Tuple, TypeVar, Union
25
26 from airflow import settings
27 from airflow.compat.functools import cache, cached_property
28
29 Redactable = TypeVar("Redactable", str, Dict[Any, Any], Tuple[Any, ...], List[Any])
30 Redacted = Union[Redactable, str]
31
32 log = logging.getLogger(__name__)
33
34 DEFAULT_SENSITIVE_FIELDS = frozenset(
35 {
36 'access_token',
37 'api_key',
38 'apikey',
39 'authorization',
40 'passphrase',
41 'passwd',
42 'password',
43 'private_key',
44 'secret',
45 'token',
46 'keyfile_dict',
47 'service_account',
48 }
49 )
50 """Names of fields (Connection extra, Variable key name etc.) that are deemed sensitive"""
51
52 SECRETS_TO_SKIP_MASKING_FOR_TESTS = {'airflow'}
53
54
55 @cache
56 def get_sensitive_variables_fields():
57 """Get comma-separated sensitive Variable Fields from airflow.cfg."""
58 from airflow.configuration import conf
59
60 sensitive_fields = DEFAULT_SENSITIVE_FIELDS.copy()
61 sensitive_variable_fields = conf.get('core', 'sensitive_var_conn_names')
62 if sensitive_variable_fields:
63 sensitive_fields |= frozenset({field.strip() for field in sensitive_variable_fields.split(',')})
64 return sensitive_fields
65
66
67 def should_hide_value_for_key(name):
68 """Should the value for this given name (Variable name, or key in conn.extra_dejson) be hidden"""
69 from airflow import settings
70
71 if isinstance(name, str) and settings.HIDE_SENSITIVE_VAR_CONN_FIELDS:
72 name = name.strip().lower()
73 return any(s in name for s in get_sensitive_variables_fields())
74 return False
75
76
77 def mask_secret(secret: str | dict | Iterable, name: str | None = None) -> None:
78 """
79 Mask a secret from appearing in the task logs.
80
81 If ``name`` is provided, then it will only be masked if the name matches
82 one of the configured "sensitive" names.
83
84 If ``secret`` is a dict or a iterable (excluding str) then it will be
85 recursively walked and keys with sensitive names will be hidden.
86 """
87 # Filtering all log messages is not a free process, so we only do it when
88 # running tasks
89 if not secret:
90 return
91
92 _secrets_masker().add_mask(secret, name)
93
94
95 def redact(value: Redactable, name: str | None = None) -> Redacted:
96 """Redact any secrets found in ``value``."""
97 return _secrets_masker().redact(value, name)
98
99
100 @cache
101 def _secrets_masker() -> SecretsMasker:
102 for flt in logging.getLogger('airflow.task').filters:
103 if isinstance(flt, SecretsMasker):
104 return flt
105 raise RuntimeError(
106 "Logging Configuration Error! No SecretsMasker found! If you have custom logging, please make "
107 "sure you configure it taking airflow configuration as a base as explained at "
108 "https://airflow.apache.org/docs/apache-airflow/stable/logging-monitoring/logging-tasks.html"
109 "#advanced-configuration"
110 )
111
112
113 class SecretsMasker(logging.Filter):
114 """Redact secrets from logs"""
115
116 replacer: re.Pattern | None = None
117 patterns: set[str]
118
119 ALREADY_FILTERED_FLAG = "__SecretsMasker_filtered"
120 MAX_RECURSION_DEPTH = 5
121
122 def __init__(self):
123 super().__init__()
124 self.patterns = set()
125
126 @cached_property
127 def _record_attrs_to_ignore(self) -> Iterable[str]:
128 # Doing log.info(..., extra={'foo': 2}) sets extra properties on
129 # record, i.e. record.foo. And we need to filter those too. Fun
130 #
131 # Create a record, and look at what attributes are on it, and ignore
132 # all the default ones!
133
134 record = logging.getLogRecordFactory()(
135 # name, level, pathname, lineno, msg, args, exc_info, func=None, sinfo=None,
136 "x",
137 logging.INFO,
138 __file__,
139 1,
140 "",
141 tuple(),
142 exc_info=None,
143 func="funcname",
144 )
145 return frozenset(record.__dict__).difference({'msg', 'args'})
146
147 def _redact_exception_with_context(self, exception):
148 # Exception class may not be modifiable (e.g. declared by an
149 # extension module such as JDBC).
150 try:
151 exception.args = (self.redact(v) for v in exception.args)
152 except AttributeError:
153 pass
154 if exception.__context__:
155 self._redact_exception_with_context(exception.__context__)
156 if exception.__cause__ and exception.__cause__ is not exception.__context__:
157 self._redact_exception_with_context(exception.__cause__)
158
159 def filter(self, record) -> bool:
160 if settings.MASK_SECRETS_IN_LOGS is not True:
161 return True
162
163 if self.ALREADY_FILTERED_FLAG in record.__dict__:
164 # Filters are attached to multiple handlers and logs, keep a
165 # "private" flag that stops us needing to process it more than once
166 return True
167
168 if self.replacer:
169 for k, v in record.__dict__.items():
170 if k in self._record_attrs_to_ignore:
171 continue
172 record.__dict__[k] = self.redact(v)
173 if record.exc_info and record.exc_info[1] is not None:
174 exc = record.exc_info[1]
175 self._redact_exception_with_context(exc)
176 record.__dict__[self.ALREADY_FILTERED_FLAG] = True
177
178 return True
179
180 def _redact_all(self, item: Redactable, depth: int) -> Redacted:
181 if depth > self.MAX_RECURSION_DEPTH or isinstance(item, str):
182 return '***'
183 if isinstance(item, dict):
184 return {dict_key: self._redact_all(subval, depth + 1) for dict_key, subval in item.items()}
185 elif isinstance(item, (tuple, set)):
186 # Turn set in to tuple!
187 return tuple(self._redact_all(subval, depth + 1) for subval in item)
188 elif isinstance(item, list):
189 return list(self._redact_all(subval, depth + 1) for subval in item)
190 else:
191 return item
192
193 def _redact(self, item: Redactable, name: str | None, depth: int) -> Redacted:
194 # Avoid spending too much effort on redacting on deeply nested
195 # structures. This also avoid infinite recursion if a structure has
196 # reference to self.
197 if depth > self.MAX_RECURSION_DEPTH:
198 return item
199 try:
200 if name and should_hide_value_for_key(name):
201 return self._redact_all(item, depth)
202 if isinstance(item, dict):
203 return {
204 dict_key: self._redact(subval, name=dict_key, depth=(depth + 1))
205 for dict_key, subval in item.items()
206 }
207 elif isinstance(item, str):
208 if self.replacer:
209 # We can't replace specific values, but the key-based redacting
210 # can still happen, so we can't short-circuit, we need to walk
211 # the structure.
212 return self.replacer.sub('***', item)
213 return item
214 elif isinstance(item, (tuple, set)):
215 # Turn set in to tuple!
216 return tuple(self._redact(subval, name=None, depth=(depth + 1)) for subval in item)
217 elif isinstance(item, list):
218 return [self._redact(subval, name=None, depth=(depth + 1)) for subval in item]
219 else:
220 return item
221 # I think this should never happen, but it does not hurt to leave it just in case
222 # Well. It happened (see https://github.com/apache/airflow/issues/19816#issuecomment-983311373)
223 # but it caused infinite recursion, so we need to cast it to str first.
224 except Exception as e:
225 log.warning(
226 "Unable to redact %s, please report this via <https://github.com/apache/airflow/issues>. "
227 "Error was: %s: %s",
228 repr(item),
229 type(e).__name__,
230 str(e),
231 )
232 return item
233
234 def redact(self, item: Redactable, name: str | None = None) -> Redacted:
235 """Redact an any secrets found in ``item``, if it is a string.
236
237 If ``name`` is given, and it's a "sensitive" name (see
238 :func:`should_hide_value_for_key`) then all string values in the item
239 is redacted.
240 """
241 return self._redact(item, name, depth=0)
242
243 def add_mask(self, secret: str | dict | Iterable, name: str | None = None):
244 """Add a new secret to be masked to this filter instance."""
245 from airflow.configuration import conf
246
247 test_mode: bool = conf.getboolean('core', 'unit_test_mode')
248 if isinstance(secret, dict):
249 for k, v in secret.items():
250 self.add_mask(v, k)
251 elif isinstance(secret, str):
252 if not secret or (test_mode and secret in SECRETS_TO_SKIP_MASKING_FOR_TESTS):
253 return
254 pattern = re.escape(secret)
255 if pattern not in self.patterns and (not name or should_hide_value_for_key(name)):
256 self.patterns.add(pattern)
257 self.replacer = re.compile('|'.join(self.patterns))
258 elif isinstance(secret, collections.abc.Iterable):
259 for v in secret:
260 self.add_mask(v, name)
261
262
263 class RedactedIO(TextIO):
264 """IO class that redacts values going into stdout.
265
266 Expected usage::
267
268 with contextlib.redirect_stdout(RedactedIO()):
269 ... # Writes to stdout will be redacted.
270 """
271
272 def __init__(self):
273 self.target = sys.stdout
274
275 def write(self, s: str) -> int:
276 s = redact(s)
277 return self.target.write(s)
278
279 def flush(self) -> None:
280 return self.target.flush()
281
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py
--- a/airflow/utils/log/secrets_masker.py
+++ b/airflow/utils/log/secrets_masker.py
@@ -271,6 +271,7 @@
def __init__(self):
self.target = sys.stdout
+ self.fileno = sys.stdout.fileno
def write(self, s: str) -> int:
s = redact(s)
|
{"golden_diff": "diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py\n--- a/airflow/utils/log/secrets_masker.py\n+++ b/airflow/utils/log/secrets_masker.py\n@@ -271,6 +271,7 @@\n \n def __init__(self):\n self.target = sys.stdout\n+ self.fileno = sys.stdout.fileno\n \n def write(self, s: str) -> int:\n s = redact(s)\n", "issue": "pdb no longer works with airflow test command since 2.3.3\nConverted back to issue as I've reproduced it and traced the issue back to https://github.com/apache/airflow/pull/24362\r\n\r\n### Discussed in https://github.com/apache/airflow/discussions/26352\r\n\r\n<div type='discussions-op-text'>\r\n\r\n<sup>Originally posted by **GuruComposer** September 12, 2022</sup>\r\n### Apache Airflow version\r\n\r\n2.3.4\r\n\r\n### What happened\r\n\r\nI used to be able to use ipdb to debug DAGs by running `airflow tasks test <dag_name> <dag_id>`, and hitting an ipdb breakpoint (ipdb.set_trace()).\r\n\r\nThis no longer works. I get a strange type error:\r\n\r\n``` File \"/usr/local/lib/python3.9/bdb.py\", line 88, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File \"/usr/local/lib/python3.9/bdb.py\", line 112, in dispatch_line\r\n self.user_line(frame)\r\n File \"/usr/local/lib/python3.9/pdb.py\", line 262, in user_line\r\n self.interaction(frame, None)\r\n File \"/home/astro/.local/lib/python3.9/site-packages/IPython/core/debugger.py\", line 336, in interaction\r\n OldPdb.interaction(self, frame, traceback)\r\n File \"/usr/local/lib/python3.9/pdb.py\", line 357, in interaction\r\n self._cmdloop()\r\n File \"/usr/local/lib/python3.9/pdb.py\", line 322, in _cmdloop\r\n self.cmdloop()\r\n File \"/usr/local/lib/python3.9/cmd.py\", line 126, in cmdloop\r\n line = input(self.prompt)\r\nTypeError: an integer is required (got type NoneType)```\r\n\r\n\r\n\r\n### What you think should happen instead\r\n\r\nI should get the ipdb shell.\r\n\r\n### How to reproduce\r\n\r\n1. Add ipdb breakpoint anywhere in airflow task.\r\nimport ipdb; ipdb.set_trace()\r\n\r\n2. Run that task:\r\nRun `airflow tasks test <dag_name> <dag_id>`, and \r\n\r\n### Operating System\r\n\r\nDebian GNU/Linux\r\n\r\n### Versions of Apache Airflow Providers\r\n\r\n2.3.4\n", "before_files": [{"content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\"\"\"Mask sensitive information from logs\"\"\"\nfrom __future__ import annotations\n\nimport collections\nimport logging\nimport re\nimport sys\nfrom typing import Any, Dict, Iterable, List, TextIO, Tuple, TypeVar, Union\n\nfrom airflow import settings\nfrom airflow.compat.functools import cache, cached_property\n\nRedactable = TypeVar(\"Redactable\", str, Dict[Any, Any], Tuple[Any, ...], List[Any])\nRedacted = Union[Redactable, str]\n\nlog = logging.getLogger(__name__)\n\nDEFAULT_SENSITIVE_FIELDS = frozenset(\n {\n 'access_token',\n 'api_key',\n 'apikey',\n 'authorization',\n 'passphrase',\n 'passwd',\n 'password',\n 'private_key',\n 'secret',\n 'token',\n 'keyfile_dict',\n 'service_account',\n }\n)\n\"\"\"Names of fields (Connection extra, Variable key name etc.) that are deemed sensitive\"\"\"\n\nSECRETS_TO_SKIP_MASKING_FOR_TESTS = {'airflow'}\n\n\n@cache\ndef get_sensitive_variables_fields():\n \"\"\"Get comma-separated sensitive Variable Fields from airflow.cfg.\"\"\"\n from airflow.configuration import conf\n\n sensitive_fields = DEFAULT_SENSITIVE_FIELDS.copy()\n sensitive_variable_fields = conf.get('core', 'sensitive_var_conn_names')\n if sensitive_variable_fields:\n sensitive_fields |= frozenset({field.strip() for field in sensitive_variable_fields.split(',')})\n return sensitive_fields\n\n\ndef should_hide_value_for_key(name):\n \"\"\"Should the value for this given name (Variable name, or key in conn.extra_dejson) be hidden\"\"\"\n from airflow import settings\n\n if isinstance(name, str) and settings.HIDE_SENSITIVE_VAR_CONN_FIELDS:\n name = name.strip().lower()\n return any(s in name for s in get_sensitive_variables_fields())\n return False\n\n\ndef mask_secret(secret: str | dict | Iterable, name: str | None = None) -> None:\n \"\"\"\n Mask a secret from appearing in the task logs.\n\n If ``name`` is provided, then it will only be masked if the name matches\n one of the configured \"sensitive\" names.\n\n If ``secret`` is a dict or a iterable (excluding str) then it will be\n recursively walked and keys with sensitive names will be hidden.\n \"\"\"\n # Filtering all log messages is not a free process, so we only do it when\n # running tasks\n if not secret:\n return\n\n _secrets_masker().add_mask(secret, name)\n\n\ndef redact(value: Redactable, name: str | None = None) -> Redacted:\n \"\"\"Redact any secrets found in ``value``.\"\"\"\n return _secrets_masker().redact(value, name)\n\n\n@cache\ndef _secrets_masker() -> SecretsMasker:\n for flt in logging.getLogger('airflow.task').filters:\n if isinstance(flt, SecretsMasker):\n return flt\n raise RuntimeError(\n \"Logging Configuration Error! No SecretsMasker found! If you have custom logging, please make \"\n \"sure you configure it taking airflow configuration as a base as explained at \"\n \"https://airflow.apache.org/docs/apache-airflow/stable/logging-monitoring/logging-tasks.html\"\n \"#advanced-configuration\"\n )\n\n\nclass SecretsMasker(logging.Filter):\n \"\"\"Redact secrets from logs\"\"\"\n\n replacer: re.Pattern | None = None\n patterns: set[str]\n\n ALREADY_FILTERED_FLAG = \"__SecretsMasker_filtered\"\n MAX_RECURSION_DEPTH = 5\n\n def __init__(self):\n super().__init__()\n self.patterns = set()\n\n @cached_property\n def _record_attrs_to_ignore(self) -> Iterable[str]:\n # Doing log.info(..., extra={'foo': 2}) sets extra properties on\n # record, i.e. record.foo. And we need to filter those too. Fun\n #\n # Create a record, and look at what attributes are on it, and ignore\n # all the default ones!\n\n record = logging.getLogRecordFactory()(\n # name, level, pathname, lineno, msg, args, exc_info, func=None, sinfo=None,\n \"x\",\n logging.INFO,\n __file__,\n 1,\n \"\",\n tuple(),\n exc_info=None,\n func=\"funcname\",\n )\n return frozenset(record.__dict__).difference({'msg', 'args'})\n\n def _redact_exception_with_context(self, exception):\n # Exception class may not be modifiable (e.g. declared by an\n # extension module such as JDBC).\n try:\n exception.args = (self.redact(v) for v in exception.args)\n except AttributeError:\n pass\n if exception.__context__:\n self._redact_exception_with_context(exception.__context__)\n if exception.__cause__ and exception.__cause__ is not exception.__context__:\n self._redact_exception_with_context(exception.__cause__)\n\n def filter(self, record) -> bool:\n if settings.MASK_SECRETS_IN_LOGS is not True:\n return True\n\n if self.ALREADY_FILTERED_FLAG in record.__dict__:\n # Filters are attached to multiple handlers and logs, keep a\n # \"private\" flag that stops us needing to process it more than once\n return True\n\n if self.replacer:\n for k, v in record.__dict__.items():\n if k in self._record_attrs_to_ignore:\n continue\n record.__dict__[k] = self.redact(v)\n if record.exc_info and record.exc_info[1] is not None:\n exc = record.exc_info[1]\n self._redact_exception_with_context(exc)\n record.__dict__[self.ALREADY_FILTERED_FLAG] = True\n\n return True\n\n def _redact_all(self, item: Redactable, depth: int) -> Redacted:\n if depth > self.MAX_RECURSION_DEPTH or isinstance(item, str):\n return '***'\n if isinstance(item, dict):\n return {dict_key: self._redact_all(subval, depth + 1) for dict_key, subval in item.items()}\n elif isinstance(item, (tuple, set)):\n # Turn set in to tuple!\n return tuple(self._redact_all(subval, depth + 1) for subval in item)\n elif isinstance(item, list):\n return list(self._redact_all(subval, depth + 1) for subval in item)\n else:\n return item\n\n def _redact(self, item: Redactable, name: str | None, depth: int) -> Redacted:\n # Avoid spending too much effort on redacting on deeply nested\n # structures. This also avoid infinite recursion if a structure has\n # reference to self.\n if depth > self.MAX_RECURSION_DEPTH:\n return item\n try:\n if name and should_hide_value_for_key(name):\n return self._redact_all(item, depth)\n if isinstance(item, dict):\n return {\n dict_key: self._redact(subval, name=dict_key, depth=(depth + 1))\n for dict_key, subval in item.items()\n }\n elif isinstance(item, str):\n if self.replacer:\n # We can't replace specific values, but the key-based redacting\n # can still happen, so we can't short-circuit, we need to walk\n # the structure.\n return self.replacer.sub('***', item)\n return item\n elif isinstance(item, (tuple, set)):\n # Turn set in to tuple!\n return tuple(self._redact(subval, name=None, depth=(depth + 1)) for subval in item)\n elif isinstance(item, list):\n return [self._redact(subval, name=None, depth=(depth + 1)) for subval in item]\n else:\n return item\n # I think this should never happen, but it does not hurt to leave it just in case\n # Well. It happened (see https://github.com/apache/airflow/issues/19816#issuecomment-983311373)\n # but it caused infinite recursion, so we need to cast it to str first.\n except Exception as e:\n log.warning(\n \"Unable to redact %s, please report this via <https://github.com/apache/airflow/issues>. \"\n \"Error was: %s: %s\",\n repr(item),\n type(e).__name__,\n str(e),\n )\n return item\n\n def redact(self, item: Redactable, name: str | None = None) -> Redacted:\n \"\"\"Redact an any secrets found in ``item``, if it is a string.\n\n If ``name`` is given, and it's a \"sensitive\" name (see\n :func:`should_hide_value_for_key`) then all string values in the item\n is redacted.\n \"\"\"\n return self._redact(item, name, depth=0)\n\n def add_mask(self, secret: str | dict | Iterable, name: str | None = None):\n \"\"\"Add a new secret to be masked to this filter instance.\"\"\"\n from airflow.configuration import conf\n\n test_mode: bool = conf.getboolean('core', 'unit_test_mode')\n if isinstance(secret, dict):\n for k, v in secret.items():\n self.add_mask(v, k)\n elif isinstance(secret, str):\n if not secret or (test_mode and secret in SECRETS_TO_SKIP_MASKING_FOR_TESTS):\n return\n pattern = re.escape(secret)\n if pattern not in self.patterns and (not name or should_hide_value_for_key(name)):\n self.patterns.add(pattern)\n self.replacer = re.compile('|'.join(self.patterns))\n elif isinstance(secret, collections.abc.Iterable):\n for v in secret:\n self.add_mask(v, name)\n\n\nclass RedactedIO(TextIO):\n \"\"\"IO class that redacts values going into stdout.\n\n Expected usage::\n\n with contextlib.redirect_stdout(RedactedIO()):\n ... # Writes to stdout will be redacted.\n \"\"\"\n\n def __init__(self):\n self.target = sys.stdout\n\n def write(self, s: str) -> int:\n s = redact(s)\n return self.target.write(s)\n\n def flush(self) -> None:\n return self.target.flush()\n", "path": "airflow/utils/log/secrets_masker.py"}], "after_files": [{"content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\"\"\"Mask sensitive information from logs\"\"\"\nfrom __future__ import annotations\n\nimport collections\nimport logging\nimport re\nimport sys\nfrom typing import Any, Dict, Iterable, List, TextIO, Tuple, TypeVar, Union\n\nfrom airflow import settings\nfrom airflow.compat.functools import cache, cached_property\n\nRedactable = TypeVar(\"Redactable\", str, Dict[Any, Any], Tuple[Any, ...], List[Any])\nRedacted = Union[Redactable, str]\n\nlog = logging.getLogger(__name__)\n\nDEFAULT_SENSITIVE_FIELDS = frozenset(\n {\n 'access_token',\n 'api_key',\n 'apikey',\n 'authorization',\n 'passphrase',\n 'passwd',\n 'password',\n 'private_key',\n 'secret',\n 'token',\n 'keyfile_dict',\n 'service_account',\n }\n)\n\"\"\"Names of fields (Connection extra, Variable key name etc.) that are deemed sensitive\"\"\"\n\nSECRETS_TO_SKIP_MASKING_FOR_TESTS = {'airflow'}\n\n\n@cache\ndef get_sensitive_variables_fields():\n \"\"\"Get comma-separated sensitive Variable Fields from airflow.cfg.\"\"\"\n from airflow.configuration import conf\n\n sensitive_fields = DEFAULT_SENSITIVE_FIELDS.copy()\n sensitive_variable_fields = conf.get('core', 'sensitive_var_conn_names')\n if sensitive_variable_fields:\n sensitive_fields |= frozenset({field.strip() for field in sensitive_variable_fields.split(',')})\n return sensitive_fields\n\n\ndef should_hide_value_for_key(name):\n \"\"\"Should the value for this given name (Variable name, or key in conn.extra_dejson) be hidden\"\"\"\n from airflow import settings\n\n if isinstance(name, str) and settings.HIDE_SENSITIVE_VAR_CONN_FIELDS:\n name = name.strip().lower()\n return any(s in name for s in get_sensitive_variables_fields())\n return False\n\n\ndef mask_secret(secret: str | dict | Iterable, name: str | None = None) -> None:\n \"\"\"\n Mask a secret from appearing in the task logs.\n\n If ``name`` is provided, then it will only be masked if the name matches\n one of the configured \"sensitive\" names.\n\n If ``secret`` is a dict or a iterable (excluding str) then it will be\n recursively walked and keys with sensitive names will be hidden.\n \"\"\"\n # Filtering all log messages is not a free process, so we only do it when\n # running tasks\n if not secret:\n return\n\n _secrets_masker().add_mask(secret, name)\n\n\ndef redact(value: Redactable, name: str | None = None) -> Redacted:\n \"\"\"Redact any secrets found in ``value``.\"\"\"\n return _secrets_masker().redact(value, name)\n\n\n@cache\ndef _secrets_masker() -> SecretsMasker:\n for flt in logging.getLogger('airflow.task').filters:\n if isinstance(flt, SecretsMasker):\n return flt\n raise RuntimeError(\n \"Logging Configuration Error! No SecretsMasker found! If you have custom logging, please make \"\n \"sure you configure it taking airflow configuration as a base as explained at \"\n \"https://airflow.apache.org/docs/apache-airflow/stable/logging-monitoring/logging-tasks.html\"\n \"#advanced-configuration\"\n )\n\n\nclass SecretsMasker(logging.Filter):\n \"\"\"Redact secrets from logs\"\"\"\n\n replacer: re.Pattern | None = None\n patterns: set[str]\n\n ALREADY_FILTERED_FLAG = \"__SecretsMasker_filtered\"\n MAX_RECURSION_DEPTH = 5\n\n def __init__(self):\n super().__init__()\n self.patterns = set()\n\n @cached_property\n def _record_attrs_to_ignore(self) -> Iterable[str]:\n # Doing log.info(..., extra={'foo': 2}) sets extra properties on\n # record, i.e. record.foo. And we need to filter those too. Fun\n #\n # Create a record, and look at what attributes are on it, and ignore\n # all the default ones!\n\n record = logging.getLogRecordFactory()(\n # name, level, pathname, lineno, msg, args, exc_info, func=None, sinfo=None,\n \"x\",\n logging.INFO,\n __file__,\n 1,\n \"\",\n tuple(),\n exc_info=None,\n func=\"funcname\",\n )\n return frozenset(record.__dict__).difference({'msg', 'args'})\n\n def _redact_exception_with_context(self, exception):\n # Exception class may not be modifiable (e.g. declared by an\n # extension module such as JDBC).\n try:\n exception.args = (self.redact(v) for v in exception.args)\n except AttributeError:\n pass\n if exception.__context__:\n self._redact_exception_with_context(exception.__context__)\n if exception.__cause__ and exception.__cause__ is not exception.__context__:\n self._redact_exception_with_context(exception.__cause__)\n\n def filter(self, record) -> bool:\n if settings.MASK_SECRETS_IN_LOGS is not True:\n return True\n\n if self.ALREADY_FILTERED_FLAG in record.__dict__:\n # Filters are attached to multiple handlers and logs, keep a\n # \"private\" flag that stops us needing to process it more than once\n return True\n\n if self.replacer:\n for k, v in record.__dict__.items():\n if k in self._record_attrs_to_ignore:\n continue\n record.__dict__[k] = self.redact(v)\n if record.exc_info and record.exc_info[1] is not None:\n exc = record.exc_info[1]\n self._redact_exception_with_context(exc)\n record.__dict__[self.ALREADY_FILTERED_FLAG] = True\n\n return True\n\n def _redact_all(self, item: Redactable, depth: int) -> Redacted:\n if depth > self.MAX_RECURSION_DEPTH or isinstance(item, str):\n return '***'\n if isinstance(item, dict):\n return {dict_key: self._redact_all(subval, depth + 1) for dict_key, subval in item.items()}\n elif isinstance(item, (tuple, set)):\n # Turn set in to tuple!\n return tuple(self._redact_all(subval, depth + 1) for subval in item)\n elif isinstance(item, list):\n return list(self._redact_all(subval, depth + 1) for subval in item)\n else:\n return item\n\n def _redact(self, item: Redactable, name: str | None, depth: int) -> Redacted:\n # Avoid spending too much effort on redacting on deeply nested\n # structures. This also avoid infinite recursion if a structure has\n # reference to self.\n if depth > self.MAX_RECURSION_DEPTH:\n return item\n try:\n if name and should_hide_value_for_key(name):\n return self._redact_all(item, depth)\n if isinstance(item, dict):\n return {\n dict_key: self._redact(subval, name=dict_key, depth=(depth + 1))\n for dict_key, subval in item.items()\n }\n elif isinstance(item, str):\n if self.replacer:\n # We can't replace specific values, but the key-based redacting\n # can still happen, so we can't short-circuit, we need to walk\n # the structure.\n return self.replacer.sub('***', item)\n return item\n elif isinstance(item, (tuple, set)):\n # Turn set in to tuple!\n return tuple(self._redact(subval, name=None, depth=(depth + 1)) for subval in item)\n elif isinstance(item, list):\n return [self._redact(subval, name=None, depth=(depth + 1)) for subval in item]\n else:\n return item\n # I think this should never happen, but it does not hurt to leave it just in case\n # Well. It happened (see https://github.com/apache/airflow/issues/19816#issuecomment-983311373)\n # but it caused infinite recursion, so we need to cast it to str first.\n except Exception as e:\n log.warning(\n \"Unable to redact %s, please report this via <https://github.com/apache/airflow/issues>. \"\n \"Error was: %s: %s\",\n repr(item),\n type(e).__name__,\n str(e),\n )\n return item\n\n def redact(self, item: Redactable, name: str | None = None) -> Redacted:\n \"\"\"Redact an any secrets found in ``item``, if it is a string.\n\n If ``name`` is given, and it's a \"sensitive\" name (see\n :func:`should_hide_value_for_key`) then all string values in the item\n is redacted.\n \"\"\"\n return self._redact(item, name, depth=0)\n\n def add_mask(self, secret: str | dict | Iterable, name: str | None = None):\n \"\"\"Add a new secret to be masked to this filter instance.\"\"\"\n from airflow.configuration import conf\n\n test_mode: bool = conf.getboolean('core', 'unit_test_mode')\n if isinstance(secret, dict):\n for k, v in secret.items():\n self.add_mask(v, k)\n elif isinstance(secret, str):\n if not secret or (test_mode and secret in SECRETS_TO_SKIP_MASKING_FOR_TESTS):\n return\n pattern = re.escape(secret)\n if pattern not in self.patterns and (not name or should_hide_value_for_key(name)):\n self.patterns.add(pattern)\n self.replacer = re.compile('|'.join(self.patterns))\n elif isinstance(secret, collections.abc.Iterable):\n for v in secret:\n self.add_mask(v, name)\n\n\nclass RedactedIO(TextIO):\n \"\"\"IO class that redacts values going into stdout.\n\n Expected usage::\n\n with contextlib.redirect_stdout(RedactedIO()):\n ... # Writes to stdout will be redacted.\n \"\"\"\n\n def __init__(self):\n self.target = sys.stdout\n self.fileno = sys.stdout.fileno\n\n def write(self, s: str) -> int:\n s = redact(s)\n return self.target.write(s)\n\n def flush(self) -> None:\n return self.target.flush()\n", "path": "airflow/utils/log/secrets_masker.py"}]}
| 3,908 | 111 |
gh_patches_debug_38130
|
rasdani/github-patches
|
git_diff
|
modoboa__modoboa-726
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Create alias with tag (+) in recipient address with internal domain
It's impossible to create new alias with tag in recipient address.
Example :
- I've [email protected] mailbox
- I would like to create [email protected] alias with [email protected] recipient
I've this error « Local recipient [email protected] not found ».
Solution proposition :
- use Alias.extmboxes to record this email address with tag
Do you see other solution ?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `modoboa/extensions/admin/forms/alias.py`
Content:
```
1 from django import forms
2 from django.utils.translation import ugettext as _, ugettext_lazy
3 from django.http import QueryDict
4 from modoboa.lib.exceptions import BadRequest, NotFound, Conflict
5 from modoboa.lib.emailutils import split_mailbox
6 from modoboa.lib.formutils import (
7 DynamicForm
8 )
9 from modoboa.extensions.admin.models import (
10 Domain, Mailbox, Alias
11 )
12
13
14 class AliasForm(forms.ModelForm, DynamicForm):
15 email = forms.EmailField(
16 label=ugettext_lazy("Email address"),
17 help_text=ugettext_lazy(
18 "The distribution list address. Use the '*' character to create a "
19 "'catchall' address (ex: *@domain.tld)."
20 ),
21 widget=forms.TextInput(attrs={"class": "form-control"})
22 )
23 recipients = forms.EmailField(
24 label=ugettext_lazy("Recipients"), required=False,
25 help_text=ugettext_lazy(
26 "Mailbox(es) this alias will point to. Indicate only one address "
27 "per input, press ENTER to add a new input."
28 ),
29 widget=forms.TextInput(attrs={"class": "form-control"})
30 )
31
32 class Meta:
33 model = Alias
34 fields = ("enabled",)
35
36 def __init__(self, user, *args, **kwargs):
37 self.user = user
38 super(AliasForm, self).__init__(*args, **kwargs)
39 self.fields.keyOrder = ['email', 'recipients', 'enabled']
40
41 if len(args) and isinstance(args[0], QueryDict):
42 if "instance" in kwargs:
43 if not kwargs["instance"].domain.enabled:
44 del self.fields["enabled"]
45 self._load_from_qdict(args[0], "recipients", forms.EmailField)
46 elif "instance" in kwargs:
47 dlist = kwargs["instance"]
48 self.fields["email"].initial = dlist.full_address
49 if not dlist.domain.enabled:
50 self.fields["enabled"].widget.attrs["disabled"] = "disabled"
51 cpt = 1
52 for al in dlist.aliases.all():
53 name = "recipients_%d" % cpt
54 self._create_field(forms.EmailField, name, al.full_address, 2)
55 cpt += 1
56 for mb in dlist.mboxes.all():
57 name = "recipients_%d" % (cpt)
58 self._create_field(forms.EmailField, name, mb.full_address, 2)
59 cpt += 1
60 for addr in dlist.extmboxes.split(','):
61 if addr == "":
62 continue
63 name = "recipients_%d" % (cpt)
64 self._create_field(forms.EmailField, name, addr, 2)
65 cpt += 1
66
67 def clean_email(self):
68 localpart, domname = split_mailbox(self.cleaned_data["email"])
69 try:
70 domain = Domain.objects.get(name=domname)
71 except Domain.DoesNotExist:
72 raise forms.ValidationError(_("Domain does not exist"))
73 if not self.user.can_access(domain):
74 raise forms.ValidationError(
75 _("You don't have access to this domain")
76 )
77 return self.cleaned_data["email"].lower()
78
79 def set_recipients(self):
80 """Recipients dispatching
81
82 We make a difference between 'local' recipients (the ones hosted
83 by Modoboa) and 'external' recipients.
84 """
85 self.ext_rcpts = []
86 self.int_rcpts = []
87 total = 0
88
89 for k, v in self.cleaned_data.items():
90 if not k.startswith("recipients"):
91 continue
92 if v == "":
93 continue
94 local_part, domname = split_mailbox(v)
95 if domname is None:
96 raise BadRequest(
97 u"%s %s" % (_("Invalid mailbox"), v)
98 )
99 try:
100 domain = Domain.objects.get(name=domname)
101 except Domain.DoesNotExist:
102 domain = None
103 if domain is not None:
104 try:
105 rcpt = Alias.objects.get(domain=domain, address=local_part)
106 if rcpt.full_address == self.cleaned_data["email"]:
107 rcpt = None
108 except Alias.DoesNotExist:
109 rcpt = None
110 if rcpt is None:
111 try:
112 rcpt = Mailbox.objects.get(domain=domain, address=local_part)
113 except Mailbox.DoesNotExist:
114 raise NotFound(
115 _("Local recipient %s not found" % v)
116 )
117 if rcpt in self.int_rcpts:
118 raise Conflict(
119 _("Recipient %s already present" % v)
120 )
121 self.int_rcpts += [rcpt]
122 total += 1
123 continue
124
125 if v in self.ext_rcpts:
126 raise Conflict(
127 _("Recipient %s already present" % v)
128 )
129 self.ext_rcpts += [v]
130 total += 1
131
132 if total == 0:
133 raise BadRequest(_("No recipient defined"))
134
135 def save(self, commit=True):
136 alias = super(AliasForm, self).save(commit=False)
137 localpart, domname = split_mailbox(self.cleaned_data["email"])
138 alias.address = localpart
139 alias.domain = Domain.objects.get(name=domname)
140 if commit:
141 alias.save(int_rcpts=self.int_rcpts, ext_rcpts=self.ext_rcpts)
142 self.save_m2m()
143 return alias
144
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/modoboa/extensions/admin/forms/alias.py b/modoboa/extensions/admin/forms/alias.py
--- a/modoboa/extensions/admin/forms/alias.py
+++ b/modoboa/extensions/admin/forms/alias.py
@@ -96,31 +96,36 @@
raise BadRequest(
u"%s %s" % (_("Invalid mailbox"), v)
)
- try:
- domain = Domain.objects.get(name=domname)
- except Domain.DoesNotExist:
- domain = None
+
+ # Support tag in recipient, see https://github.com/tonioo/modoboa/issues/713
+ local_part_with_tag = None
+ if '+' in local_part:
+ local_part_with_tag = local_part
+ local_part = local_part[0:local_part.find('+')]
+
+ domain = Domain.objects.filter(name=domname).first()
+
if domain is not None:
- try:
- rcpt = Alias.objects.get(domain=domain, address=local_part)
- if rcpt.full_address == self.cleaned_data["email"]:
- rcpt = None
- except Alias.DoesNotExist:
+ rcpt = Alias.objects.filter(domain=domain, address=local_part).first()
+ if rcpt and (rcpt.full_address == self.cleaned_data["email"]):
rcpt = None
+
if rcpt is None:
try:
rcpt = Mailbox.objects.get(domain=domain, address=local_part)
except Mailbox.DoesNotExist:
raise NotFound(
- _("Local recipient %s not found" % v)
+ _("Local recipient %s@%s not found" % (local_part, domname))
)
- if rcpt in self.int_rcpts:
- raise Conflict(
- _("Recipient %s already present" % v)
- )
- self.int_rcpts += [rcpt]
- total += 1
- continue
+
+ if local_part_with_tag is None:
+ if rcpt in self.int_rcpts:
+ raise Conflict(
+ _("Recipient %s already present" % v)
+ )
+ self.int_rcpts += [rcpt]
+ total += 1
+ continue
if v in self.ext_rcpts:
raise Conflict(
@@ -134,8 +139,8 @@
def save(self, commit=True):
alias = super(AliasForm, self).save(commit=False)
- localpart, domname = split_mailbox(self.cleaned_data["email"])
- alias.address = localpart
+ local_part, domname = split_mailbox(self.cleaned_data["email"])
+ alias.address = local_part
alias.domain = Domain.objects.get(name=domname)
if commit:
alias.save(int_rcpts=self.int_rcpts, ext_rcpts=self.ext_rcpts)
|
{"golden_diff": "diff --git a/modoboa/extensions/admin/forms/alias.py b/modoboa/extensions/admin/forms/alias.py\n--- a/modoboa/extensions/admin/forms/alias.py\n+++ b/modoboa/extensions/admin/forms/alias.py\n@@ -96,31 +96,36 @@\n raise BadRequest(\n u\"%s %s\" % (_(\"Invalid mailbox\"), v)\n )\n- try:\n- domain = Domain.objects.get(name=domname)\n- except Domain.DoesNotExist:\n- domain = None\n+\n+ # Support tag in recipient, see https://github.com/tonioo/modoboa/issues/713\n+ local_part_with_tag = None\n+ if '+' in local_part:\n+ local_part_with_tag = local_part\n+ local_part = local_part[0:local_part.find('+')]\n+\n+ domain = Domain.objects.filter(name=domname).first()\n+\n if domain is not None:\n- try:\n- rcpt = Alias.objects.get(domain=domain, address=local_part)\n- if rcpt.full_address == self.cleaned_data[\"email\"]:\n- rcpt = None\n- except Alias.DoesNotExist:\n+ rcpt = Alias.objects.filter(domain=domain, address=local_part).first()\n+ if rcpt and (rcpt.full_address == self.cleaned_data[\"email\"]):\n rcpt = None\n+\n if rcpt is None:\n try:\n rcpt = Mailbox.objects.get(domain=domain, address=local_part)\n except Mailbox.DoesNotExist:\n raise NotFound(\n- _(\"Local recipient %s not found\" % v)\n+ _(\"Local recipient %s@%s not found\" % (local_part, domname))\n )\n- if rcpt in self.int_rcpts:\n- raise Conflict(\n- _(\"Recipient %s already present\" % v)\n- )\n- self.int_rcpts += [rcpt]\n- total += 1\n- continue\n+\n+ if local_part_with_tag is None:\n+ if rcpt in self.int_rcpts:\n+ raise Conflict(\n+ _(\"Recipient %s already present\" % v)\n+ )\n+ self.int_rcpts += [rcpt]\n+ total += 1\n+ continue\n \n if v in self.ext_rcpts:\n raise Conflict(\n@@ -134,8 +139,8 @@\n \n def save(self, commit=True):\n alias = super(AliasForm, self).save(commit=False)\n- localpart, domname = split_mailbox(self.cleaned_data[\"email\"])\n- alias.address = localpart\n+ local_part, domname = split_mailbox(self.cleaned_data[\"email\"])\n+ alias.address = local_part\n alias.domain = Domain.objects.get(name=domname)\n if commit:\n alias.save(int_rcpts=self.int_rcpts, ext_rcpts=self.ext_rcpts)\n", "issue": "Create alias with tag (+) in recipient address with internal domain\nIt's impossible to create new alias with tag in recipient address.\n\nExample : \n- I've [email protected] mailbox\n- I would like to create [email protected] alias with [email protected] recipient\n\nI've this error \u00ab Local recipient [email protected] not found \u00bb.\n\nSolution proposition : \n- use Alias.extmboxes to record this email address with tag\n\nDo you see other solution ?\n\n", "before_files": [{"content": "from django import forms\nfrom django.utils.translation import ugettext as _, ugettext_lazy\nfrom django.http import QueryDict\nfrom modoboa.lib.exceptions import BadRequest, NotFound, Conflict\nfrom modoboa.lib.emailutils import split_mailbox\nfrom modoboa.lib.formutils import (\n DynamicForm\n)\nfrom modoboa.extensions.admin.models import (\n Domain, Mailbox, Alias\n)\n\n\nclass AliasForm(forms.ModelForm, DynamicForm):\n email = forms.EmailField(\n label=ugettext_lazy(\"Email address\"),\n help_text=ugettext_lazy(\n \"The distribution list address. Use the '*' character to create a \"\n \"'catchall' address (ex: *@domain.tld).\"\n ),\n widget=forms.TextInput(attrs={\"class\": \"form-control\"})\n )\n recipients = forms.EmailField(\n label=ugettext_lazy(\"Recipients\"), required=False,\n help_text=ugettext_lazy(\n \"Mailbox(es) this alias will point to. Indicate only one address \"\n \"per input, press ENTER to add a new input.\"\n ),\n widget=forms.TextInput(attrs={\"class\": \"form-control\"})\n )\n\n class Meta:\n model = Alias\n fields = (\"enabled\",)\n\n def __init__(self, user, *args, **kwargs):\n self.user = user\n super(AliasForm, self).__init__(*args, **kwargs)\n self.fields.keyOrder = ['email', 'recipients', 'enabled']\n\n if len(args) and isinstance(args[0], QueryDict):\n if \"instance\" in kwargs:\n if not kwargs[\"instance\"].domain.enabled:\n del self.fields[\"enabled\"]\n self._load_from_qdict(args[0], \"recipients\", forms.EmailField)\n elif \"instance\" in kwargs:\n dlist = kwargs[\"instance\"]\n self.fields[\"email\"].initial = dlist.full_address\n if not dlist.domain.enabled:\n self.fields[\"enabled\"].widget.attrs[\"disabled\"] = \"disabled\"\n cpt = 1\n for al in dlist.aliases.all():\n name = \"recipients_%d\" % cpt\n self._create_field(forms.EmailField, name, al.full_address, 2)\n cpt += 1\n for mb in dlist.mboxes.all():\n name = \"recipients_%d\" % (cpt)\n self._create_field(forms.EmailField, name, mb.full_address, 2)\n cpt += 1\n for addr in dlist.extmboxes.split(','):\n if addr == \"\":\n continue\n name = \"recipients_%d\" % (cpt)\n self._create_field(forms.EmailField, name, addr, 2)\n cpt += 1\n\n def clean_email(self):\n localpart, domname = split_mailbox(self.cleaned_data[\"email\"])\n try:\n domain = Domain.objects.get(name=domname)\n except Domain.DoesNotExist:\n raise forms.ValidationError(_(\"Domain does not exist\"))\n if not self.user.can_access(domain):\n raise forms.ValidationError(\n _(\"You don't have access to this domain\")\n )\n return self.cleaned_data[\"email\"].lower()\n\n def set_recipients(self):\n \"\"\"Recipients dispatching\n\n We make a difference between 'local' recipients (the ones hosted\n by Modoboa) and 'external' recipients.\n \"\"\"\n self.ext_rcpts = []\n self.int_rcpts = []\n total = 0\n\n for k, v in self.cleaned_data.items():\n if not k.startswith(\"recipients\"):\n continue\n if v == \"\":\n continue\n local_part, domname = split_mailbox(v)\n if domname is None:\n raise BadRequest(\n u\"%s %s\" % (_(\"Invalid mailbox\"), v)\n )\n try:\n domain = Domain.objects.get(name=domname)\n except Domain.DoesNotExist:\n domain = None\n if domain is not None:\n try:\n rcpt = Alias.objects.get(domain=domain, address=local_part)\n if rcpt.full_address == self.cleaned_data[\"email\"]:\n rcpt = None\n except Alias.DoesNotExist:\n rcpt = None\n if rcpt is None:\n try:\n rcpt = Mailbox.objects.get(domain=domain, address=local_part)\n except Mailbox.DoesNotExist:\n raise NotFound(\n _(\"Local recipient %s not found\" % v)\n )\n if rcpt in self.int_rcpts:\n raise Conflict(\n _(\"Recipient %s already present\" % v)\n )\n self.int_rcpts += [rcpt]\n total += 1\n continue\n\n if v in self.ext_rcpts:\n raise Conflict(\n _(\"Recipient %s already present\" % v)\n )\n self.ext_rcpts += [v]\n total += 1\n\n if total == 0:\n raise BadRequest(_(\"No recipient defined\"))\n\n def save(self, commit=True):\n alias = super(AliasForm, self).save(commit=False)\n localpart, domname = split_mailbox(self.cleaned_data[\"email\"])\n alias.address = localpart\n alias.domain = Domain.objects.get(name=domname)\n if commit:\n alias.save(int_rcpts=self.int_rcpts, ext_rcpts=self.ext_rcpts)\n self.save_m2m()\n return alias\n", "path": "modoboa/extensions/admin/forms/alias.py"}], "after_files": [{"content": "from django import forms\nfrom django.utils.translation import ugettext as _, ugettext_lazy\nfrom django.http import QueryDict\nfrom modoboa.lib.exceptions import BadRequest, NotFound, Conflict\nfrom modoboa.lib.emailutils import split_mailbox\nfrom modoboa.lib.formutils import (\n DynamicForm\n)\nfrom modoboa.extensions.admin.models import (\n Domain, Mailbox, Alias\n)\n\n\nclass AliasForm(forms.ModelForm, DynamicForm):\n email = forms.EmailField(\n label=ugettext_lazy(\"Email address\"),\n help_text=ugettext_lazy(\n \"The distribution list address. Use the '*' character to create a \"\n \"'catchall' address (ex: *@domain.tld).\"\n ),\n widget=forms.TextInput(attrs={\"class\": \"form-control\"})\n )\n recipients = forms.EmailField(\n label=ugettext_lazy(\"Recipients\"), required=False,\n help_text=ugettext_lazy(\n \"Mailbox(es) this alias will point to. Indicate only one address \"\n \"per input, press ENTER to add a new input.\"\n ),\n widget=forms.TextInput(attrs={\"class\": \"form-control\"})\n )\n\n class Meta:\n model = Alias\n fields = (\"enabled\",)\n\n def __init__(self, user, *args, **kwargs):\n self.user = user\n super(AliasForm, self).__init__(*args, **kwargs)\n self.fields.keyOrder = ['email', 'recipients', 'enabled']\n\n if len(args) and isinstance(args[0], QueryDict):\n if \"instance\" in kwargs:\n if not kwargs[\"instance\"].domain.enabled:\n del self.fields[\"enabled\"]\n self._load_from_qdict(args[0], \"recipients\", forms.EmailField)\n elif \"instance\" in kwargs:\n dlist = kwargs[\"instance\"]\n self.fields[\"email\"].initial = dlist.full_address\n if not dlist.domain.enabled:\n self.fields[\"enabled\"].widget.attrs[\"disabled\"] = \"disabled\"\n cpt = 1\n for al in dlist.aliases.all():\n name = \"recipients_%d\" % cpt\n self._create_field(forms.EmailField, name, al.full_address, 2)\n cpt += 1\n for mb in dlist.mboxes.all():\n name = \"recipients_%d\" % (cpt)\n self._create_field(forms.EmailField, name, mb.full_address, 2)\n cpt += 1\n for addr in dlist.extmboxes.split(','):\n if addr == \"\":\n continue\n name = \"recipients_%d\" % (cpt)\n self._create_field(forms.EmailField, name, addr, 2)\n cpt += 1\n\n def clean_email(self):\n localpart, domname = split_mailbox(self.cleaned_data[\"email\"])\n try:\n domain = Domain.objects.get(name=domname)\n except Domain.DoesNotExist:\n raise forms.ValidationError(_(\"Domain does not exist\"))\n if not self.user.can_access(domain):\n raise forms.ValidationError(\n _(\"You don't have access to this domain\")\n )\n return self.cleaned_data[\"email\"].lower()\n\n def set_recipients(self):\n \"\"\"Recipients dispatching\n\n We make a difference between 'local' recipients (the ones hosted\n by Modoboa) and 'external' recipients.\n \"\"\"\n self.ext_rcpts = []\n self.int_rcpts = []\n total = 0\n\n for k, v in self.cleaned_data.items():\n if not k.startswith(\"recipients\"):\n continue\n if v == \"\":\n continue\n local_part, domname = split_mailbox(v)\n if domname is None:\n raise BadRequest(\n u\"%s %s\" % (_(\"Invalid mailbox\"), v)\n )\n\n # Support tag in recipient, see https://github.com/tonioo/modoboa/issues/713\n local_part_with_tag = None\n if '+' in local_part:\n local_part_with_tag = local_part\n local_part = local_part[0:local_part.find('+')]\n\n domain = Domain.objects.filter(name=domname).first()\n\n if domain is not None:\n rcpt = Alias.objects.filter(domain=domain, address=local_part).first()\n if rcpt and (rcpt.full_address == self.cleaned_data[\"email\"]):\n rcpt = None\n\n if rcpt is None:\n try:\n rcpt = Mailbox.objects.get(domain=domain, address=local_part)\n except Mailbox.DoesNotExist:\n raise NotFound(\n _(\"Local recipient %s@%s not found\" % (local_part, domname))\n )\n\n if local_part_with_tag is None:\n if rcpt in self.int_rcpts:\n raise Conflict(\n _(\"Recipient %s already present\" % v)\n )\n self.int_rcpts += [rcpt]\n total += 1\n continue\n\n if v in self.ext_rcpts:\n raise Conflict(\n _(\"Recipient %s already present\" % v)\n )\n self.ext_rcpts += [v]\n total += 1\n\n if total == 0:\n raise BadRequest(_(\"No recipient defined\"))\n\n def save(self, commit=True):\n alias = super(AliasForm, self).save(commit=False)\n local_part, domname = split_mailbox(self.cleaned_data[\"email\"])\n alias.address = local_part\n alias.domain = Domain.objects.get(name=domname)\n if commit:\n alias.save(int_rcpts=self.int_rcpts, ext_rcpts=self.ext_rcpts)\n self.save_m2m()\n return alias\n", "path": "modoboa/extensions/admin/forms/alias.py"}]}
| 1,807 | 621 |
gh_patches_debug_1856
|
rasdani/github-patches
|
git_diff
|
Kaggle__docker-python-1326
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
NameError: name 'io' is not defined
## 🐛 Bug
I am trying to run my scripts on GPU notebook, and I keep getting the following error.
```shell
Traceback (most recent call last):
File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/wandb_init.py", line 1172, in init
getcaller()
File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/wandb_init.py", line 846, in getcaller
src, line, func, stack = logger.findCaller(stack_info=True)
File "/root/.local/lib/python3.10/site-packages/log.py", line 42, in findCaller
sio = io.StringIO()
NameError: name 'io' is not defined
```
In addition, I found that there is no import `io` package in [this](https://github.com/Kaggle/docker-python/blob/main/patches/log.py) code.
### To Reproduce
### Expected behavior
### Additional context
<!-- Add any other context about the problem here. -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `patches/log.py`
Content:
```
1 import logging
2 import os
3
4 import google.auth
5
6
7 _LOG_TO_FILE_ENV = os.getenv("KAGGLE_LOG_TO_FILE")
8
9
10 class _LogFormatter(logging.Formatter):
11 """A logging formatter which truncates long messages."""
12
13 _MAX_LOG_LENGTH = 10000 # Be generous, not to truncate long backtraces.
14
15 def format(self, record):
16 msg = super(_LogFormatter, self).format(record)
17 return msg[:_LogFormatter._MAX_LOG_LENGTH] if msg else msg
18
19 # TODO(vimota): Clean this up once we're using python 3.8 and can use
20 # (https://github.com/python/cpython/commit/dde9fdbe453925279ac3d2a6a72102f6f9ef247c)
21 # Right now, making the logging module display the intended frame's information
22 # when the logging calls (info, warn, ...) are wrapped (as is the case in our
23 # Log class) involves fragile logic.
24 class _Logger(logging.Logger):
25
26 # This is a copy of logging.Logger.findCaller with the filename ignore
27 # set expanded to include the current filename (".../log.py").
28 # Copyright 2001-2015 by Vinay Sajip. All Rights Reserved.
29 # License: https://github.com/python/cpython/blob/ce9e62544571e7ade7186697d5dd065fb4c5243f/LICENSE
30 def findCaller(self, stack_info=False, stacklevel=1):
31 f = logging.currentframe()
32 f = f.f_back
33 rv = "(unknown file)", 0, "(unknown function)", None
34 while hasattr(f, "f_code"):
35 co = f.f_code
36 filename = os.path.normcase(co.co_filename)
37 if filename in _ignore_srcfiles:
38 f = f.f_back
39 continue
40 sinfo = None
41 if stack_info:
42 sio = io.StringIO()
43 sio.write('Stack (most recent call last):\n')
44 traceback.print_stack(f, file=sio)
45 sinfo = sio.getvalue()
46 if sinfo[-1] == '\n':
47 sinfo = sinfo[:-1]
48 sio.close()
49 rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
50 break
51 return rv
52
53
54 _srcfile = os.path.normcase(_Logger.findCaller.__code__.co_filename)
55 _ignore_srcfiles = (_srcfile, logging._srcfile)
56
57 class Log:
58 """ Helper aggregate for all things related to logging activity. """
59
60 _GLOBAL_LOG = logging.getLogger("")
61 _initialized = False
62
63 # These are convenience helpers. For performance, consider saving Log.get_logger() and using that
64 @staticmethod
65 def critical(msg, *args, **kwargs):
66 Log._GLOBAL_LOG.critical(msg, *args, **kwargs)
67
68 @staticmethod
69 def fatal(msg, *args, **kwargs):
70 Log._GLOBAL_LOG.fatal(msg, *args, **kwargs)
71
72 @staticmethod
73 def exception(msg, *args, **kwargs):
74 Log._GLOBAL_LOG.exception(msg, *args, **kwargs)
75
76 @staticmethod
77 def error(msg, *args, **kwargs):
78 Log._GLOBAL_LOG.error(msg, *args, **kwargs)
79
80 @staticmethod
81 def warn(msg, *args, **kwargs):
82 Log._GLOBAL_LOG.warn(msg, *args, **kwargs)
83
84 @staticmethod
85 def warning(msg, *args, **kwargs):
86 Log._GLOBAL_LOG.warning(msg, *args, **kwargs)
87
88 @staticmethod
89 def debug(msg, *args, **kwargs):
90 Log._GLOBAL_LOG.debug(msg, *args, **kwargs)
91
92 @staticmethod
93 def info(msg, *args, **kwargs):
94 Log._GLOBAL_LOG.info(msg, *args, **kwargs)
95
96 @staticmethod
97 def set_level(loglevel):
98 if isinstance(loglevel, int):
99 Log._GLOBAL_LOG.setLevel(loglevel)
100 return
101 elif isinstance(loglevel, str):
102 # idea from https://docs.python.org/3.5/howto/logging.html#logging-to-a-file
103 numeric_level = getattr(logging, loglevel.upper(), None)
104 if isinstance(numeric_level, int):
105 Log._GLOBAL_LOG.setLevel(numeric_level)
106 return
107
108 raise ValueError('Invalid log level: %s' % loglevel)
109
110 @staticmethod
111 def _static_init():
112 if Log._initialized:
113 return
114
115 logging.setLoggerClass(_Logger)
116 # The root logger's type is unfortunately (and surprisingly) not affected by
117 # `setLoggerClass`. Monkey patch it instead. TODO(vimota): Remove this, see the TODO
118 # associated with _Logger.
119 logging.RootLogger.findCaller = _Logger.findCaller
120 log_to_file = _LOG_TO_FILE_ENV.lower() in ("yes", "true", "t", "1") if _LOG_TO_FILE_ENV is not None else True
121 if log_to_file:
122 handler = logging.FileHandler(filename='/tmp/kaggle.log', mode='w')
123 else:
124 handler = logging.StreamHandler()
125
126 # ".1s" is for the first letter: http://stackoverflow.com/a/27453084/1869.
127 format_string = "%(asctime)s %(levelname).1s %(process)d %(filename)s:%(lineno)d] %(message)s"
128 handler.setFormatter(_LogFormatter(format_string))
129 logging.basicConfig(level=logging.INFO, handlers=[handler])
130 Log._initialized = True
131
132 Log._static_init()
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/patches/log.py b/patches/log.py
--- a/patches/log.py
+++ b/patches/log.py
@@ -1,3 +1,4 @@
+import io
import logging
import os
@@ -129,4 +130,4 @@
logging.basicConfig(level=logging.INFO, handlers=[handler])
Log._initialized = True
-Log._static_init()
\ No newline at end of file
+Log._static_init()
|
{"golden_diff": "diff --git a/patches/log.py b/patches/log.py\n--- a/patches/log.py\n+++ b/patches/log.py\n@@ -1,3 +1,4 @@\n+import io\n import logging\n import os\n \n@@ -129,4 +130,4 @@\n logging.basicConfig(level=logging.INFO, handlers=[handler])\n Log._initialized = True\n \n-Log._static_init()\n\\ No newline at end of file\n+Log._static_init()\n", "issue": "NameError: name 'io' is not defined\n## \ud83d\udc1b Bug\r\n\r\nI am trying to run my scripts on GPU notebook, and I keep getting the following error.\r\n\r\n```shell\r\nTraceback (most recent call last):\r\n File \"/opt/conda/lib/python3.10/site-packages/wandb/sdk/wandb_init.py\", line 1172, in init\r\n getcaller()\r\n File \"/opt/conda/lib/python3.10/site-packages/wandb/sdk/wandb_init.py\", line 846, in getcaller\r\n src, line, func, stack = logger.findCaller(stack_info=True)\r\n File \"/root/.local/lib/python3.10/site-packages/log.py\", line 42, in findCaller\r\n sio = io.StringIO()\r\nNameError: name 'io' is not defined\r\n```\r\n\r\nIn addition, I found that there is no import `io` package in [this](https://github.com/Kaggle/docker-python/blob/main/patches/log.py) code.\r\n\r\n### To Reproduce \r\n\r\n### Expected behavior\r\n\r\n### Additional context\r\n\r\n<!-- Add any other context about the problem here. -->\r\n\n", "before_files": [{"content": "import logging\nimport os\n\nimport google.auth\n\n\n_LOG_TO_FILE_ENV = os.getenv(\"KAGGLE_LOG_TO_FILE\")\n\n\nclass _LogFormatter(logging.Formatter):\n \"\"\"A logging formatter which truncates long messages.\"\"\"\n\n _MAX_LOG_LENGTH = 10000 # Be generous, not to truncate long backtraces.\n\n def format(self, record):\n msg = super(_LogFormatter, self).format(record)\n return msg[:_LogFormatter._MAX_LOG_LENGTH] if msg else msg\n\n# TODO(vimota): Clean this up once we're using python 3.8 and can use\n# (https://github.com/python/cpython/commit/dde9fdbe453925279ac3d2a6a72102f6f9ef247c)\n# Right now, making the logging module display the intended frame's information\n# when the logging calls (info, warn, ...) are wrapped (as is the case in our\n# Log class) involves fragile logic.\nclass _Logger(logging.Logger):\n\n # This is a copy of logging.Logger.findCaller with the filename ignore\n # set expanded to include the current filename (\".../log.py\").\n # Copyright 2001-2015 by Vinay Sajip. All Rights Reserved.\n # License: https://github.com/python/cpython/blob/ce9e62544571e7ade7186697d5dd065fb4c5243f/LICENSE\n def findCaller(self, stack_info=False, stacklevel=1):\n f = logging.currentframe()\n f = f.f_back\n rv = \"(unknown file)\", 0, \"(unknown function)\", None\n while hasattr(f, \"f_code\"):\n co = f.f_code\n filename = os.path.normcase(co.co_filename)\n if filename in _ignore_srcfiles:\n f = f.f_back\n continue\n sinfo = None\n if stack_info:\n sio = io.StringIO()\n sio.write('Stack (most recent call last):\\n')\n traceback.print_stack(f, file=sio)\n sinfo = sio.getvalue()\n if sinfo[-1] == '\\n':\n sinfo = sinfo[:-1]\n sio.close()\n rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)\n break\n return rv\n\n\n_srcfile = os.path.normcase(_Logger.findCaller.__code__.co_filename)\n_ignore_srcfiles = (_srcfile, logging._srcfile)\n\nclass Log:\n \"\"\" Helper aggregate for all things related to logging activity. \"\"\"\n\n _GLOBAL_LOG = logging.getLogger(\"\")\n _initialized = False\n\n # These are convenience helpers. For performance, consider saving Log.get_logger() and using that\n @staticmethod\n def critical(msg, *args, **kwargs):\n Log._GLOBAL_LOG.critical(msg, *args, **kwargs)\n\n @staticmethod\n def fatal(msg, *args, **kwargs):\n Log._GLOBAL_LOG.fatal(msg, *args, **kwargs)\n\n @staticmethod\n def exception(msg, *args, **kwargs):\n Log._GLOBAL_LOG.exception(msg, *args, **kwargs)\n\n @staticmethod\n def error(msg, *args, **kwargs):\n Log._GLOBAL_LOG.error(msg, *args, **kwargs)\n\n @staticmethod\n def warn(msg, *args, **kwargs):\n Log._GLOBAL_LOG.warn(msg, *args, **kwargs)\n\n @staticmethod\n def warning(msg, *args, **kwargs):\n Log._GLOBAL_LOG.warning(msg, *args, **kwargs)\n\n @staticmethod\n def debug(msg, *args, **kwargs):\n Log._GLOBAL_LOG.debug(msg, *args, **kwargs)\n\n @staticmethod\n def info(msg, *args, **kwargs):\n Log._GLOBAL_LOG.info(msg, *args, **kwargs)\n\n @staticmethod\n def set_level(loglevel):\n if isinstance(loglevel, int):\n Log._GLOBAL_LOG.setLevel(loglevel)\n return\n elif isinstance(loglevel, str):\n # idea from https://docs.python.org/3.5/howto/logging.html#logging-to-a-file\n numeric_level = getattr(logging, loglevel.upper(), None)\n if isinstance(numeric_level, int):\n Log._GLOBAL_LOG.setLevel(numeric_level)\n return\n\n raise ValueError('Invalid log level: %s' % loglevel)\n\n @staticmethod\n def _static_init():\n if Log._initialized:\n return\n\n logging.setLoggerClass(_Logger)\n # The root logger's type is unfortunately (and surprisingly) not affected by\n # `setLoggerClass`. Monkey patch it instead. TODO(vimota): Remove this, see the TODO\n # associated with _Logger.\n logging.RootLogger.findCaller = _Logger.findCaller\n log_to_file = _LOG_TO_FILE_ENV.lower() in (\"yes\", \"true\", \"t\", \"1\") if _LOG_TO_FILE_ENV is not None else True\n if log_to_file:\n handler = logging.FileHandler(filename='/tmp/kaggle.log', mode='w')\n else:\n handler = logging.StreamHandler()\n \n # \".1s\" is for the first letter: http://stackoverflow.com/a/27453084/1869.\n format_string = \"%(asctime)s %(levelname).1s %(process)d %(filename)s:%(lineno)d] %(message)s\"\n handler.setFormatter(_LogFormatter(format_string))\n logging.basicConfig(level=logging.INFO, handlers=[handler])\n Log._initialized = True\n\nLog._static_init()", "path": "patches/log.py"}], "after_files": [{"content": "import io\nimport logging\nimport os\n\nimport google.auth\n\n\n_LOG_TO_FILE_ENV = os.getenv(\"KAGGLE_LOG_TO_FILE\")\n\n\nclass _LogFormatter(logging.Formatter):\n \"\"\"A logging formatter which truncates long messages.\"\"\"\n\n _MAX_LOG_LENGTH = 10000 # Be generous, not to truncate long backtraces.\n\n def format(self, record):\n msg = super(_LogFormatter, self).format(record)\n return msg[:_LogFormatter._MAX_LOG_LENGTH] if msg else msg\n\n# TODO(vimota): Clean this up once we're using python 3.8 and can use\n# (https://github.com/python/cpython/commit/dde9fdbe453925279ac3d2a6a72102f6f9ef247c)\n# Right now, making the logging module display the intended frame's information\n# when the logging calls (info, warn, ...) are wrapped (as is the case in our\n# Log class) involves fragile logic.\nclass _Logger(logging.Logger):\n\n # This is a copy of logging.Logger.findCaller with the filename ignore\n # set expanded to include the current filename (\".../log.py\").\n # Copyright 2001-2015 by Vinay Sajip. All Rights Reserved.\n # License: https://github.com/python/cpython/blob/ce9e62544571e7ade7186697d5dd065fb4c5243f/LICENSE\n def findCaller(self, stack_info=False, stacklevel=1):\n f = logging.currentframe()\n f = f.f_back\n rv = \"(unknown file)\", 0, \"(unknown function)\", None\n while hasattr(f, \"f_code\"):\n co = f.f_code\n filename = os.path.normcase(co.co_filename)\n if filename in _ignore_srcfiles:\n f = f.f_back\n continue\n sinfo = None\n if stack_info:\n sio = io.StringIO()\n sio.write('Stack (most recent call last):\\n')\n traceback.print_stack(f, file=sio)\n sinfo = sio.getvalue()\n if sinfo[-1] == '\\n':\n sinfo = sinfo[:-1]\n sio.close()\n rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)\n break\n return rv\n\n\n_srcfile = os.path.normcase(_Logger.findCaller.__code__.co_filename)\n_ignore_srcfiles = (_srcfile, logging._srcfile)\n\nclass Log:\n \"\"\" Helper aggregate for all things related to logging activity. \"\"\"\n\n _GLOBAL_LOG = logging.getLogger(\"\")\n _initialized = False\n\n # These are convenience helpers. For performance, consider saving Log.get_logger() and using that\n @staticmethod\n def critical(msg, *args, **kwargs):\n Log._GLOBAL_LOG.critical(msg, *args, **kwargs)\n\n @staticmethod\n def fatal(msg, *args, **kwargs):\n Log._GLOBAL_LOG.fatal(msg, *args, **kwargs)\n\n @staticmethod\n def exception(msg, *args, **kwargs):\n Log._GLOBAL_LOG.exception(msg, *args, **kwargs)\n\n @staticmethod\n def error(msg, *args, **kwargs):\n Log._GLOBAL_LOG.error(msg, *args, **kwargs)\n\n @staticmethod\n def warn(msg, *args, **kwargs):\n Log._GLOBAL_LOG.warn(msg, *args, **kwargs)\n\n @staticmethod\n def warning(msg, *args, **kwargs):\n Log._GLOBAL_LOG.warning(msg, *args, **kwargs)\n\n @staticmethod\n def debug(msg, *args, **kwargs):\n Log._GLOBAL_LOG.debug(msg, *args, **kwargs)\n\n @staticmethod\n def info(msg, *args, **kwargs):\n Log._GLOBAL_LOG.info(msg, *args, **kwargs)\n\n @staticmethod\n def set_level(loglevel):\n if isinstance(loglevel, int):\n Log._GLOBAL_LOG.setLevel(loglevel)\n return\n elif isinstance(loglevel, str):\n # idea from https://docs.python.org/3.5/howto/logging.html#logging-to-a-file\n numeric_level = getattr(logging, loglevel.upper(), None)\n if isinstance(numeric_level, int):\n Log._GLOBAL_LOG.setLevel(numeric_level)\n return\n\n raise ValueError('Invalid log level: %s' % loglevel)\n\n @staticmethod\n def _static_init():\n if Log._initialized:\n return\n\n logging.setLoggerClass(_Logger)\n # The root logger's type is unfortunately (and surprisingly) not affected by\n # `setLoggerClass`. Monkey patch it instead. TODO(vimota): Remove this, see the TODO\n # associated with _Logger.\n logging.RootLogger.findCaller = _Logger.findCaller\n log_to_file = _LOG_TO_FILE_ENV.lower() in (\"yes\", \"true\", \"t\", \"1\") if _LOG_TO_FILE_ENV is not None else True\n if log_to_file:\n handler = logging.FileHandler(filename='/tmp/kaggle.log', mode='w')\n else:\n handler = logging.StreamHandler()\n \n # \".1s\" is for the first letter: http://stackoverflow.com/a/27453084/1869.\n format_string = \"%(asctime)s %(levelname).1s %(process)d %(filename)s:%(lineno)d] %(message)s\"\n handler.setFormatter(_LogFormatter(format_string))\n logging.basicConfig(level=logging.INFO, handlers=[handler])\n Log._initialized = True\n\nLog._static_init()\n", "path": "patches/log.py"}]}
| 2,017 | 100 |
gh_patches_debug_6945
|
rasdani/github-patches
|
git_diff
|
interlegis__sapl-1948
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Erro na inclusão de Nome de Comissão acima de 50 caracteres
Ao inserir um Nome de Comissão acima de 50 caracteres aparece a mensagem Error 500. Mas na edição o sistema aceita.
grato
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sapl/comissoes/forms.py`
Content:
```
1 from django import forms
2 from django.contrib.contenttypes.models import ContentType
3 from django.core.exceptions import ValidationError
4 from django.db import transaction
5 from django.db.models import Q
6 from django.forms import ModelForm
7 from django.utils.translation import ugettext_lazy as _
8
9 from sapl.base.models import Autor, TipoAutor
10 from sapl.comissoes.models import (Comissao, Composicao, DocumentoAcessorio,
11 Participacao, Reuniao, Periodo)
12 from sapl.parlamentares.models import Legislatura, Mandato, Parlamentar
13
14 class ComposicaoForm(forms.ModelForm):
15
16 comissao = forms.CharField(required=False, label='Comissao', widget=forms.HiddenInput())
17
18 class Meta:
19 model = Composicao
20 exclude = []
21
22 def __init__(self, user=None, **kwargs):
23 super(ComposicaoForm, self).__init__(**kwargs)
24 self.fields['comissao'].widget.attrs['disabled'] = 'disabled'
25
26 def clean(self):
27 cleaned_data = super(ComposicaoForm, self).clean()
28
29 if not self.is_valid():
30 return cleaned_data
31
32 periodo = cleaned_data['periodo']
33 comissao_pk = self.initial['comissao'].id
34 intersecao_periodo = Composicao.objects.filter(
35 Q(periodo__data_inicio__lte=periodo.data_fim,
36 periodo__data_fim__gte=periodo.data_fim) |
37 Q(periodo__data_inicio__gte=periodo.data_inicio,
38 periodo__data_fim__lte=periodo.data_inicio),
39 comissao_id=comissao_pk)
40
41 if intersecao_periodo:
42 raise ValidationError('O período informado '
43 'choca com períodos já '
44 'cadastrados para esta comissão')
45
46 return cleaned_data
47
48 class PeriodoForm(forms.ModelForm):
49
50 class Meta:
51 model = Periodo
52 exclude = []
53
54 def clean(self):
55 cleaned_data = super(PeriodoForm, self).clean()
56
57 if not self.is_valid():
58 return cleaned_data
59
60 data_inicio = cleaned_data['data_inicio']
61 data_fim = cleaned_data['data_fim']
62
63 if data_fim and data_fim < data_inicio:
64 raise ValidationError('Data início não pode ser superior a data de fim')
65 return cleaned_data
66
67
68 class ParticipacaoCreateForm(forms.ModelForm):
69
70 parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())
71
72 class Meta:
73 model = Participacao
74 fields = '__all__'
75 exclude = ['composicao']
76
77 def __init__(self, user=None, **kwargs):
78 super(ParticipacaoCreateForm, self).__init__(**kwargs)
79
80 if self.instance:
81 comissao = kwargs['initial']
82 comissao_pk = int(comissao['parent_pk'])
83 composicao = Composicao.objects.get(id=comissao_pk)
84 participantes = composicao.participacao_set.all()
85 id_part = [p.parlamentar.id for p in participantes]
86 else:
87 id_part = []
88
89 qs = self.create_participacao()
90
91 parlamentares = Mandato.objects.filter(qs,
92 parlamentar__ativo=True
93 ).prefetch_related('parlamentar').\
94 values_list('parlamentar',
95 flat=True
96 ).distinct()
97
98 qs = Parlamentar.objects.filter(id__in=parlamentares).distinct().\
99 exclude(id__in=id_part)
100 eligible = self.verifica()
101 result = list(set(qs) & set(eligible))
102 if not cmp(result, eligible): # se igual a 0 significa que o qs e o eli são iguais!
103 self.fields['parlamentar'].queryset = qs
104 else:
105 ids = [e.id for e in eligible]
106 qs = Parlamentar.objects.filter(id__in=ids)
107 self.fields['parlamentar'].queryset = qs
108
109
110 def clean(self):
111 cleaned_data = super(ParticipacaoCreateForm, self).clean()
112
113 if not self.is_valid():
114 return cleaned_data
115
116 data_designacao = cleaned_data['data_designacao']
117 data_desligamento = cleaned_data['data_desligamento']
118
119 if data_desligamento and \
120 data_designacao > data_desligamento:
121 raise ValidationError(_('Data de designação não pode ser superior '
122 'à data de desligamento'))
123
124 composicao = Composicao.objects.get(id=self.initial['parent_pk'])
125 cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]
126
127 if cleaned_data['cargo'].nome in cargos_unicos:
128 msg = _('Este cargo é único para esta Comissão.')
129 raise ValidationError(msg)
130 return cleaned_data
131
132
133 def create_participacao(self):
134 composicao = Composicao.objects.get(id=self.initial['parent_pk'])
135 data_inicio_comissao = composicao.periodo.data_inicio
136 data_fim_comissao = composicao.periodo.data_fim
137 q1 = Q(data_fim_mandato__isnull=False,
138 data_fim_mandato__gte=data_inicio_comissao)
139 q2 = Q(data_inicio_mandato__gte=data_inicio_comissao) \
140 & Q(data_inicio_mandato__lte=data_fim_comissao)
141 q3 = Q(data_fim_mandato__isnull=True,
142 data_inicio_mandato__lte=data_inicio_comissao)
143 qs = q1 | q2 | q3
144 return qs
145
146 def verifica(self):
147 composicao = Composicao.objects.get(id=self.initial['parent_pk'])
148 participantes = composicao.participacao_set.all()
149 participantes_id = [p.parlamentar.id for p in participantes]
150 parlamentares = Parlamentar.objects.all().exclude(
151 id__in=participantes_id).order_by('nome_completo')
152 parlamentares = [p for p in parlamentares if p.ativo]
153
154 lista = []
155
156 for p in parlamentares:
157 mandatos = p.mandato_set.all()
158 for m in mandatos:
159 data_inicio = m.data_inicio_mandato
160 data_fim = m.data_fim_mandato
161 comp_data_inicio = composicao.periodo.data_inicio
162 comp_data_fim = composicao.periodo.data_fim
163 if (data_fim and data_fim >= comp_data_inicio)\
164 or (data_inicio >= comp_data_inicio and data_inicio <= comp_data_fim)\
165 or (data_fim is None and data_inicio <= comp_data_inicio):
166 lista.append(p)
167
168 lista = list(set(lista))
169
170 return lista
171
172
173 class ParticipacaoEditForm(forms.ModelForm):
174
175 parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())
176 nome_parlamentar = forms.CharField(required=False, label='Parlamentar')
177
178 class Meta:
179 model = Participacao
180 fields = ['nome_parlamentar', 'parlamentar', 'cargo', 'titular',
181 'data_designacao', 'data_desligamento',
182 'motivo_desligamento', 'observacao']
183 widgets = {
184 'parlamentar': forms.HiddenInput(),
185 }
186
187 def __init__(self, user=None, **kwargs):
188 super(ParticipacaoEditForm, self).__init__(**kwargs)
189 self.initial['nome_parlamentar'] = Parlamentar.objects.get(
190 id=self.initial['parlamentar']).nome_parlamentar
191 self.fields['nome_parlamentar'].widget.attrs['disabled'] = 'disabled'
192
193 def clean(self):
194 cleaned_data = super(ParticipacaoEditForm, self).clean()
195
196 if not self.is_valid():
197 return cleaned_data
198
199 data_designacao = cleaned_data['data_designacao']
200 data_desligamento = cleaned_data['data_desligamento']
201
202 if data_desligamento and \
203 data_designacao > data_desligamento:
204 raise ValidationError(_('Data de designação não pode ser superior '
205 'à data de desligamento'))
206
207 composicao_id = self.instance.composicao_id
208
209 composicao = Composicao.objects.get(id=composicao_id)
210 cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]
211
212 if cleaned_data['cargo'].nome in cargos_unicos:
213 msg = _('Este cargo é único para esta Comissão.')
214 raise ValidationError(msg)
215
216 return cleaned_data
217
218
219 class ComissaoForm(forms.ModelForm):
220
221 class Meta:
222 model = Comissao
223 fields = '__all__'
224
225 def __init__(self, user=None, **kwargs):
226 super(ComissaoForm, self).__init__(**kwargs)
227 inst = self.instance
228 if inst.pk:
229 if inst.tipo.natureza == 'P':
230 self.fields['apelido_temp'].widget.attrs['disabled'] = 'disabled'
231 self.fields['data_instalacao_temp'].widget.attrs['disabled'] = 'disabled'
232 self.fields['data_final_prevista_temp'].widget.attrs['disabled'] = 'disabled'
233 self.fields['data_prorrogada_temp'].widget.attrs['disabled'] = 'disabled'
234 self.fields['data_fim_comissao'].widget.attrs['disabled'] = 'disabled'
235
236
237
238 def clean(self):
239 super(ComissaoForm, self).clean()
240
241 if not self.is_valid():
242 return self.cleaned_data
243
244 if self.cleaned_data['data_extincao']:
245 if (self.cleaned_data['data_extincao'] <
246 self.cleaned_data['data_criacao']):
247 msg = _('Data de extinção não pode ser menor que a de criação')
248 raise ValidationError(msg)
249 return self.cleaned_data
250
251 @transaction.atomic
252 def save(self, commit=True):
253 inst = self.instance
254 if not inst.pk:
255 comissao = super(ComissaoForm, self).save(commit)
256 content_type = ContentType.objects.get_for_model(Comissao)
257 object_id = comissao.pk
258 tipo = TipoAutor.objects.get(descricao__icontains='Comiss')
259 nome = comissao.sigla + ' - ' + comissao.nome
260 Autor.objects.create(
261 content_type=content_type,
262 object_id=object_id,
263 tipo=tipo,
264 nome=nome
265 )
266 return comissao
267 else:
268 comissao = super(ComissaoForm, self).save(commit)
269 return comissao
270
271
272 class ReuniaoForm(ModelForm):
273
274 comissao = forms.ModelChoiceField(queryset=Comissao.objects.all(),
275 widget=forms.HiddenInput())
276
277 class Meta:
278 model = Reuniao
279 exclude = ['cod_andamento_reuniao']
280
281 def clean(self):
282 super(ReuniaoForm, self).clean()
283
284 if not self.is_valid():
285 return self.cleaned_data
286
287 if self.cleaned_data['hora_fim']:
288 if (self.cleaned_data['hora_fim'] <
289 self.cleaned_data['hora_inicio']):
290 msg = _('A hora de término da reunião não pode ser menor que a de início')
291 raise ValidationError(msg)
292 return self.cleaned_data
293
294 class DocumentoAcessorioCreateForm(forms.ModelForm):
295
296 parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())
297
298 class Meta:
299 model = DocumentoAcessorio
300 exclude = ['reuniao']
301
302 def __init__(self, user=None, **kwargs):
303 super(DocumentoAcessorioCreateForm, self).__init__(**kwargs)
304
305 if self.instance:
306 reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])
307 comissao = reuniao.comissao
308 comissao_pk = comissao.id
309 documentos = reuniao.documentoacessorio_set.all()
310 return self.create_documentoacessorio()
311
312
313 def create_documentoacessorio(self):
314 reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])
315
316
317 class DocumentoAcessorioEditForm(forms.ModelForm):
318
319 parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())
320
321 class Meta:
322 model = DocumentoAcessorio
323 fields = ['nome', 'data', 'autor', 'ementa',
324 'indexacao', 'arquivo']
325
326 def __init__(self, user=None, **kwargs):
327 super(DocumentoAcessorioEditForm, self).__init__(**kwargs)
328
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sapl/comissoes/forms.py b/sapl/comissoes/forms.py
--- a/sapl/comissoes/forms.py
+++ b/sapl/comissoes/forms.py
@@ -241,6 +241,9 @@
if not self.is_valid():
return self.cleaned_data
+ if len(self.cleaned_data['nome']) > 50:
+ msg = _('Nome da Comissão deve ter no máximo 50 caracteres.')
+ raise ValidationError(msg)
if self.cleaned_data['data_extincao']:
if (self.cleaned_data['data_extincao'] <
self.cleaned_data['data_criacao']):
|
{"golden_diff": "diff --git a/sapl/comissoes/forms.py b/sapl/comissoes/forms.py\n--- a/sapl/comissoes/forms.py\n+++ b/sapl/comissoes/forms.py\n@@ -241,6 +241,9 @@\n if not self.is_valid():\n return self.cleaned_data\n \n+ if len(self.cleaned_data['nome']) > 50:\n+ msg = _('Nome da Comiss\u00e3o deve ter no m\u00e1ximo 50 caracteres.')\n+ raise ValidationError(msg)\n if self.cleaned_data['data_extincao']:\n if (self.cleaned_data['data_extincao'] <\n self.cleaned_data['data_criacao']):\n", "issue": "Erro na inclus\u00e3o de Nome de Comiss\u00e3o acima de 50 caracteres\nAo inserir um Nome de Comiss\u00e3o acima de 50 caracteres aparece a mensagem Error 500. Mas na edi\u00e7\u00e3o o sistema aceita.\r\ngrato\n", "before_files": [{"content": "from django import forms\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.core.exceptions import ValidationError\nfrom django.db import transaction\nfrom django.db.models import Q\nfrom django.forms import ModelForm\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom sapl.base.models import Autor, TipoAutor\nfrom sapl.comissoes.models import (Comissao, Composicao, DocumentoAcessorio,\n Participacao, Reuniao, Periodo)\nfrom sapl.parlamentares.models import Legislatura, Mandato, Parlamentar\n\nclass ComposicaoForm(forms.ModelForm):\n\n comissao = forms.CharField(required=False, label='Comissao', widget=forms.HiddenInput())\n\n class Meta:\n model = Composicao\n exclude = []\n\n def __init__(self, user=None, **kwargs):\n super(ComposicaoForm, self).__init__(**kwargs)\n self.fields['comissao'].widget.attrs['disabled'] = 'disabled'\n\n def clean(self):\n cleaned_data = super(ComposicaoForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n periodo = cleaned_data['periodo']\n comissao_pk = self.initial['comissao'].id\n intersecao_periodo = Composicao.objects.filter(\n Q(periodo__data_inicio__lte=periodo.data_fim,\n periodo__data_fim__gte=periodo.data_fim) |\n Q(periodo__data_inicio__gte=periodo.data_inicio,\n periodo__data_fim__lte=periodo.data_inicio),\n comissao_id=comissao_pk)\n\n if intersecao_periodo:\n raise ValidationError('O per\u00edodo informado '\n 'choca com per\u00edodos j\u00e1 '\n 'cadastrados para esta comiss\u00e3o')\n\n return cleaned_data\n\nclass PeriodoForm(forms.ModelForm):\n\n class Meta:\n model = Periodo\n exclude = []\n\n def clean(self):\n cleaned_data = super(PeriodoForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n data_inicio = cleaned_data['data_inicio']\n data_fim = cleaned_data['data_fim']\n\n if data_fim and data_fim < data_inicio:\n raise ValidationError('Data in\u00edcio n\u00e3o pode ser superior a data de fim')\n return cleaned_data\n\n\nclass ParticipacaoCreateForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = Participacao\n fields = '__all__'\n exclude = ['composicao']\n\n def __init__(self, user=None, **kwargs):\n super(ParticipacaoCreateForm, self).__init__(**kwargs)\n\n if self.instance:\n comissao = kwargs['initial']\n comissao_pk = int(comissao['parent_pk'])\n composicao = Composicao.objects.get(id=comissao_pk)\n participantes = composicao.participacao_set.all()\n id_part = [p.parlamentar.id for p in participantes]\n else:\n id_part = []\n\n qs = self.create_participacao()\n\n parlamentares = Mandato.objects.filter(qs,\n parlamentar__ativo=True\n ).prefetch_related('parlamentar').\\\n values_list('parlamentar',\n flat=True\n ).distinct()\n\n qs = Parlamentar.objects.filter(id__in=parlamentares).distinct().\\\n exclude(id__in=id_part)\n eligible = self.verifica()\n result = list(set(qs) & set(eligible))\n if not cmp(result, eligible): # se igual a 0 significa que o qs e o eli s\u00e3o iguais!\n self.fields['parlamentar'].queryset = qs\n else:\n ids = [e.id for e in eligible]\n qs = Parlamentar.objects.filter(id__in=ids)\n self.fields['parlamentar'].queryset = qs\n\n\n def clean(self):\n cleaned_data = super(ParticipacaoCreateForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n data_designacao = cleaned_data['data_designacao']\n data_desligamento = cleaned_data['data_desligamento']\n\n if data_desligamento and \\\n data_designacao > data_desligamento:\n raise ValidationError(_('Data de designa\u00e7\u00e3o n\u00e3o pode ser superior '\n '\u00e0 data de desligamento'))\n\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]\n\n if cleaned_data['cargo'].nome in cargos_unicos:\n msg = _('Este cargo \u00e9 \u00fanico para esta Comiss\u00e3o.')\n raise ValidationError(msg)\n return cleaned_data\n\n\n def create_participacao(self):\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n data_inicio_comissao = composicao.periodo.data_inicio\n data_fim_comissao = composicao.periodo.data_fim\n q1 = Q(data_fim_mandato__isnull=False,\n data_fim_mandato__gte=data_inicio_comissao)\n q2 = Q(data_inicio_mandato__gte=data_inicio_comissao) \\\n & Q(data_inicio_mandato__lte=data_fim_comissao)\n q3 = Q(data_fim_mandato__isnull=True,\n data_inicio_mandato__lte=data_inicio_comissao)\n qs = q1 | q2 | q3\n return qs\n\n def verifica(self):\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n participantes = composicao.participacao_set.all()\n participantes_id = [p.parlamentar.id for p in participantes]\n parlamentares = Parlamentar.objects.all().exclude(\n id__in=participantes_id).order_by('nome_completo')\n parlamentares = [p for p in parlamentares if p.ativo]\n\n lista = []\n\n for p in parlamentares:\n mandatos = p.mandato_set.all()\n for m in mandatos:\n data_inicio = m.data_inicio_mandato\n data_fim = m.data_fim_mandato\n comp_data_inicio = composicao.periodo.data_inicio\n comp_data_fim = composicao.periodo.data_fim\n if (data_fim and data_fim >= comp_data_inicio)\\\n or (data_inicio >= comp_data_inicio and data_inicio <= comp_data_fim)\\\n or (data_fim is None and data_inicio <= comp_data_inicio):\n lista.append(p)\n\n lista = list(set(lista))\n\n return lista\n\n\nclass ParticipacaoEditForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n nome_parlamentar = forms.CharField(required=False, label='Parlamentar')\n\n class Meta:\n model = Participacao\n fields = ['nome_parlamentar', 'parlamentar', 'cargo', 'titular',\n 'data_designacao', 'data_desligamento',\n 'motivo_desligamento', 'observacao']\n widgets = {\n 'parlamentar': forms.HiddenInput(),\n }\n\n def __init__(self, user=None, **kwargs):\n super(ParticipacaoEditForm, self).__init__(**kwargs)\n self.initial['nome_parlamentar'] = Parlamentar.objects.get(\n id=self.initial['parlamentar']).nome_parlamentar\n self.fields['nome_parlamentar'].widget.attrs['disabled'] = 'disabled'\n\n def clean(self):\n cleaned_data = super(ParticipacaoEditForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n data_designacao = cleaned_data['data_designacao']\n data_desligamento = cleaned_data['data_desligamento']\n\n if data_desligamento and \\\n data_designacao > data_desligamento:\n raise ValidationError(_('Data de designa\u00e7\u00e3o n\u00e3o pode ser superior '\n '\u00e0 data de desligamento'))\n\n composicao_id = self.instance.composicao_id\n\n composicao = Composicao.objects.get(id=composicao_id)\n cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]\n\n if cleaned_data['cargo'].nome in cargos_unicos:\n msg = _('Este cargo \u00e9 \u00fanico para esta Comiss\u00e3o.')\n raise ValidationError(msg)\n\n return cleaned_data\n\n\nclass ComissaoForm(forms.ModelForm):\n\n class Meta:\n model = Comissao\n fields = '__all__'\n\n def __init__(self, user=None, **kwargs):\n super(ComissaoForm, self).__init__(**kwargs)\n inst = self.instance\n if inst.pk:\n if inst.tipo.natureza == 'P':\n self.fields['apelido_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_instalacao_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_final_prevista_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_prorrogada_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_fim_comissao'].widget.attrs['disabled'] = 'disabled'\n\n\n\n def clean(self):\n super(ComissaoForm, self).clean()\n\n if not self.is_valid():\n return self.cleaned_data\n\n if self.cleaned_data['data_extincao']:\n if (self.cleaned_data['data_extincao'] <\n self.cleaned_data['data_criacao']):\n msg = _('Data de extin\u00e7\u00e3o n\u00e3o pode ser menor que a de cria\u00e7\u00e3o')\n raise ValidationError(msg)\n return self.cleaned_data\n\n @transaction.atomic\n def save(self, commit=True):\n inst = self.instance\n if not inst.pk:\n comissao = super(ComissaoForm, self).save(commit)\n content_type = ContentType.objects.get_for_model(Comissao)\n object_id = comissao.pk\n tipo = TipoAutor.objects.get(descricao__icontains='Comiss')\n nome = comissao.sigla + ' - ' + comissao.nome\n Autor.objects.create(\n content_type=content_type,\n object_id=object_id,\n tipo=tipo,\n nome=nome\n )\n return comissao\n else:\n comissao = super(ComissaoForm, self).save(commit)\n return comissao\n\n\nclass ReuniaoForm(ModelForm):\n\n comissao = forms.ModelChoiceField(queryset=Comissao.objects.all(),\n widget=forms.HiddenInput())\n\n class Meta:\n model = Reuniao\n exclude = ['cod_andamento_reuniao']\n\n def clean(self):\n super(ReuniaoForm, self).clean()\n\n if not self.is_valid():\n return self.cleaned_data\n\n if self.cleaned_data['hora_fim']:\n if (self.cleaned_data['hora_fim'] <\n self.cleaned_data['hora_inicio']):\n msg = _('A hora de t\u00e9rmino da reuni\u00e3o n\u00e3o pode ser menor que a de in\u00edcio')\n raise ValidationError(msg)\n return self.cleaned_data\n\nclass DocumentoAcessorioCreateForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = DocumentoAcessorio\n exclude = ['reuniao']\n\n def __init__(self, user=None, **kwargs):\n super(DocumentoAcessorioCreateForm, self).__init__(**kwargs)\n\n if self.instance:\n reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])\n comissao = reuniao.comissao\n comissao_pk = comissao.id\n documentos = reuniao.documentoacessorio_set.all()\n return self.create_documentoacessorio()\n\n\n def create_documentoacessorio(self):\n reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])\n\n\nclass DocumentoAcessorioEditForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = DocumentoAcessorio\n fields = ['nome', 'data', 'autor', 'ementa',\n 'indexacao', 'arquivo']\n\n def __init__(self, user=None, **kwargs):\n super(DocumentoAcessorioEditForm, self).__init__(**kwargs)\n", "path": "sapl/comissoes/forms.py"}], "after_files": [{"content": "from django import forms\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.core.exceptions import ValidationError\nfrom django.db import transaction\nfrom django.db.models import Q\nfrom django.forms import ModelForm\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom sapl.base.models import Autor, TipoAutor\nfrom sapl.comissoes.models import (Comissao, Composicao, DocumentoAcessorio,\n Participacao, Reuniao, Periodo)\nfrom sapl.parlamentares.models import Legislatura, Mandato, Parlamentar\n\nclass ComposicaoForm(forms.ModelForm):\n\n comissao = forms.CharField(required=False, label='Comissao', widget=forms.HiddenInput())\n\n class Meta:\n model = Composicao\n exclude = []\n\n def __init__(self, user=None, **kwargs):\n super(ComposicaoForm, self).__init__(**kwargs)\n self.fields['comissao'].widget.attrs['disabled'] = 'disabled'\n\n def clean(self):\n cleaned_data = super(ComposicaoForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n periodo = cleaned_data['periodo']\n comissao_pk = self.initial['comissao'].id\n intersecao_periodo = Composicao.objects.filter(\n Q(periodo__data_inicio__lte=periodo.data_fim,\n periodo__data_fim__gte=periodo.data_fim) |\n Q(periodo__data_inicio__gte=periodo.data_inicio,\n periodo__data_fim__lte=periodo.data_inicio),\n comissao_id=comissao_pk)\n\n if intersecao_periodo:\n raise ValidationError('O per\u00edodo informado '\n 'choca com per\u00edodos j\u00e1 '\n 'cadastrados para esta comiss\u00e3o')\n\n return cleaned_data\n\nclass PeriodoForm(forms.ModelForm):\n\n class Meta:\n model = Periodo\n exclude = []\n\n def clean(self):\n cleaned_data = super(PeriodoForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n data_inicio = cleaned_data['data_inicio']\n data_fim = cleaned_data['data_fim']\n\n if data_fim and data_fim < data_inicio:\n raise ValidationError('Data in\u00edcio n\u00e3o pode ser superior a data de fim')\n return cleaned_data\n\n\nclass ParticipacaoCreateForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = Participacao\n fields = '__all__'\n exclude = ['composicao']\n\n def __init__(self, user=None, **kwargs):\n super(ParticipacaoCreateForm, self).__init__(**kwargs)\n\n if self.instance:\n comissao = kwargs['initial']\n comissao_pk = int(comissao['parent_pk'])\n composicao = Composicao.objects.get(id=comissao_pk)\n participantes = composicao.participacao_set.all()\n id_part = [p.parlamentar.id for p in participantes]\n else:\n id_part = []\n\n qs = self.create_participacao()\n\n parlamentares = Mandato.objects.filter(qs,\n parlamentar__ativo=True\n ).prefetch_related('parlamentar').\\\n values_list('parlamentar',\n flat=True\n ).distinct()\n\n qs = Parlamentar.objects.filter(id__in=parlamentares).distinct().\\\n exclude(id__in=id_part)\n eligible = self.verifica()\n result = list(set(qs) & set(eligible))\n if not cmp(result, eligible): # se igual a 0 significa que o qs e o eli s\u00e3o iguais!\n self.fields['parlamentar'].queryset = qs\n else:\n ids = [e.id for e in eligible]\n qs = Parlamentar.objects.filter(id__in=ids)\n self.fields['parlamentar'].queryset = qs\n\n\n def clean(self):\n cleaned_data = super(ParticipacaoCreateForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n data_designacao = cleaned_data['data_designacao']\n data_desligamento = cleaned_data['data_desligamento']\n\n if data_desligamento and \\\n data_designacao > data_desligamento:\n raise ValidationError(_('Data de designa\u00e7\u00e3o n\u00e3o pode ser superior '\n '\u00e0 data de desligamento'))\n\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]\n\n if cleaned_data['cargo'].nome in cargos_unicos:\n msg = _('Este cargo \u00e9 \u00fanico para esta Comiss\u00e3o.')\n raise ValidationError(msg)\n return cleaned_data\n\n\n def create_participacao(self):\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n data_inicio_comissao = composicao.periodo.data_inicio\n data_fim_comissao = composicao.periodo.data_fim\n q1 = Q(data_fim_mandato__isnull=False,\n data_fim_mandato__gte=data_inicio_comissao)\n q2 = Q(data_inicio_mandato__gte=data_inicio_comissao) \\\n & Q(data_inicio_mandato__lte=data_fim_comissao)\n q3 = Q(data_fim_mandato__isnull=True,\n data_inicio_mandato__lte=data_inicio_comissao)\n qs = q1 | q2 | q3\n return qs\n\n def verifica(self):\n composicao = Composicao.objects.get(id=self.initial['parent_pk'])\n participantes = composicao.participacao_set.all()\n participantes_id = [p.parlamentar.id for p in participantes]\n parlamentares = Parlamentar.objects.all().exclude(\n id__in=participantes_id).order_by('nome_completo')\n parlamentares = [p for p in parlamentares if p.ativo]\n\n lista = []\n\n for p in parlamentares:\n mandatos = p.mandato_set.all()\n for m in mandatos:\n data_inicio = m.data_inicio_mandato\n data_fim = m.data_fim_mandato\n comp_data_inicio = composicao.periodo.data_inicio\n comp_data_fim = composicao.periodo.data_fim\n if (data_fim and data_fim >= comp_data_inicio)\\\n or (data_inicio >= comp_data_inicio and data_inicio <= comp_data_fim)\\\n or (data_fim is None and data_inicio <= comp_data_inicio):\n lista.append(p)\n\n lista = list(set(lista))\n\n return lista\n\n\nclass ParticipacaoEditForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n nome_parlamentar = forms.CharField(required=False, label='Parlamentar')\n\n class Meta:\n model = Participacao\n fields = ['nome_parlamentar', 'parlamentar', 'cargo', 'titular',\n 'data_designacao', 'data_desligamento',\n 'motivo_desligamento', 'observacao']\n widgets = {\n 'parlamentar': forms.HiddenInput(),\n }\n\n def __init__(self, user=None, **kwargs):\n super(ParticipacaoEditForm, self).__init__(**kwargs)\n self.initial['nome_parlamentar'] = Parlamentar.objects.get(\n id=self.initial['parlamentar']).nome_parlamentar\n self.fields['nome_parlamentar'].widget.attrs['disabled'] = 'disabled'\n\n def clean(self):\n cleaned_data = super(ParticipacaoEditForm, self).clean()\n\n if not self.is_valid():\n return cleaned_data\n\n data_designacao = cleaned_data['data_designacao']\n data_desligamento = cleaned_data['data_desligamento']\n\n if data_desligamento and \\\n data_designacao > data_desligamento:\n raise ValidationError(_('Data de designa\u00e7\u00e3o n\u00e3o pode ser superior '\n '\u00e0 data de desligamento'))\n\n composicao_id = self.instance.composicao_id\n\n composicao = Composicao.objects.get(id=composicao_id)\n cargos_unicos = [c.cargo.nome for c in composicao.participacao_set.filter(cargo__unico=True)]\n\n if cleaned_data['cargo'].nome in cargos_unicos:\n msg = _('Este cargo \u00e9 \u00fanico para esta Comiss\u00e3o.')\n raise ValidationError(msg)\n\n return cleaned_data\n\n\nclass ComissaoForm(forms.ModelForm):\n\n class Meta:\n model = Comissao\n fields = '__all__'\n\n def __init__(self, user=None, **kwargs):\n super(ComissaoForm, self).__init__(**kwargs)\n inst = self.instance\n if inst.pk:\n if inst.tipo.natureza == 'P':\n self.fields['apelido_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_instalacao_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_final_prevista_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_prorrogada_temp'].widget.attrs['disabled'] = 'disabled'\n self.fields['data_fim_comissao'].widget.attrs['disabled'] = 'disabled'\n\n\n\n def clean(self):\n super(ComissaoForm, self).clean()\n\n if not self.is_valid():\n return self.cleaned_data\n\n if len(self.cleaned_data['nome']) > 50:\n msg = _('Nome da Comiss\u00e3o deve ter no m\u00e1ximo 50 caracteres.')\n raise ValidationError(msg)\n if self.cleaned_data['data_extincao']:\n if (self.cleaned_data['data_extincao'] <\n self.cleaned_data['data_criacao']):\n msg = _('Data de extin\u00e7\u00e3o n\u00e3o pode ser menor que a de cria\u00e7\u00e3o')\n raise ValidationError(msg)\n return self.cleaned_data\n\n @transaction.atomic\n def save(self, commit=True):\n inst = self.instance\n if not inst.pk:\n comissao = super(ComissaoForm, self).save(commit)\n content_type = ContentType.objects.get_for_model(Comissao)\n object_id = comissao.pk\n tipo = TipoAutor.objects.get(descricao__icontains='Comiss')\n nome = comissao.sigla + ' - ' + comissao.nome\n Autor.objects.create(\n content_type=content_type,\n object_id=object_id,\n tipo=tipo,\n nome=nome\n )\n return comissao\n else:\n comissao = super(ComissaoForm, self).save(commit)\n return comissao\n\n\nclass ReuniaoForm(ModelForm):\n\n comissao = forms.ModelChoiceField(queryset=Comissao.objects.all(),\n widget=forms.HiddenInput())\n\n class Meta:\n model = Reuniao\n exclude = ['cod_andamento_reuniao']\n\n def clean(self):\n super(ReuniaoForm, self).clean()\n\n if not self.is_valid():\n return self.cleaned_data\n\n if self.cleaned_data['hora_fim']:\n if (self.cleaned_data['hora_fim'] <\n self.cleaned_data['hora_inicio']):\n msg = _('A hora de t\u00e9rmino da reuni\u00e3o n\u00e3o pode ser menor que a de in\u00edcio')\n raise ValidationError(msg)\n return self.cleaned_data\n\nclass DocumentoAcessorioCreateForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = DocumentoAcessorio\n exclude = ['reuniao']\n\n def __init__(self, user=None, **kwargs):\n super(DocumentoAcessorioCreateForm, self).__init__(**kwargs)\n\n if self.instance:\n reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])\n comissao = reuniao.comissao\n comissao_pk = comissao.id\n documentos = reuniao.documentoacessorio_set.all()\n return self.create_documentoacessorio()\n\n\n def create_documentoacessorio(self):\n reuniao = Reuniao.objects.get(id=self.initial['parent_pk'])\n\n\nclass DocumentoAcessorioEditForm(forms.ModelForm):\n\n parent_pk = forms.CharField(required=False) # widget=forms.HiddenInput())\n\n class Meta:\n model = DocumentoAcessorio\n fields = ['nome', 'data', 'autor', 'ementa',\n 'indexacao', 'arquivo']\n\n def __init__(self, user=None, **kwargs):\n super(DocumentoAcessorioEditForm, self).__init__(**kwargs)\n", "path": "sapl/comissoes/forms.py"}]}
| 3,939 | 145 |
gh_patches_debug_30455
|
rasdani/github-patches
|
git_diff
|
pytorch__pytorch-965
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Sparse and CosineLoss only support Float on CUDA
These two lines introduce bugs and as @fmassa pointed out, have to be changed to `.is_cuda`
https://github.com/pytorch/pytorch/blob/79f5bf84e54d57a4c81912aeedb0b8a27f97c27e/torch/nn/_functions/loss.py#L13
https://github.com/pytorch/pytorch/blob/28220134371bf9944412e6d232ca20b827a849b4/torch/nn/_functions/thnn/sparse.py#L80
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torch/nn/_functions/thnn/sparse.py`
Content:
```
1 import torch
2 from torch import sparse
3 from torch.autograd.function import Function
4 from torch._thnn import type2backend
5
6 from . import _all_functions
7
8
9 class Embedding(Function):
10
11 def __init__(self, padding_idx, max_norm, norm_type, scale_grad_by_freq,
12 sparse=False):
13 super(Embedding, self).__init__()
14 self.padding_idx = padding_idx
15 self.max_norm = max_norm
16 self.norm_type = norm_type
17 self.scale_grad_by_freq = scale_grad_by_freq
18 self._indices = None
19 self.sparse = sparse
20
21 def _renorm(self, indices, weight):
22 if indices.dim() == 2:
23 indices = indices.view(-1)
24
25 self._backend.LookupTable_renorm(
26 self._backend.library_state,
27 indices,
28 weight,
29 self.max_norm,
30 self.norm_type
31 )
32
33 def _make_sparse(self, indices, tensor_type):
34 i = torch.LongTensor(2, indices.numel())
35 v = torch.ones(indices.numel())
36 i[1].copy_(torch.range(0, indices.numel() - 1))
37 i[0].copy_(indices)
38 SparseTensor = getattr(sparse, tensor_type.__name__)
39 return SparseTensor(i, v, torch.Size(
40 [self._weight_size[0], indices.numel()])).contiguous()
41
42 def forward(self, indices, weight):
43 assert indices.dim() <= 2
44 assert not self.needs_input_grad[0], "Embedding doesn't " \
45 "compute the gradient w.r.t. the indices"
46
47 self._backend = type2backend[type(weight)]
48 self._weight_size = weight.size()
49
50 if not indices.is_contiguous():
51 self._indices = indices.contiguous()
52 indices = self._indices
53 else:
54 self.save_for_backward(indices)
55
56 output = weight.new()
57 if self.max_norm is not None:
58 self._renorm(indices, weight)
59
60 if indices.dim() == 1:
61 output = torch.index_select(weight, 0, indices)
62 else:
63 output = torch.index_select(weight, 0, indices.view(-1))
64 output = output.view(indices.size(0), indices.size(1), weight.size(1))
65
66 return output
67
68 def backward(self, grad_output):
69 if self._indices is not None:
70 indices = self._indices
71 else:
72 indices, = self.saved_tensors
73
74 grad_output = grad_output.contiguous()
75 if not self.sparse:
76 if indices.dim() == 2:
77 indices = indices.view(-1)
78
79 with torch.cuda.device_of(grad_output):
80 if torch.typename(grad_output) == 'torch.cuda.FloatTensor':
81 _sorted = torch.cuda.LongTensor()
82 _indices = torch.cuda.LongTensor()
83 _count = torch.cuda.LongTensor()
84 else:
85 _count = torch.IntTensor()
86 _sorted = _indices = None
87
88 # TODO: sparse updates...
89 grad_weight = grad_output.new(self._weight_size).zero_()
90 self._backend.LookupTable_accGradParameters(
91 self._backend.library_state,
92 indices,
93 grad_output,
94 grad_weight,
95 _count,
96 _sorted,
97 _indices,
98 self.scale_grad_by_freq,
99 self.padding_idx,
100 1
101 )
102 else:
103 sp = self._make_sparse(indices, type(grad_output))
104 go = grad_output.view(-1, grad_output.size()[-1])
105 grad_weight = torch.smm(sp, go)
106 return None, grad_weight
107
108
109 _all_functions.append(Embedding)
110
```
Path: `torch/nn/_functions/loss.py`
Content:
```
1 import torch
2 from torch.autograd import Function
3
4
5 class CosineEmbeddingLoss(Function):
6
7 def __init__(self, margin=0, size_average=True):
8 super(CosineEmbeddingLoss, self).__init__()
9 self.margin = margin
10 self.size_average = size_average
11
12 def _new_idx(self, input):
13 if torch.typename(input) == 'torch.cuda.FloatTensor':
14 return torch.cuda.ByteTensor()
15 else:
16 return torch.ByteTensor()
17
18 def forward(self, input1, input2, y):
19 self.w1 = input1.new()
20 self.w22 = input1.new()
21 self.w = input1.new()
22 self.w32 = input1.new()
23 self._outputs = input1.new()
24
25 _idx = self._new_idx(input1)
26
27 buffer = torch.mul(input1, input2)
28 torch.sum(buffer, 1, out=self.w1)
29
30 epsilon = 1e-12
31 torch.mul(input1, input1, out=buffer)
32 torch.sum(buffer, 1, out=self.w22).add_(epsilon)
33
34 self._outputs.resize_as_(self.w22).fill_(1)
35 torch.div(self._outputs, self.w22, out=self.w22)
36 self.w.resize_as_(self.w22).copy_(self.w22)
37
38 torch.mul(input2, input2, out=buffer)
39 torch.sum(buffer, 1, out=self.w32).add_(epsilon)
40 torch.div(self._outputs, self.w32, out=self.w32)
41 self.w.mul_(self.w32)
42 self.w.sqrt_()
43
44 torch.mul(self.w1, self.w, out=self._outputs)
45 self._outputs = self._outputs.select(1, 0)
46
47 torch.eq(y, -1, out=_idx)
48 self._outputs[_idx] = self._outputs[_idx].add_(-self.margin).clamp_(min=0)
49 torch.eq(y, 1, out=_idx)
50 self._outputs[_idx] = self._outputs[_idx].mul_(-1).add_(1)
51
52 output = self._outputs.sum()
53
54 if self.size_average:
55 output = output / y.size(0)
56
57 self.save_for_backward(input1, input2, y)
58 return input1.new((output,))
59
60 def backward(self, grad_output):
61 v1, v2, y = self.saved_tensors
62
63 buffer = v1.new()
64 _idx = self._new_idx(v1)
65
66 gw1 = grad_output.new()
67 gw2 = grad_output.new()
68 gw1.resize_as_(v1).copy_(v2)
69 gw2.resize_as_(v1).copy_(v1)
70
71 torch.mul(self.w1, self.w22, out=buffer)
72 gw1.addcmul_(-1, buffer.expand_as(v1), v1)
73 gw1.mul_(self.w.expand_as(v1))
74
75 torch.mul(self.w1, self.w32, out=buffer)
76 gw2.addcmul_(-1, buffer.expand_as(v1), v2)
77 gw2.mul_(self.w.expand_as(v1))
78
79 torch.le(self._outputs, 0, out=_idx)
80 _idx = _idx.view(-1, 1).expand(gw1.size())
81 gw1[_idx] = 0
82 gw2[_idx] = 0
83
84 torch.eq(y, 1, out=_idx)
85 _idx = _idx.view(-1, 1).expand(gw2.size())
86 gw1[_idx] = gw1[_idx].mul_(-1)
87 gw2[_idx] = gw2[_idx].mul_(-1)
88
89 if self.size_average:
90 gw1.div_(y.size(0))
91 gw2.div_(y.size(0))
92
93 if grad_output[0] != 1:
94 gw1.mul_(grad_output)
95 gw2.mul_(grad_output)
96
97 return gw1, gw2, None
98
99
100 class HingeEmbeddingLoss(Function):
101
102 def __init__(self, margin=1, size_average=True):
103 super(HingeEmbeddingLoss, self).__init__()
104 self.margin = margin
105 self.size_average = size_average
106
107 def forward(self, input, target):
108 buffer = input.new()
109 buffer.resize_as_(input).copy_(input)
110 buffer[torch.eq(target, -1.)] = 0
111 output = buffer.sum()
112
113 buffer.fill_(self.margin).add_(-1, input)
114 buffer.clamp_(min=0)
115 buffer[torch.eq(target, 1.)] = 0
116 output += buffer.sum()
117
118 if self.size_average:
119 output = output / input.nelement()
120
121 self.save_for_backward(input, target)
122 return input.new((output,))
123
124 def backward(self, grad_output):
125 input, target = self.saved_tensors
126 grad_input = input.new().resize_as_(input).copy_(target)
127 grad_input[torch.mul(torch.eq(target, -1), torch.gt(input, self.margin))] = 0
128
129 if self.size_average:
130 grad_input.mul_(1. / input.nelement())
131
132 if grad_output[0] != 1:
133 grad_input.mul_(grad_output[0])
134
135 return grad_input, None
136
137
138 class MarginRankingLoss(Function):
139
140 def __init__(self, margin=1, size_average=True):
141 super(MarginRankingLoss, self).__init__()
142 self.margin = margin
143 self.size_average = size_average
144
145 def forward(self, input1, input2, y):
146 _output = input1.clone()
147 _output.add_(-1, input2)
148 _output.mul_(-1).mul_(y)
149 _output.add_(self.margin)
150 _output.clamp_(min=0)
151 output = _output.sum()
152
153 if self.size_average:
154 output = output / y.size(0)
155
156 self.save_for_backward(input1, input2, y)
157 return input1.new((output,))
158
159 def backward(self, grad_output):
160 input1, input2, y = self.saved_tensors
161 grad_input1 = input1.new().resize_as_(input1)
162 grad_input2 = input2.new().resize_as_(input2)
163
164 dist = input1.clone()
165 dist.add_(-1, input2)
166 dist.mul_(-1).mul_(y)
167 dist.add_(self.margin)
168 mask = dist.ge(0)
169
170 grad_input1.copy_(mask)
171 grad_input1.mul_(-1).mul_(y)
172 grad_input2.copy_(mask)
173 grad_input2.mul_(y)
174
175 if self.size_average:
176 grad_input1.div_(y.size(0))
177 grad_input2.div_(y.size(0))
178
179 return grad_input1, grad_input2, None
180
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/torch/nn/_functions/loss.py b/torch/nn/_functions/loss.py
--- a/torch/nn/_functions/loss.py
+++ b/torch/nn/_functions/loss.py
@@ -9,12 +9,6 @@
self.margin = margin
self.size_average = size_average
- def _new_idx(self, input):
- if torch.typename(input) == 'torch.cuda.FloatTensor':
- return torch.cuda.ByteTensor()
- else:
- return torch.ByteTensor()
-
def forward(self, input1, input2, y):
self.w1 = input1.new()
self.w22 = input1.new()
@@ -22,7 +16,7 @@
self.w32 = input1.new()
self._outputs = input1.new()
- _idx = self._new_idx(input1)
+ _idx = input1.new().byte()
buffer = torch.mul(input1, input2)
torch.sum(buffer, 1, out=self.w1)
@@ -61,7 +55,7 @@
v1, v2, y = self.saved_tensors
buffer = v1.new()
- _idx = self._new_idx(v1)
+ _idx = v1.new().byte()
gw1 = grad_output.new()
gw2 = grad_output.new()
diff --git a/torch/nn/_functions/thnn/sparse.py b/torch/nn/_functions/thnn/sparse.py
--- a/torch/nn/_functions/thnn/sparse.py
+++ b/torch/nn/_functions/thnn/sparse.py
@@ -77,7 +77,7 @@
indices = indices.view(-1)
with torch.cuda.device_of(grad_output):
- if torch.typename(grad_output) == 'torch.cuda.FloatTensor':
+ if grad_output.is_cuda:
_sorted = torch.cuda.LongTensor()
_indices = torch.cuda.LongTensor()
_count = torch.cuda.LongTensor()
|
{"golden_diff": "diff --git a/torch/nn/_functions/loss.py b/torch/nn/_functions/loss.py\n--- a/torch/nn/_functions/loss.py\n+++ b/torch/nn/_functions/loss.py\n@@ -9,12 +9,6 @@\n self.margin = margin\n self.size_average = size_average\n \n- def _new_idx(self, input):\n- if torch.typename(input) == 'torch.cuda.FloatTensor':\n- return torch.cuda.ByteTensor()\n- else:\n- return torch.ByteTensor()\n-\n def forward(self, input1, input2, y):\n self.w1 = input1.new()\n self.w22 = input1.new()\n@@ -22,7 +16,7 @@\n self.w32 = input1.new()\n self._outputs = input1.new()\n \n- _idx = self._new_idx(input1)\n+ _idx = input1.new().byte()\n \n buffer = torch.mul(input1, input2)\n torch.sum(buffer, 1, out=self.w1)\n@@ -61,7 +55,7 @@\n v1, v2, y = self.saved_tensors\n \n buffer = v1.new()\n- _idx = self._new_idx(v1)\n+ _idx = v1.new().byte()\n \n gw1 = grad_output.new()\n gw2 = grad_output.new()\ndiff --git a/torch/nn/_functions/thnn/sparse.py b/torch/nn/_functions/thnn/sparse.py\n--- a/torch/nn/_functions/thnn/sparse.py\n+++ b/torch/nn/_functions/thnn/sparse.py\n@@ -77,7 +77,7 @@\n indices = indices.view(-1)\n \n with torch.cuda.device_of(grad_output):\n- if torch.typename(grad_output) == 'torch.cuda.FloatTensor':\n+ if grad_output.is_cuda:\n _sorted = torch.cuda.LongTensor()\n _indices = torch.cuda.LongTensor()\n _count = torch.cuda.LongTensor()\n", "issue": "Sparse and CosineLoss only support Float on CUDA\nThese two lines introduce bugs and as @fmassa pointed out, have to be changed to `.is_cuda`\r\nhttps://github.com/pytorch/pytorch/blob/79f5bf84e54d57a4c81912aeedb0b8a27f97c27e/torch/nn/_functions/loss.py#L13\r\nhttps://github.com/pytorch/pytorch/blob/28220134371bf9944412e6d232ca20b827a849b4/torch/nn/_functions/thnn/sparse.py#L80\n", "before_files": [{"content": "import torch\nfrom torch import sparse\nfrom torch.autograd.function import Function\nfrom torch._thnn import type2backend\n\nfrom . import _all_functions\n\n\nclass Embedding(Function):\n\n def __init__(self, padding_idx, max_norm, norm_type, scale_grad_by_freq,\n sparse=False):\n super(Embedding, self).__init__()\n self.padding_idx = padding_idx\n self.max_norm = max_norm\n self.norm_type = norm_type\n self.scale_grad_by_freq = scale_grad_by_freq\n self._indices = None\n self.sparse = sparse\n\n def _renorm(self, indices, weight):\n if indices.dim() == 2:\n indices = indices.view(-1)\n\n self._backend.LookupTable_renorm(\n self._backend.library_state,\n indices,\n weight,\n self.max_norm,\n self.norm_type\n )\n\n def _make_sparse(self, indices, tensor_type):\n i = torch.LongTensor(2, indices.numel())\n v = torch.ones(indices.numel())\n i[1].copy_(torch.range(0, indices.numel() - 1))\n i[0].copy_(indices)\n SparseTensor = getattr(sparse, tensor_type.__name__)\n return SparseTensor(i, v, torch.Size(\n [self._weight_size[0], indices.numel()])).contiguous()\n\n def forward(self, indices, weight):\n assert indices.dim() <= 2\n assert not self.needs_input_grad[0], \"Embedding doesn't \" \\\n \"compute the gradient w.r.t. the indices\"\n\n self._backend = type2backend[type(weight)]\n self._weight_size = weight.size()\n\n if not indices.is_contiguous():\n self._indices = indices.contiguous()\n indices = self._indices\n else:\n self.save_for_backward(indices)\n\n output = weight.new()\n if self.max_norm is not None:\n self._renorm(indices, weight)\n\n if indices.dim() == 1:\n output = torch.index_select(weight, 0, indices)\n else:\n output = torch.index_select(weight, 0, indices.view(-1))\n output = output.view(indices.size(0), indices.size(1), weight.size(1))\n\n return output\n\n def backward(self, grad_output):\n if self._indices is not None:\n indices = self._indices\n else:\n indices, = self.saved_tensors\n\n grad_output = grad_output.contiguous()\n if not self.sparse:\n if indices.dim() == 2:\n indices = indices.view(-1)\n\n with torch.cuda.device_of(grad_output):\n if torch.typename(grad_output) == 'torch.cuda.FloatTensor':\n _sorted = torch.cuda.LongTensor()\n _indices = torch.cuda.LongTensor()\n _count = torch.cuda.LongTensor()\n else:\n _count = torch.IntTensor()\n _sorted = _indices = None\n\n # TODO: sparse updates...\n grad_weight = grad_output.new(self._weight_size).zero_()\n self._backend.LookupTable_accGradParameters(\n self._backend.library_state,\n indices,\n grad_output,\n grad_weight,\n _count,\n _sorted,\n _indices,\n self.scale_grad_by_freq,\n self.padding_idx,\n 1\n )\n else:\n sp = self._make_sparse(indices, type(grad_output))\n go = grad_output.view(-1, grad_output.size()[-1])\n grad_weight = torch.smm(sp, go)\n return None, grad_weight\n\n\n_all_functions.append(Embedding)\n", "path": "torch/nn/_functions/thnn/sparse.py"}, {"content": "import torch\nfrom torch.autograd import Function\n\n\nclass CosineEmbeddingLoss(Function):\n\n def __init__(self, margin=0, size_average=True):\n super(CosineEmbeddingLoss, self).__init__()\n self.margin = margin\n self.size_average = size_average\n\n def _new_idx(self, input):\n if torch.typename(input) == 'torch.cuda.FloatTensor':\n return torch.cuda.ByteTensor()\n else:\n return torch.ByteTensor()\n\n def forward(self, input1, input2, y):\n self.w1 = input1.new()\n self.w22 = input1.new()\n self.w = input1.new()\n self.w32 = input1.new()\n self._outputs = input1.new()\n\n _idx = self._new_idx(input1)\n\n buffer = torch.mul(input1, input2)\n torch.sum(buffer, 1, out=self.w1)\n\n epsilon = 1e-12\n torch.mul(input1, input1, out=buffer)\n torch.sum(buffer, 1, out=self.w22).add_(epsilon)\n\n self._outputs.resize_as_(self.w22).fill_(1)\n torch.div(self._outputs, self.w22, out=self.w22)\n self.w.resize_as_(self.w22).copy_(self.w22)\n\n torch.mul(input2, input2, out=buffer)\n torch.sum(buffer, 1, out=self.w32).add_(epsilon)\n torch.div(self._outputs, self.w32, out=self.w32)\n self.w.mul_(self.w32)\n self.w.sqrt_()\n\n torch.mul(self.w1, self.w, out=self._outputs)\n self._outputs = self._outputs.select(1, 0)\n\n torch.eq(y, -1, out=_idx)\n self._outputs[_idx] = self._outputs[_idx].add_(-self.margin).clamp_(min=0)\n torch.eq(y, 1, out=_idx)\n self._outputs[_idx] = self._outputs[_idx].mul_(-1).add_(1)\n\n output = self._outputs.sum()\n\n if self.size_average:\n output = output / y.size(0)\n\n self.save_for_backward(input1, input2, y)\n return input1.new((output,))\n\n def backward(self, grad_output):\n v1, v2, y = self.saved_tensors\n\n buffer = v1.new()\n _idx = self._new_idx(v1)\n\n gw1 = grad_output.new()\n gw2 = grad_output.new()\n gw1.resize_as_(v1).copy_(v2)\n gw2.resize_as_(v1).copy_(v1)\n\n torch.mul(self.w1, self.w22, out=buffer)\n gw1.addcmul_(-1, buffer.expand_as(v1), v1)\n gw1.mul_(self.w.expand_as(v1))\n\n torch.mul(self.w1, self.w32, out=buffer)\n gw2.addcmul_(-1, buffer.expand_as(v1), v2)\n gw2.mul_(self.w.expand_as(v1))\n\n torch.le(self._outputs, 0, out=_idx)\n _idx = _idx.view(-1, 1).expand(gw1.size())\n gw1[_idx] = 0\n gw2[_idx] = 0\n\n torch.eq(y, 1, out=_idx)\n _idx = _idx.view(-1, 1).expand(gw2.size())\n gw1[_idx] = gw1[_idx].mul_(-1)\n gw2[_idx] = gw2[_idx].mul_(-1)\n\n if self.size_average:\n gw1.div_(y.size(0))\n gw2.div_(y.size(0))\n\n if grad_output[0] != 1:\n gw1.mul_(grad_output)\n gw2.mul_(grad_output)\n\n return gw1, gw2, None\n\n\nclass HingeEmbeddingLoss(Function):\n\n def __init__(self, margin=1, size_average=True):\n super(HingeEmbeddingLoss, self).__init__()\n self.margin = margin\n self.size_average = size_average\n\n def forward(self, input, target):\n buffer = input.new()\n buffer.resize_as_(input).copy_(input)\n buffer[torch.eq(target, -1.)] = 0\n output = buffer.sum()\n\n buffer.fill_(self.margin).add_(-1, input)\n buffer.clamp_(min=0)\n buffer[torch.eq(target, 1.)] = 0\n output += buffer.sum()\n\n if self.size_average:\n output = output / input.nelement()\n\n self.save_for_backward(input, target)\n return input.new((output,))\n\n def backward(self, grad_output):\n input, target = self.saved_tensors\n grad_input = input.new().resize_as_(input).copy_(target)\n grad_input[torch.mul(torch.eq(target, -1), torch.gt(input, self.margin))] = 0\n\n if self.size_average:\n grad_input.mul_(1. / input.nelement())\n\n if grad_output[0] != 1:\n grad_input.mul_(grad_output[0])\n\n return grad_input, None\n\n\nclass MarginRankingLoss(Function):\n\n def __init__(self, margin=1, size_average=True):\n super(MarginRankingLoss, self).__init__()\n self.margin = margin\n self.size_average = size_average\n\n def forward(self, input1, input2, y):\n _output = input1.clone()\n _output.add_(-1, input2)\n _output.mul_(-1).mul_(y)\n _output.add_(self.margin)\n _output.clamp_(min=0)\n output = _output.sum()\n\n if self.size_average:\n output = output / y.size(0)\n\n self.save_for_backward(input1, input2, y)\n return input1.new((output,))\n\n def backward(self, grad_output):\n input1, input2, y = self.saved_tensors\n grad_input1 = input1.new().resize_as_(input1)\n grad_input2 = input2.new().resize_as_(input2)\n\n dist = input1.clone()\n dist.add_(-1, input2)\n dist.mul_(-1).mul_(y)\n dist.add_(self.margin)\n mask = dist.ge(0)\n\n grad_input1.copy_(mask)\n grad_input1.mul_(-1).mul_(y)\n grad_input2.copy_(mask)\n grad_input2.mul_(y)\n\n if self.size_average:\n grad_input1.div_(y.size(0))\n grad_input2.div_(y.size(0))\n\n return grad_input1, grad_input2, None\n", "path": "torch/nn/_functions/loss.py"}], "after_files": [{"content": "import torch\nfrom torch import sparse\nfrom torch.autograd.function import Function\nfrom torch._thnn import type2backend\n\nfrom . import _all_functions\n\n\nclass Embedding(Function):\n\n def __init__(self, padding_idx, max_norm, norm_type, scale_grad_by_freq,\n sparse=False):\n super(Embedding, self).__init__()\n self.padding_idx = padding_idx\n self.max_norm = max_norm\n self.norm_type = norm_type\n self.scale_grad_by_freq = scale_grad_by_freq\n self._indices = None\n self.sparse = sparse\n\n def _renorm(self, indices, weight):\n if indices.dim() == 2:\n indices = indices.view(-1)\n\n self._backend.LookupTable_renorm(\n self._backend.library_state,\n indices,\n weight,\n self.max_norm,\n self.norm_type\n )\n\n def _make_sparse(self, indices, tensor_type):\n i = torch.LongTensor(2, indices.numel())\n v = torch.ones(indices.numel())\n i[1].copy_(torch.range(0, indices.numel() - 1))\n i[0].copy_(indices)\n SparseTensor = getattr(sparse, tensor_type.__name__)\n return SparseTensor(i, v, torch.Size(\n [self._weight_size[0], indices.numel()])).contiguous()\n\n def forward(self, indices, weight):\n assert indices.dim() <= 2\n assert not self.needs_input_grad[0], \"Embedding doesn't \" \\\n \"compute the gradient w.r.t. the indices\"\n\n self._backend = type2backend[type(weight)]\n self._weight_size = weight.size()\n\n if not indices.is_contiguous():\n self._indices = indices.contiguous()\n indices = self._indices\n else:\n self.save_for_backward(indices)\n\n output = weight.new()\n if self.max_norm is not None:\n self._renorm(indices, weight)\n\n if indices.dim() == 1:\n output = torch.index_select(weight, 0, indices)\n else:\n output = torch.index_select(weight, 0, indices.view(-1))\n output = output.view(indices.size(0), indices.size(1), weight.size(1))\n\n return output\n\n def backward(self, grad_output):\n if self._indices is not None:\n indices = self._indices\n else:\n indices, = self.saved_tensors\n\n grad_output = grad_output.contiguous()\n if not self.sparse:\n if indices.dim() == 2:\n indices = indices.view(-1)\n\n with torch.cuda.device_of(grad_output):\n if grad_output.is_cuda:\n _sorted = torch.cuda.LongTensor()\n _indices = torch.cuda.LongTensor()\n _count = torch.cuda.LongTensor()\n else:\n _count = torch.IntTensor()\n _sorted = _indices = None\n\n # TODO: sparse updates...\n grad_weight = grad_output.new(self._weight_size).zero_()\n self._backend.LookupTable_accGradParameters(\n self._backend.library_state,\n indices,\n grad_output,\n grad_weight,\n _count,\n _sorted,\n _indices,\n self.scale_grad_by_freq,\n self.padding_idx,\n 1\n )\n else:\n sp = self._make_sparse(indices, type(grad_output))\n go = grad_output.view(-1, grad_output.size()[-1])\n grad_weight = torch.smm(sp, go)\n return None, grad_weight\n\n\n_all_functions.append(Embedding)\n", "path": "torch/nn/_functions/thnn/sparse.py"}, {"content": "import torch\nfrom torch.autograd import Function\n\n\nclass CosineEmbeddingLoss(Function):\n\n def __init__(self, margin=0, size_average=True):\n super(CosineEmbeddingLoss, self).__init__()\n self.margin = margin\n self.size_average = size_average\n\n def forward(self, input1, input2, y):\n self.w1 = input1.new()\n self.w22 = input1.new()\n self.w = input1.new()\n self.w32 = input1.new()\n self._outputs = input1.new()\n\n _idx = input1.new().byte()\n\n buffer = torch.mul(input1, input2)\n torch.sum(buffer, 1, out=self.w1)\n\n epsilon = 1e-12\n torch.mul(input1, input1, out=buffer)\n torch.sum(buffer, 1, out=self.w22).add_(epsilon)\n\n self._outputs.resize_as_(self.w22).fill_(1)\n torch.div(self._outputs, self.w22, out=self.w22)\n self.w.resize_as_(self.w22).copy_(self.w22)\n\n torch.mul(input2, input2, out=buffer)\n torch.sum(buffer, 1, out=self.w32).add_(epsilon)\n torch.div(self._outputs, self.w32, out=self.w32)\n self.w.mul_(self.w32)\n self.w.sqrt_()\n\n torch.mul(self.w1, self.w, out=self._outputs)\n self._outputs = self._outputs.select(1, 0)\n\n torch.eq(y, -1, out=_idx)\n self._outputs[_idx] = self._outputs[_idx].add_(-self.margin).clamp_(min=0)\n torch.eq(y, 1, out=_idx)\n self._outputs[_idx] = self._outputs[_idx].mul_(-1).add_(1)\n\n output = self._outputs.sum()\n\n if self.size_average:\n output = output / y.size(0)\n\n self.save_for_backward(input1, input2, y)\n return input1.new((output,))\n\n def backward(self, grad_output):\n v1, v2, y = self.saved_tensors\n\n buffer = v1.new()\n _idx = v1.new().byte()\n\n gw1 = grad_output.new()\n gw2 = grad_output.new()\n gw1.resize_as_(v1).copy_(v2)\n gw2.resize_as_(v1).copy_(v1)\n\n torch.mul(self.w1, self.w22, out=buffer)\n gw1.addcmul_(-1, buffer.expand_as(v1), v1)\n gw1.mul_(self.w.expand_as(v1))\n\n torch.mul(self.w1, self.w32, out=buffer)\n gw2.addcmul_(-1, buffer.expand_as(v1), v2)\n gw2.mul_(self.w.expand_as(v1))\n\n torch.le(self._outputs, 0, out=_idx)\n _idx = _idx.view(-1, 1).expand(gw1.size())\n gw1[_idx] = 0\n gw2[_idx] = 0\n\n torch.eq(y, 1, out=_idx)\n _idx = _idx.view(-1, 1).expand(gw2.size())\n gw1[_idx] = gw1[_idx].mul_(-1)\n gw2[_idx] = gw2[_idx].mul_(-1)\n\n if self.size_average:\n gw1.div_(y.size(0))\n gw2.div_(y.size(0))\n\n if grad_output[0] != 1:\n gw1.mul_(grad_output)\n gw2.mul_(grad_output)\n\n return gw1, gw2, None\n\n\nclass HingeEmbeddingLoss(Function):\n\n def __init__(self, margin=1, size_average=True):\n super(HingeEmbeddingLoss, self).__init__()\n self.margin = margin\n self.size_average = size_average\n\n def forward(self, input, target):\n buffer = input.new()\n buffer.resize_as_(input).copy_(input)\n buffer[torch.eq(target, -1.)] = 0\n output = buffer.sum()\n\n buffer.fill_(self.margin).add_(-1, input)\n buffer.clamp_(min=0)\n buffer[torch.eq(target, 1.)] = 0\n output += buffer.sum()\n\n if self.size_average:\n output = output / input.nelement()\n\n self.save_for_backward(input, target)\n return input.new((output,))\n\n def backward(self, grad_output):\n input, target = self.saved_tensors\n grad_input = input.new().resize_as_(input).copy_(target)\n grad_input[torch.mul(torch.eq(target, -1), torch.gt(input, self.margin))] = 0\n\n if self.size_average:\n grad_input.mul_(1. / input.nelement())\n\n if grad_output[0] != 1:\n grad_input.mul_(grad_output[0])\n\n return grad_input, None\n\n\nclass MarginRankingLoss(Function):\n\n def __init__(self, margin=1, size_average=True):\n super(MarginRankingLoss, self).__init__()\n self.margin = margin\n self.size_average = size_average\n\n def forward(self, input1, input2, y):\n _output = input1.clone()\n _output.add_(-1, input2)\n _output.mul_(-1).mul_(y)\n _output.add_(self.margin)\n _output.clamp_(min=0)\n output = _output.sum()\n\n if self.size_average:\n output = output / y.size(0)\n\n self.save_for_backward(input1, input2, y)\n return input1.new((output,))\n\n def backward(self, grad_output):\n input1, input2, y = self.saved_tensors\n grad_input1 = input1.new().resize_as_(input1)\n grad_input2 = input2.new().resize_as_(input2)\n\n dist = input1.clone()\n dist.add_(-1, input2)\n dist.mul_(-1).mul_(y)\n dist.add_(self.margin)\n mask = dist.ge(0)\n\n grad_input1.copy_(mask)\n grad_input1.mul_(-1).mul_(y)\n grad_input2.copy_(mask)\n grad_input2.mul_(y)\n\n if self.size_average:\n grad_input1.div_(y.size(0))\n grad_input2.div_(y.size(0))\n\n return grad_input1, grad_input2, None\n", "path": "torch/nn/_functions/loss.py"}]}
| 3,367 | 437 |
gh_patches_debug_6502
|
rasdani/github-patches
|
git_diff
|
scikit-hep__awkward-1832
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Requests from review of #1776 (merging docs into main).
### Which documentation?
Tutorials site
### What needs to be documented?
This is a follow-up of https://github.com/scikit-hep/awkward/pull/1776#pullrequestreview-1144743439, which is an approval of merging the `docs` branch into `main` as-is, but with some to-do items to be picked up later. This issue copies those into a bulleted list.
- [ ] Vertical margin around the badges/buttons on the front page so that they are still distinct when the window is narrow or someone is looking at it on their phone.
- [ ] Helpful messages on the "Try it in your browser" page (see below).
- [ ] Better sizing of Retrolite on small screens
- [x] All of the sections containing `ak.*` functions (everything defined in `src/awkward/operations/`) should be contiguous, before the section on "Low-level layouts". This is so that someone searching for an operation they can't quite remember the name of is less likely to miss it. The "Converting between backends", "Indexing and grouping", and "Copying and packing arrays" belong to this category.
- [x] Remove the "Additional documentation" section (which is unnecessary and out of date, including the diagram).
- [x] The kernel specification should be generated and included.
- [x] The AwkwardForth sub-sections may be suppressed on `reference/index.html` and the kernel specification sub-sections _have to_ be suppressed. They would flood that page.
That's it!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs-sphinx/conf.py`
Content:
```
1 # Configuration file for the Sphinx documentation builder.
2 #
3 # This file only contains a selection of the most common options. For a full
4 # list see the documentation:
5 # https://www.sphinx-doc.org/en/master/usage/configuration.html
6
7 # -- Path setup --------------------------------------------------------------
8
9 # If extensions (or modules to document with autodoc) are in another directory,
10 # add these directories to sys.path here. If the directory is relative to the
11 # documentation root, use os.path.abspath to make it absolute, like shown here.
12 #
13 import os
14 import json
15 import datetime
16 import runpy
17 import sys
18 import subprocess
19 import pathlib
20
21 # -- Project information -----------------------------------------------------
22
23 project = "Awkward Array"
24 copyright = f"{datetime.datetime.now().year}, Awkward Array development team"
25 author = "Jim Pivarski"
26
27 # -- General configuration ---------------------------------------------------
28
29 # Add any Sphinx extension module names here, as strings. They can be
30 # extensions coming with Sphinx (named "sphinx.ext.*") or your custom
31 # ones.
32 extensions = [
33 "sphinx_copybutton",
34 "sphinx_design",
35 "sphinx_external_toc",
36 "sphinx.ext.intersphinx",
37 "myst_nb",
38 # Preserve old links
39 "sphinx_reredirects",
40 "jupyterlite_sphinx",
41 ]
42
43 # Add any paths that contain templates here, relative to this directory.
44 templates_path = ["_templates"]
45
46 # List of patterns, relative to source directory, that match files and
47 # directories to ignore when looking for source files.
48 # This pattern also affects html_static_path and html_extra_path.
49 exclude_patterns = ["_build", "_templates", "Thumbs.db", "jupyter_execute", ".*"]
50
51 # -- Options for HTML output -------------------------------------------------
52
53 # The theme to use for HTML and HTML Help pages. See the documentation for
54 # a list of builtin themes.
55 #
56 html_context = {
57 "github_user": "scikit-hep",
58 "github_repo": "awkward",
59 # TODO: set this
60 "github_version": os.environ.get("READTHEDOCS_VERSION", "main"),
61 "doc_path": "docs-sphinx",
62 }
63 html_theme = "pydata_sphinx_theme"
64 html_show_sourcelink = True
65 html_theme_options = {
66 "logo": {
67 "image_light": "image/logo-300px.png",
68 "image_dark": "image/logo-300px-white.png",
69 },
70 "github_url": "https://github.com/scikit-hep/awkward",
71 # Add light/dark mode and documentation version switcher:
72 "navbar_end": ["theme-switcher", "navbar-icon-links"],
73 "footer_items": ["copyright", "sphinx-version", "funding"],
74 "icon_links": [
75 {
76 "name": "PyPI",
77 "url": "https://pypi.org/project/awkward",
78 "icon": "fab fa-python",
79 }
80 ],
81 "use_edit_page_button": True,
82 "external_links": [
83 {
84 "name": "Contributor guide",
85 "url": "https://github.com/scikit-hep/awkward/blob/main/CONTRIBUTING.md",
86 },
87 {
88 "name": "Release history",
89 "url": "https://github.com/scikit-hep/awkward/releases",
90 },
91 ],
92 "analytics": {
93 "plausible_analytics_domain": "awkward-array.org",
94 "plausible_analytics_url": "https://views.scientific-python.org/js/plausible.js"
95 }
96 }
97
98 # Add any paths that contain custom static files (such as style sheets) here,
99 # relative to this directory. They are copied after the builtin static files,
100 # so a file named "default.css" will overwrite the builtin "default.css".
101 html_static_path = ["_static"]
102 html_css_files = ["css/awkward.css"]
103
104 # MyST settings
105 myst_enable_extensions = [
106 "colon_fence",
107 ]
108
109 nb_execution_mode = "cache"
110 nb_execution_raise_on_error = True
111 # unpkg is currently _very_ slow
112 nb_ipywidgets_js = {
113 # Load RequireJS, used by the IPywidgets for dependency management
114 "https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js": {
115 "integrity": "sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=",
116 "crossorigin": "anonymous",
117 },
118 # Load IPywidgets bundle for embedding.
119 "https://cdn.jsdelivr.net/npm/@jupyter-widgets/[email protected]/dist/embed-amd.js": {
120 "data-jupyter-widgets-cdn": "https://cdn.jsdelivr.net/npm/",
121 "crossorigin": "anonymous",
122 },
123 }
124 # Additional stuff
125 master_doc = "index"
126
127 # Cross-reference existing Python objects
128 intersphinx_mapping = {
129 "python": ("https://docs.python.org/3/", None),
130 "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
131 "numpy": ("https://numpy.org/doc/stable", None),
132 "scipy": ("https://docs.scipy.org/doc/scipy", None),
133 "numba": ("https://numba.pydata.org/numba-doc/latest", None),
134 "arrow": ("https://arrow.apache.org/docs/", None),
135 "jax": ("https://jax.readthedocs.io/en/latest", None),
136 }
137
138 # Preserve legacy routes
139 with open("redirects.json") as f:
140 redirects = json.load(f)
141
142 redirect_html_template_file = "_templates/redirect.html"
143
144 # JupyterLite configuration
145 jupyterlite_dir = "./lite"
146 # Don't override ipynb format
147 jupyterlite_bind_ipynb_suffix = False
148 # We've disabled localstorage, so we must provide the contents explicitly
149 jupyterlite_contents = ["getting-started/demo/*"]
150
151 HERE = pathlib.Path(__file__).parent
152
153 # Generate C++ bindings
154 subprocess.check_call(
155 ["doxygen", str(HERE.parent / "docs-doxygen" / "Doxyfile")], cwd=HERE.parent
156 )
157
158 # Generate Python docstrings
159 runpy.run_path(HERE / "prepare_docstrings.py", run_name="__main__")
160
161 # Generate kernel docs
162 runpy.run_path(HERE.parent / "dev" / "generate-kerneldocs.py", run_name="__main__")
163
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docs-sphinx/conf.py b/docs-sphinx/conf.py
--- a/docs-sphinx/conf.py
+++ b/docs-sphinx/conf.py
@@ -160,3 +160,15 @@
# Generate kernel docs
runpy.run_path(HERE.parent / "dev" / "generate-kerneldocs.py", run_name="__main__")
+
+
+# Sphinx doesn't usually want content to fit the screen, so we hack the styles for this page
+def install_jupyterlite_styles(app, pagename, templatename, context, event_arg) -> None:
+ if pagename != "getting-started/try-awkward-array":
+ return
+
+ app.add_css_file("css/try-awkward-array.css")
+
+
+def setup(app):
+ app.connect('html-page-context', install_jupyterlite_styles)
|
{"golden_diff": "diff --git a/docs-sphinx/conf.py b/docs-sphinx/conf.py\n--- a/docs-sphinx/conf.py\n+++ b/docs-sphinx/conf.py\n@@ -160,3 +160,15 @@\n \n # Generate kernel docs\n runpy.run_path(HERE.parent / \"dev\" / \"generate-kerneldocs.py\", run_name=\"__main__\")\n+\n+\n+# Sphinx doesn't usually want content to fit the screen, so we hack the styles for this page\n+def install_jupyterlite_styles(app, pagename, templatename, context, event_arg) -> None:\n+ if pagename != \"getting-started/try-awkward-array\":\n+ return\n+\n+ app.add_css_file(\"css/try-awkward-array.css\")\n+\n+\n+def setup(app):\n+ app.connect('html-page-context', install_jupyterlite_styles)\n", "issue": "Requests from review of #1776 (merging docs into main).\n### Which documentation?\r\n\r\nTutorials site\r\n\r\n### What needs to be documented?\r\n\r\nThis is a follow-up of https://github.com/scikit-hep/awkward/pull/1776#pullrequestreview-1144743439, which is an approval of merging the `docs` branch into `main` as-is, but with some to-do items to be picked up later. This issue copies those into a bulleted list.\r\n\r\n - [ ] Vertical margin around the badges/buttons on the front page so that they are still distinct when the window is narrow or someone is looking at it on their phone.\r\n - [ ] Helpful messages on the \"Try it in your browser\" page (see below).\r\n - [ ] Better sizing of Retrolite on small screens\r\n - [x] All of the sections containing `ak.*` functions (everything defined in `src/awkward/operations/`) should be contiguous, before the section on \"Low-level layouts\". This is so that someone searching for an operation they can't quite remember the name of is less likely to miss it. The \"Converting between backends\", \"Indexing and grouping\", and \"Copying and packing arrays\" belong to this category.\r\n - [x] Remove the \"Additional documentation\" section (which is unnecessary and out of date, including the diagram).\r\n - [x] The kernel specification should be generated and included.\r\n - [x] The AwkwardForth sub-sections may be suppressed on `reference/index.html` and the kernel specification sub-sections _have to_ be suppressed. They would flood that page.\r\n\r\nThat's it!\n", "before_files": [{"content": "# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport json\nimport datetime\nimport runpy\nimport sys\nimport subprocess\nimport pathlib\n\n# -- Project information -----------------------------------------------------\n\nproject = \"Awkward Array\"\ncopyright = f\"{datetime.datetime.now().year}, Awkward Array development team\"\nauthor = \"Jim Pivarski\"\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named \"sphinx.ext.*\") or your custom\n# ones.\nextensions = [\n \"sphinx_copybutton\",\n \"sphinx_design\",\n \"sphinx_external_toc\",\n \"sphinx.ext.intersphinx\",\n \"myst_nb\",\n # Preserve old links\n \"sphinx_reredirects\",\n \"jupyterlite_sphinx\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [\"_build\", \"_templates\", \"Thumbs.db\", \"jupyter_execute\", \".*\"]\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_context = {\n \"github_user\": \"scikit-hep\",\n \"github_repo\": \"awkward\",\n # TODO: set this\n \"github_version\": os.environ.get(\"READTHEDOCS_VERSION\", \"main\"),\n \"doc_path\": \"docs-sphinx\",\n}\nhtml_theme = \"pydata_sphinx_theme\"\nhtml_show_sourcelink = True\nhtml_theme_options = {\n \"logo\": {\n \"image_light\": \"image/logo-300px.png\",\n \"image_dark\": \"image/logo-300px-white.png\",\n },\n \"github_url\": \"https://github.com/scikit-hep/awkward\",\n # Add light/dark mode and documentation version switcher:\n \"navbar_end\": [\"theme-switcher\", \"navbar-icon-links\"],\n \"footer_items\": [\"copyright\", \"sphinx-version\", \"funding\"],\n \"icon_links\": [\n {\n \"name\": \"PyPI\",\n \"url\": \"https://pypi.org/project/awkward\",\n \"icon\": \"fab fa-python\",\n }\n ],\n \"use_edit_page_button\": True,\n \"external_links\": [\n {\n \"name\": \"Contributor guide\",\n \"url\": \"https://github.com/scikit-hep/awkward/blob/main/CONTRIBUTING.md\",\n },\n {\n \"name\": \"Release history\",\n \"url\": \"https://github.com/scikit-hep/awkward/releases\",\n },\n ],\n \"analytics\": {\n \"plausible_analytics_domain\": \"awkward-array.org\",\n \"plausible_analytics_url\": \"https://views.scientific-python.org/js/plausible.js\"\n }\n}\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\nhtml_css_files = [\"css/awkward.css\"]\n\n# MyST settings\nmyst_enable_extensions = [\n \"colon_fence\",\n]\n\nnb_execution_mode = \"cache\"\nnb_execution_raise_on_error = True\n# unpkg is currently _very_ slow\nnb_ipywidgets_js = {\n # Load RequireJS, used by the IPywidgets for dependency management\n \"https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js\": {\n \"integrity\": \"sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=\",\n \"crossorigin\": \"anonymous\",\n },\n # Load IPywidgets bundle for embedding.\n \"https://cdn.jsdelivr.net/npm/@jupyter-widgets/[email protected]/dist/embed-amd.js\": {\n \"data-jupyter-widgets-cdn\": \"https://cdn.jsdelivr.net/npm/\",\n \"crossorigin\": \"anonymous\",\n },\n}\n# Additional stuff\nmaster_doc = \"index\"\n\n# Cross-reference existing Python objects\nintersphinx_mapping = {\n \"python\": (\"https://docs.python.org/3/\", None),\n \"pandas\": (\"https://pandas.pydata.org/pandas-docs/stable\", None),\n \"numpy\": (\"https://numpy.org/doc/stable\", None),\n \"scipy\": (\"https://docs.scipy.org/doc/scipy\", None),\n \"numba\": (\"https://numba.pydata.org/numba-doc/latest\", None),\n \"arrow\": (\"https://arrow.apache.org/docs/\", None),\n \"jax\": (\"https://jax.readthedocs.io/en/latest\", None),\n}\n\n# Preserve legacy routes\nwith open(\"redirects.json\") as f:\n redirects = json.load(f)\n\nredirect_html_template_file = \"_templates/redirect.html\"\n\n# JupyterLite configuration\njupyterlite_dir = \"./lite\"\n# Don't override ipynb format\njupyterlite_bind_ipynb_suffix = False\n# We've disabled localstorage, so we must provide the contents explicitly\njupyterlite_contents = [\"getting-started/demo/*\"]\n\nHERE = pathlib.Path(__file__).parent\n\n# Generate C++ bindings\nsubprocess.check_call(\n [\"doxygen\", str(HERE.parent / \"docs-doxygen\" / \"Doxyfile\")], cwd=HERE.parent\n)\n\n# Generate Python docstrings\nrunpy.run_path(HERE / \"prepare_docstrings.py\", run_name=\"__main__\")\n\n# Generate kernel docs\nrunpy.run_path(HERE.parent / \"dev\" / \"generate-kerneldocs.py\", run_name=\"__main__\")\n", "path": "docs-sphinx/conf.py"}], "after_files": [{"content": "# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport json\nimport datetime\nimport runpy\nimport sys\nimport subprocess\nimport pathlib\n\n# -- Project information -----------------------------------------------------\n\nproject = \"Awkward Array\"\ncopyright = f\"{datetime.datetime.now().year}, Awkward Array development team\"\nauthor = \"Jim Pivarski\"\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named \"sphinx.ext.*\") or your custom\n# ones.\nextensions = [\n \"sphinx_copybutton\",\n \"sphinx_design\",\n \"sphinx_external_toc\",\n \"sphinx.ext.intersphinx\",\n \"myst_nb\",\n # Preserve old links\n \"sphinx_reredirects\",\n \"jupyterlite_sphinx\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [\"_build\", \"_templates\", \"Thumbs.db\", \"jupyter_execute\", \".*\"]\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_context = {\n \"github_user\": \"scikit-hep\",\n \"github_repo\": \"awkward\",\n # TODO: set this\n \"github_version\": os.environ.get(\"READTHEDOCS_VERSION\", \"main\"),\n \"doc_path\": \"docs-sphinx\",\n}\nhtml_theme = \"pydata_sphinx_theme\"\nhtml_show_sourcelink = True\nhtml_theme_options = {\n \"logo\": {\n \"image_light\": \"image/logo-300px.png\",\n \"image_dark\": \"image/logo-300px-white.png\",\n },\n \"github_url\": \"https://github.com/scikit-hep/awkward\",\n # Add light/dark mode and documentation version switcher:\n \"navbar_end\": [\"theme-switcher\", \"navbar-icon-links\"],\n \"footer_items\": [\"copyright\", \"sphinx-version\", \"funding\"],\n \"icon_links\": [\n {\n \"name\": \"PyPI\",\n \"url\": \"https://pypi.org/project/awkward\",\n \"icon\": \"fab fa-python\",\n }\n ],\n \"use_edit_page_button\": True,\n \"external_links\": [\n {\n \"name\": \"Contributor guide\",\n \"url\": \"https://github.com/scikit-hep/awkward/blob/main/CONTRIBUTING.md\",\n },\n {\n \"name\": \"Release history\",\n \"url\": \"https://github.com/scikit-hep/awkward/releases\",\n },\n ],\n \"analytics\": {\n \"plausible_analytics_domain\": \"awkward-array.org\",\n \"plausible_analytics_url\": \"https://views.scientific-python.org/js/plausible.js\"\n }\n}\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\nhtml_css_files = [\"css/awkward.css\"]\n\n# MyST settings\nmyst_enable_extensions = [\n \"colon_fence\",\n]\n\nnb_execution_mode = \"cache\"\nnb_execution_raise_on_error = True\n# unpkg is currently _very_ slow\nnb_ipywidgets_js = {\n # Load RequireJS, used by the IPywidgets for dependency management\n \"https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js\": {\n \"integrity\": \"sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=\",\n \"crossorigin\": \"anonymous\",\n },\n # Load IPywidgets bundle for embedding.\n \"https://cdn.jsdelivr.net/npm/@jupyter-widgets/[email protected]/dist/embed-amd.js\": {\n \"data-jupyter-widgets-cdn\": \"https://cdn.jsdelivr.net/npm/\",\n \"crossorigin\": \"anonymous\",\n },\n}\n# Additional stuff\nmaster_doc = \"index\"\n\n# Cross-reference existing Python objects\nintersphinx_mapping = {\n \"python\": (\"https://docs.python.org/3/\", None),\n \"pandas\": (\"https://pandas.pydata.org/pandas-docs/stable\", None),\n \"numpy\": (\"https://numpy.org/doc/stable\", None),\n \"scipy\": (\"https://docs.scipy.org/doc/scipy\", None),\n \"numba\": (\"https://numba.pydata.org/numba-doc/latest\", None),\n \"arrow\": (\"https://arrow.apache.org/docs/\", None),\n \"jax\": (\"https://jax.readthedocs.io/en/latest\", None),\n}\n\n# Preserve legacy routes\nwith open(\"redirects.json\") as f:\n redirects = json.load(f)\n\nredirect_html_template_file = \"_templates/redirect.html\"\n\n# JupyterLite configuration\njupyterlite_dir = \"./lite\"\n# Don't override ipynb format\njupyterlite_bind_ipynb_suffix = False\n# We've disabled localstorage, so we must provide the contents explicitly\njupyterlite_contents = [\"getting-started/demo/*\"]\n\nHERE = pathlib.Path(__file__).parent\n\n# Generate C++ bindings\nsubprocess.check_call(\n [\"doxygen\", str(HERE.parent / \"docs-doxygen\" / \"Doxyfile\")], cwd=HERE.parent\n)\n\n# Generate Python docstrings\nrunpy.run_path(HERE / \"prepare_docstrings.py\", run_name=\"__main__\")\n\n# Generate kernel docs\nrunpy.run_path(HERE.parent / \"dev\" / \"generate-kerneldocs.py\", run_name=\"__main__\")\n\n\n# Sphinx doesn't usually want content to fit the screen, so we hack the styles for this page\ndef install_jupyterlite_styles(app, pagename, templatename, context, event_arg) -> None:\n if pagename != \"getting-started/try-awkward-array\":\n return\n\n app.add_css_file(\"css/try-awkward-array.css\")\n\n\ndef setup(app):\n app.connect('html-page-context', install_jupyterlite_styles)\n", "path": "docs-sphinx/conf.py"}]}
| 2,391 | 186 |
gh_patches_debug_3998
|
rasdani/github-patches
|
git_diff
|
pymodbus-dev__pymodbus-1925
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Single byte error response handling
### Versions
- Python: 3.11
- OS: Ubuntu 22.04
- Pymodbus: 3.6.2
- Modbus Hardware (if used): Sungrow Inverter
### Pymodbus Specific
- Client: tcp async
### Description
#### Current State
When I query my sungrow inverter with a register I'm not supposed to be querying, I'm gettting a malformed package and the connection is dopped.
Wireshark:

TCP Payload: 00 02 00 00 00 02 01 84 02
#### Request
This line of code assumes that `data` has more than a single element. However, `data` in my case is only b'\x84'.
The response is being assigned correctly: Exception Response(132, 4, IllegalFunction)
But in the marked line I'm getting an pymodbus.logging:General exception: index out of range
See https://github.com/pymodbus-dev/pymodbus/blob/6913e9c829009f3704f75d7cb7fff019c27c6770/pymodbus/factory.py#L351
I can fix this locally by changing
```python
response.decode(data[1:])
```
to
```python
if len(data) > 1:
response.decode(data[1:])
```
but I really have no idea what I'm doing here.
However that change allows me to handle the error super nicely on the response object:
```python
if rr.isError():
if isinstance(rr, pymodbus.pdu.ExceptionResponse):
if rr.exception_code == pymodbus.pdu.ModbusExceptions.IllegalFunction:
print("unsupported register")
```
### Code and Logs
```python
--------------------
2024-01-09 17:10:45,355 DEBUG logging:103 send: 0x0 0xd 0x0 0x0 0x0 0x6 0x1 0x4 0x13 0x90 0x0 0x2
2024-01-09 17:10:45,355 DEBUG logging:103 Adding transaction 13
2024-01-09 17:10:45,366 DEBUG logging:103 recv: 0x0 0xd 0x0 0x0 0x0 0x2 0x1 0x84 0x2 old_data: addr=None
2024-01-09 17:10:45,366 DEBUG logging:103 Processing: 0x0 0xd 0x0 0x0 0x0 0x2 0x1 0x84 0x2
2024-01-09 17:10:45,366 DEBUG logging:103 Factory Response[132]
*************** data: b'\x84'
*************** response: Exception Response(132, 4, IllegalFunction)
2024-01-09 17:10:45,366 ERROR logging:115 General exception: index out of range
2024-01-09 17:10:45,366 DEBUG logging:103 Resetting frame - Current Frame in buffer - 0x0 0xd 0x0 0x0 0x0 0x2 0x1 0x84 0x2
Fatal error: protocol.data_received() call failed.
protocol: <pymodbus.client.tcp.AsyncModbusTcpClient object at 0x7f2f1aac8e90>
transport: <_SelectorSocketTransport fd=6 read=polling write=<idle, bufsize=0>>
Traceback (most recent call last):
File "/usr/lib/python3.11/asyncio/selector_events.py", line 1009, in _read_ready__data_received
self._protocol.data_received(data)
File "/home/alex/.local/lib/python3.11/site-packages/pymodbus/transport/transport.py", line 312, in data_received
self.datagram_received(data, None)
File "/home/alex/.local/lib/python3.11/site-packages/pymodbus/transport/transport.py", line 346, in datagram_received
cut = self.callback_data(self.recv_buffer, addr=addr)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/alex/.local/lib/python3.11/site-packages/pymodbus/client/base.py", line 186, in callback_data
self.framer.processIncomingPacket(data, self._handle_response, slave=0)
File "/home/alex/.local/lib/python3.11/site-packages/pymodbus/framer/base.py", line 139, in processIncomingPacket
self.frameProcessIncomingPacket(single, callback, slave, **kwargs)
File "/home/alex/.local/lib/python3.11/site-packages/pymodbus/framer/socket_framer.py", line 147, in frameProcessIncomingPacket
self._process(callback, tid)
File "/home/alex/.local/lib/python3.11/site-packages/pymodbus/framer/socket_framer.py", line 158, in _process
raise ModbusIOException("Unable to decode request")
pymodbus.exceptions.ModbusIOException: Modbus Error: [Input/Output] Unable to decode request
2024-01-09 17:10:45,367 DEBUG logging:103 Connection lost comm due to Modbus Error: [Input/Output] Unable to decode request
2024-01-09 17:10:45,367 DEBUG logging:103 Getting transaction 13
2024-01-09 17:10:45,368 DEBUG logging:103 Wait comm 100.0 ms before reconnecting.
Traceback (most recent call last):
File "/home/alex/homeassistant-sungrow/custom_components/sungrow/core/modbus.py", line 325, in _read_range
rr = await self._client.read_input_registers(address_start - 1, count=address_count, slave=self._slave) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/alex/.local/lib/python3.11/site-packages/pymodbus/client/base.py", line 167, in async_execute
resp = await asyncio.wait_for(
^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib/python3.11/asyncio/tasks.py", line 479, in wait_for
return fut.result()
^^^^^^^^^^^^
pymodbus.exceptions.ConnectionException: Modbus Error: [Connection] Connection lost during request
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pymodbus/framer/socket_framer.py`
Content:
```
1 """Socket framer."""
2 # pylint: disable=missing-type-doc
3 import struct
4
5 from pymodbus.exceptions import (
6 InvalidMessageReceivedException,
7 ModbusIOException,
8 )
9 from pymodbus.framer.base import SOCKET_FRAME_HEADER, ModbusFramer
10 from pymodbus.logging import Log
11
12
13 # --------------------------------------------------------------------------- #
14 # Modbus TCP Message
15 # --------------------------------------------------------------------------- #
16
17
18 class ModbusSocketFramer(ModbusFramer):
19 """Modbus Socket Frame controller.
20
21 Before each modbus TCP message is an MBAP header which is used as a
22 message frame. It allows us to easily separate messages as follows::
23
24 [ MBAP Header ] [ Function Code] [ Data ] \
25 [ tid ][ pid ][ length ][ uid ]
26 2b 2b 2b 1b 1b Nb
27
28 while len(message) > 0:
29 tid, pid, length`, uid = struct.unpack(">HHHB", message)
30 request = message[0:7 + length - 1`]
31 message = [7 + length - 1:]
32
33 * length = uid + function code + data
34 * The -1 is to account for the uid byte
35 """
36
37 method = "socket"
38
39 def __init__(self, decoder, client=None):
40 """Initialize a new instance of the framer.
41
42 :param decoder: The decoder factory implementation to use
43 """
44 super().__init__(decoder, client)
45 self._hsize = 0x07
46
47 # ----------------------------------------------------------------------- #
48 # Private Helper Functions
49 # ----------------------------------------------------------------------- #
50 def checkFrame(self):
51 """Check and decode the next frame.
52
53 Return true if we were successful.
54 """
55 if not self.isFrameReady():
56 return False
57 (
58 self._header["tid"],
59 self._header["pid"],
60 self._header["len"],
61 self._header["uid"],
62 ) = struct.unpack(">HHHB", self._buffer[0 : self._hsize])
63
64 # someone sent us an error? ignore it
65 if self._header["len"] < 2:
66 self.advanceFrame()
67 # we have at least a complete message, continue
68 elif len(self._buffer) - self._hsize + 1 >= self._header["len"]:
69 return True
70 # we don't have enough of a message yet, wait
71 return False
72
73 def advanceFrame(self):
74 """Skip over the current framed message.
75
76 This allows us to skip over the current message after we have processed
77 it or determined that it contains an error. It also has to reset the
78 current frame header handle
79 """
80 length = self._hsize + self._header["len"] - 1
81 self._buffer = self._buffer[length:]
82 self._header = {"tid": 0, "pid": 0, "len": 0, "uid": 0}
83
84 def isFrameReady(self):
85 """Check if we should continue decode logic.
86
87 This is meant to be used in a while loop in the decoding phase to let
88 the decoder factory know that there is still data in the buffer.
89
90 :returns: True if ready, False otherwise
91 """
92 return len(self._buffer) > self._hsize
93
94 def getFrame(self):
95 """Return the next frame from the buffered data.
96
97 :returns: The next full frame buffer
98 """
99 length = self._hsize + self._header["len"] - 1
100 return self._buffer[self._hsize : length]
101
102 # ----------------------------------------------------------------------- #
103 # Public Member Functions
104 # ----------------------------------------------------------------------- #
105 def decode_data(self, data):
106 """Decode data."""
107 if len(data) > self._hsize:
108 tid, pid, length, uid, fcode = struct.unpack(
109 SOCKET_FRAME_HEADER, data[0 : self._hsize + 1]
110 )
111 return {
112 "tid": tid,
113 "pid": pid,
114 "length": length,
115 "slave": uid,
116 "fcode": fcode,
117 }
118 return {}
119
120 def frameProcessIncomingPacket(self, single, callback, slave, tid=None, **kwargs):
121 """Process new packet pattern.
122
123 This takes in a new request packet, adds it to the current
124 packet stream, and performs framing on it. That is, checks
125 for complete messages, and once found, will process all that
126 exist. This handles the case when we read N + 1 or 1 // N
127 messages at a time instead of 1.
128
129 The processed and decoded messages are pushed to the callback
130 function to process and send.
131 """
132 if not self.checkFrame():
133 Log.debug("Frame check failed, ignoring!!")
134 return
135 if not self._validate_slave_id(slave, single):
136 header_txt = self._header["uid"]
137 Log.debug("Not a valid slave id - {}, ignoring!!", header_txt)
138 self.resetFrame()
139 return
140 self._process(callback, tid)
141
142 def _process(self, callback, tid, error=False):
143 """Process incoming packets irrespective error condition."""
144 data = self._buffer if error else self.getFrame()
145 if (result := self.decoder.decode(data)) is None:
146 self.resetFrame()
147 raise ModbusIOException("Unable to decode request")
148 if error and result.function_code < 0x80:
149 raise InvalidMessageReceivedException(result)
150 self.populateResult(result)
151 self.advanceFrame()
152 if tid and tid != result.transaction_id:
153 self.resetFrame()
154 else:
155 callback(result) # defer or push to a thread?
156
157 def buildPacket(self, message):
158 """Create a ready to send modbus packet.
159
160 :param message: The populated request/response to send
161 """
162 data = message.encode()
163 packet = struct.pack(
164 SOCKET_FRAME_HEADER,
165 message.transaction_id,
166 message.protocol_id,
167 len(data) + 2,
168 message.slave_id,
169 message.function_code,
170 )
171 packet += data
172 return packet
173
174
175 # __END__
176
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pymodbus/framer/socket_framer.py b/pymodbus/framer/socket_framer.py
--- a/pymodbus/framer/socket_framer.py
+++ b/pymodbus/framer/socket_framer.py
@@ -96,7 +96,7 @@
:returns: The next full frame buffer
"""
- length = self._hsize + self._header["len"] - 1
+ length = self._hsize + self._header["len"]
return self._buffer[self._hsize : length]
# ----------------------------------------------------------------------- #
|
{"golden_diff": "diff --git a/pymodbus/framer/socket_framer.py b/pymodbus/framer/socket_framer.py\n--- a/pymodbus/framer/socket_framer.py\n+++ b/pymodbus/framer/socket_framer.py\n@@ -96,7 +96,7 @@\n \n :returns: The next full frame buffer\n \"\"\"\n- length = self._hsize + self._header[\"len\"] - 1\n+ length = self._hsize + self._header[\"len\"]\n return self._buffer[self._hsize : length]\n \n # ----------------------------------------------------------------------- #\n", "issue": "Single byte error response handling\n### Versions\r\n\r\n- Python: 3.11\r\n- OS: Ubuntu 22.04\r\n- Pymodbus: 3.6.2\r\n- Modbus Hardware (if used): Sungrow Inverter\r\n\r\n### Pymodbus Specific\r\n\r\n- Client: tcp async\r\n\r\n### Description\r\n\r\n#### Current State\r\nWhen I query my sungrow inverter with a register I'm not supposed to be querying, I'm gettting a malformed package and the connection is dopped.\r\n\r\nWireshark:\r\n\r\n\r\nTCP Payload: 00 02 00 00 00 02 01 84 02\r\n\r\n#### Request\r\nThis line of code assumes that `data` has more than a single element. However, `data` in my case is only b'\\x84'.\r\nThe response is being assigned correctly: Exception Response(132, 4, IllegalFunction)\r\nBut in the marked line I'm getting an pymodbus.logging:General exception: index out of range\r\n\r\nSee https://github.com/pymodbus-dev/pymodbus/blob/6913e9c829009f3704f75d7cb7fff019c27c6770/pymodbus/factory.py#L351 \r\n\r\nI can fix this locally by changing\r\n```python\r\n response.decode(data[1:])\r\n```\r\nto\r\n```python\r\n if len(data) > 1:\r\n response.decode(data[1:])\r\n```\r\nbut I really have no idea what I'm doing here.\r\n\r\nHowever that change allows me to handle the error super nicely on the response object:\r\n```python\r\n if rr.isError():\r\n if isinstance(rr, pymodbus.pdu.ExceptionResponse):\r\n if rr.exception_code == pymodbus.pdu.ModbusExceptions.IllegalFunction:\r\n print(\"unsupported register\")\r\n```\r\n\r\n\r\n### Code and Logs\r\n\r\n```python\r\n--------------------\r\n2024-01-09 17:10:45,355 DEBUG logging:103 send: 0x0 0xd 0x0 0x0 0x0 0x6 0x1 0x4 0x13 0x90 0x0 0x2\r\n2024-01-09 17:10:45,355 DEBUG logging:103 Adding transaction 13\r\n2024-01-09 17:10:45,366 DEBUG logging:103 recv: 0x0 0xd 0x0 0x0 0x0 0x2 0x1 0x84 0x2 old_data: addr=None\r\n2024-01-09 17:10:45,366 DEBUG logging:103 Processing: 0x0 0xd 0x0 0x0 0x0 0x2 0x1 0x84 0x2\r\n2024-01-09 17:10:45,366 DEBUG logging:103 Factory Response[132]\r\n*************** data: b'\\x84'\r\n*************** response: Exception Response(132, 4, IllegalFunction)\r\n2024-01-09 17:10:45,366 ERROR logging:115 General exception: index out of range\r\n2024-01-09 17:10:45,366 DEBUG logging:103 Resetting frame - Current Frame in buffer - 0x0 0xd 0x0 0x0 0x0 0x2 0x1 0x84 0x2\r\nFatal error: protocol.data_received() call failed.\r\nprotocol: <pymodbus.client.tcp.AsyncModbusTcpClient object at 0x7f2f1aac8e90>\r\ntransport: <_SelectorSocketTransport fd=6 read=polling write=<idle, bufsize=0>>\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.11/asyncio/selector_events.py\", line 1009, in _read_ready__data_received\r\n self._protocol.data_received(data)\r\n File \"/home/alex/.local/lib/python3.11/site-packages/pymodbus/transport/transport.py\", line 312, in data_received\r\n self.datagram_received(data, None)\r\n File \"/home/alex/.local/lib/python3.11/site-packages/pymodbus/transport/transport.py\", line 346, in datagram_received\r\n cut = self.callback_data(self.recv_buffer, addr=addr)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File \"/home/alex/.local/lib/python3.11/site-packages/pymodbus/client/base.py\", line 186, in callback_data\r\n self.framer.processIncomingPacket(data, self._handle_response, slave=0)\r\n File \"/home/alex/.local/lib/python3.11/site-packages/pymodbus/framer/base.py\", line 139, in processIncomingPacket\r\n self.frameProcessIncomingPacket(single, callback, slave, **kwargs)\r\n File \"/home/alex/.local/lib/python3.11/site-packages/pymodbus/framer/socket_framer.py\", line 147, in frameProcessIncomingPacket\r\n self._process(callback, tid)\r\n File \"/home/alex/.local/lib/python3.11/site-packages/pymodbus/framer/socket_framer.py\", line 158, in _process\r\n raise ModbusIOException(\"Unable to decode request\")\r\npymodbus.exceptions.ModbusIOException: Modbus Error: [Input/Output] Unable to decode request\r\n2024-01-09 17:10:45,367 DEBUG logging:103 Connection lost comm due to Modbus Error: [Input/Output] Unable to decode request\r\n2024-01-09 17:10:45,367 DEBUG logging:103 Getting transaction 13\r\n2024-01-09 17:10:45,368 DEBUG logging:103 Wait comm 100.0 ms before reconnecting.\r\nTraceback (most recent call last):\r\n File \"/home/alex/homeassistant-sungrow/custom_components/sungrow/core/modbus.py\", line 325, in _read_range\r\n rr = await self._client.read_input_registers(address_start - 1, count=address_count, slave=self._slave) # type: ignore\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File \"/home/alex/.local/lib/python3.11/site-packages/pymodbus/client/base.py\", line 167, in async_execute\r\n resp = await asyncio.wait_for(\r\n ^^^^^^^^^^^^^^^^^^^^^^^\r\n File \"/usr/lib/python3.11/asyncio/tasks.py\", line 479, in wait_for\r\n return fut.result()\r\n ^^^^^^^^^^^^\r\npymodbus.exceptions.ConnectionException: Modbus Error: [Connection] Connection lost during request\r\n```\r\n\n", "before_files": [{"content": "\"\"\"Socket framer.\"\"\"\n# pylint: disable=missing-type-doc\nimport struct\n\nfrom pymodbus.exceptions import (\n InvalidMessageReceivedException,\n ModbusIOException,\n)\nfrom pymodbus.framer.base import SOCKET_FRAME_HEADER, ModbusFramer\nfrom pymodbus.logging import Log\n\n\n# --------------------------------------------------------------------------- #\n# Modbus TCP Message\n# --------------------------------------------------------------------------- #\n\n\nclass ModbusSocketFramer(ModbusFramer):\n \"\"\"Modbus Socket Frame controller.\n\n Before each modbus TCP message is an MBAP header which is used as a\n message frame. It allows us to easily separate messages as follows::\n\n [ MBAP Header ] [ Function Code] [ Data ] \\\n [ tid ][ pid ][ length ][ uid ]\n 2b 2b 2b 1b 1b Nb\n\n while len(message) > 0:\n tid, pid, length`, uid = struct.unpack(\">HHHB\", message)\n request = message[0:7 + length - 1`]\n message = [7 + length - 1:]\n\n * length = uid + function code + data\n * The -1 is to account for the uid byte\n \"\"\"\n\n method = \"socket\"\n\n def __init__(self, decoder, client=None):\n \"\"\"Initialize a new instance of the framer.\n\n :param decoder: The decoder factory implementation to use\n \"\"\"\n super().__init__(decoder, client)\n self._hsize = 0x07\n\n # ----------------------------------------------------------------------- #\n # Private Helper Functions\n # ----------------------------------------------------------------------- #\n def checkFrame(self):\n \"\"\"Check and decode the next frame.\n\n Return true if we were successful.\n \"\"\"\n if not self.isFrameReady():\n return False\n (\n self._header[\"tid\"],\n self._header[\"pid\"],\n self._header[\"len\"],\n self._header[\"uid\"],\n ) = struct.unpack(\">HHHB\", self._buffer[0 : self._hsize])\n\n # someone sent us an error? ignore it\n if self._header[\"len\"] < 2:\n self.advanceFrame()\n # we have at least a complete message, continue\n elif len(self._buffer) - self._hsize + 1 >= self._header[\"len\"]:\n return True\n # we don't have enough of a message yet, wait\n return False\n\n def advanceFrame(self):\n \"\"\"Skip over the current framed message.\n\n This allows us to skip over the current message after we have processed\n it or determined that it contains an error. It also has to reset the\n current frame header handle\n \"\"\"\n length = self._hsize + self._header[\"len\"] - 1\n self._buffer = self._buffer[length:]\n self._header = {\"tid\": 0, \"pid\": 0, \"len\": 0, \"uid\": 0}\n\n def isFrameReady(self):\n \"\"\"Check if we should continue decode logic.\n\n This is meant to be used in a while loop in the decoding phase to let\n the decoder factory know that there is still data in the buffer.\n\n :returns: True if ready, False otherwise\n \"\"\"\n return len(self._buffer) > self._hsize\n\n def getFrame(self):\n \"\"\"Return the next frame from the buffered data.\n\n :returns: The next full frame buffer\n \"\"\"\n length = self._hsize + self._header[\"len\"] - 1\n return self._buffer[self._hsize : length]\n\n # ----------------------------------------------------------------------- #\n # Public Member Functions\n # ----------------------------------------------------------------------- #\n def decode_data(self, data):\n \"\"\"Decode data.\"\"\"\n if len(data) > self._hsize:\n tid, pid, length, uid, fcode = struct.unpack(\n SOCKET_FRAME_HEADER, data[0 : self._hsize + 1]\n )\n return {\n \"tid\": tid,\n \"pid\": pid,\n \"length\": length,\n \"slave\": uid,\n \"fcode\": fcode,\n }\n return {}\n\n def frameProcessIncomingPacket(self, single, callback, slave, tid=None, **kwargs):\n \"\"\"Process new packet pattern.\n\n This takes in a new request packet, adds it to the current\n packet stream, and performs framing on it. That is, checks\n for complete messages, and once found, will process all that\n exist. This handles the case when we read N + 1 or 1 // N\n messages at a time instead of 1.\n\n The processed and decoded messages are pushed to the callback\n function to process and send.\n \"\"\"\n if not self.checkFrame():\n Log.debug(\"Frame check failed, ignoring!!\")\n return\n if not self._validate_slave_id(slave, single):\n header_txt = self._header[\"uid\"]\n Log.debug(\"Not a valid slave id - {}, ignoring!!\", header_txt)\n self.resetFrame()\n return\n self._process(callback, tid)\n\n def _process(self, callback, tid, error=False):\n \"\"\"Process incoming packets irrespective error condition.\"\"\"\n data = self._buffer if error else self.getFrame()\n if (result := self.decoder.decode(data)) is None:\n self.resetFrame()\n raise ModbusIOException(\"Unable to decode request\")\n if error and result.function_code < 0x80:\n raise InvalidMessageReceivedException(result)\n self.populateResult(result)\n self.advanceFrame()\n if tid and tid != result.transaction_id:\n self.resetFrame()\n else:\n callback(result) # defer or push to a thread?\n\n def buildPacket(self, message):\n \"\"\"Create a ready to send modbus packet.\n\n :param message: The populated request/response to send\n \"\"\"\n data = message.encode()\n packet = struct.pack(\n SOCKET_FRAME_HEADER,\n message.transaction_id,\n message.protocol_id,\n len(data) + 2,\n message.slave_id,\n message.function_code,\n )\n packet += data\n return packet\n\n\n# __END__\n", "path": "pymodbus/framer/socket_framer.py"}], "after_files": [{"content": "\"\"\"Socket framer.\"\"\"\n# pylint: disable=missing-type-doc\nimport struct\n\nfrom pymodbus.exceptions import (\n InvalidMessageReceivedException,\n ModbusIOException,\n)\nfrom pymodbus.framer.base import SOCKET_FRAME_HEADER, ModbusFramer\nfrom pymodbus.logging import Log\n\n\n# --------------------------------------------------------------------------- #\n# Modbus TCP Message\n# --------------------------------------------------------------------------- #\n\n\nclass ModbusSocketFramer(ModbusFramer):\n \"\"\"Modbus Socket Frame controller.\n\n Before each modbus TCP message is an MBAP header which is used as a\n message frame. It allows us to easily separate messages as follows::\n\n [ MBAP Header ] [ Function Code] [ Data ] \\\n [ tid ][ pid ][ length ][ uid ]\n 2b 2b 2b 1b 1b Nb\n\n while len(message) > 0:\n tid, pid, length`, uid = struct.unpack(\">HHHB\", message)\n request = message[0:7 + length - 1`]\n message = [7 + length - 1:]\n\n * length = uid + function code + data\n * The -1 is to account for the uid byte\n \"\"\"\n\n method = \"socket\"\n\n def __init__(self, decoder, client=None):\n \"\"\"Initialize a new instance of the framer.\n\n :param decoder: The decoder factory implementation to use\n \"\"\"\n super().__init__(decoder, client)\n self._hsize = 0x07\n\n # ----------------------------------------------------------------------- #\n # Private Helper Functions\n # ----------------------------------------------------------------------- #\n def checkFrame(self):\n \"\"\"Check and decode the next frame.\n\n Return true if we were successful.\n \"\"\"\n if not self.isFrameReady():\n return False\n (\n self._header[\"tid\"],\n self._header[\"pid\"],\n self._header[\"len\"],\n self._header[\"uid\"],\n ) = struct.unpack(\">HHHB\", self._buffer[0 : self._hsize])\n\n # someone sent us an error? ignore it\n if self._header[\"len\"] < 2:\n self.advanceFrame()\n # we have at least a complete message, continue\n elif len(self._buffer) - self._hsize + 1 >= self._header[\"len\"]:\n return True\n # we don't have enough of a message yet, wait\n return False\n\n def advanceFrame(self):\n \"\"\"Skip over the current framed message.\n\n This allows us to skip over the current message after we have processed\n it or determined that it contains an error. It also has to reset the\n current frame header handle\n \"\"\"\n length = self._hsize + self._header[\"len\"] - 1\n self._buffer = self._buffer[length:]\n self._header = {\"tid\": 0, \"pid\": 0, \"len\": 0, \"uid\": 0}\n\n def isFrameReady(self):\n \"\"\"Check if we should continue decode logic.\n\n This is meant to be used in a while loop in the decoding phase to let\n the decoder factory know that there is still data in the buffer.\n\n :returns: True if ready, False otherwise\n \"\"\"\n return len(self._buffer) > self._hsize\n\n def getFrame(self):\n \"\"\"Return the next frame from the buffered data.\n\n :returns: The next full frame buffer\n \"\"\"\n length = self._hsize + self._header[\"len\"]\n return self._buffer[self._hsize : length]\n\n # ----------------------------------------------------------------------- #\n # Public Member Functions\n # ----------------------------------------------------------------------- #\n def decode_data(self, data):\n \"\"\"Decode data.\"\"\"\n if len(data) > self._hsize:\n tid, pid, length, uid, fcode = struct.unpack(\n SOCKET_FRAME_HEADER, data[0 : self._hsize + 1]\n )\n return {\n \"tid\": tid,\n \"pid\": pid,\n \"length\": length,\n \"slave\": uid,\n \"fcode\": fcode,\n }\n return {}\n\n def frameProcessIncomingPacket(self, single, callback, slave, tid=None, **kwargs):\n \"\"\"Process new packet pattern.\n\n This takes in a new request packet, adds it to the current\n packet stream, and performs framing on it. That is, checks\n for complete messages, and once found, will process all that\n exist. This handles the case when we read N + 1 or 1 // N\n messages at a time instead of 1.\n\n The processed and decoded messages are pushed to the callback\n function to process and send.\n \"\"\"\n if not self.checkFrame():\n Log.debug(\"Frame check failed, ignoring!!\")\n return\n if not self._validate_slave_id(slave, single):\n header_txt = self._header[\"uid\"]\n Log.debug(\"Not a valid slave id - {}, ignoring!!\", header_txt)\n self.resetFrame()\n return\n self._process(callback, tid)\n\n def _process(self, callback, tid, error=False):\n \"\"\"Process incoming packets irrespective error condition.\"\"\"\n data = self._buffer if error else self.getFrame()\n if (result := self.decoder.decode(data)) is None:\n self.resetFrame()\n raise ModbusIOException(\"Unable to decode request\")\n if error and result.function_code < 0x80:\n raise InvalidMessageReceivedException(result)\n self.populateResult(result)\n self.advanceFrame()\n if tid and tid != result.transaction_id:\n self.resetFrame()\n else:\n callback(result) # defer or push to a thread?\n\n def buildPacket(self, message):\n \"\"\"Create a ready to send modbus packet.\n\n :param message: The populated request/response to send\n \"\"\"\n data = message.encode()\n packet = struct.pack(\n SOCKET_FRAME_HEADER,\n message.transaction_id,\n message.protocol_id,\n len(data) + 2,\n message.slave_id,\n message.function_code,\n )\n packet += data\n return packet\n\n\n# __END__\n", "path": "pymodbus/framer/socket_framer.py"}]}
| 3,686 | 129 |
gh_patches_debug_23943
|
rasdani/github-patches
|
git_diff
|
pyinstaller__pyinstaller-4468
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
PyInstaller tries to load all project files as libraries and crashes with "win32ctypes.pywin32.pywintypes.error: (2, 'LoadLibraryExW', 'The system cannot find the file specified.')" when PySide2 is included
PyInstaller 3.5 and later seems to try to load every single file in project subdirectories as a library and finally crashes with the exception mentioned in the title when PySide2 is included as a depencendy.
I reproduced this with versions `3.5`, `4.0.dev0+3f57c9142`, and `4.0.dev0+g3f57c914` (current dev zip and current git head) and created [a minimal python project that exhibits this issue](https://github.com/Systemcluster/pyinstaller-crash).
This has previously been reported under #3916 with additional info posted in [this stackoverflow question](https://stackoverflow.com/questions/57932432/pyinstaller-win32ctypes-pywin32-pywintypes-error-2-loadlibraryexw-the-sys/58195896#58195896).
<details>
<summary>Here is a sample debug log output including traceback from one of the pyinstaller invokations.</summary>
```cmd
62 INFO: PyInstaller: 4.0.dev0+3f57c9142
62 INFO: Python: 3.7.4
73 INFO: Platform: Windows-10-10.0.18362-SP0
...
various similar outputs to the 4 following lines...
...
6530 DEBUG: Analyzing .venv\Lib\site-packages\pip\_vendor\chardet\__pycache__\jpcntx.cpython-37.pyc
6531 INFO: Cannot get manifest resource from non-PE file .venv\Lib\site-packages\pip\_vendor\chardet\__pycache__\jpcntx.cpython-37.pyc
6532 WARNING: Can not get binary dependencies for file: .venv\Lib\site-packages\pip\_vendor\chardet\__pycache__\jpcntx.cpython-37.pyc
6532 WARNING: Reason: 'DOS Header magic not found.'
6532 DEBUG: Analyzing .git\objects\4f\cbd50d0fa5061722eaf8c4aace8d7f77ad6784
Traceback (most recent call last):
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\win32ctypes\pywin32\pywintypes.py", line 35, in pywin32error
yield
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\win32ctypes\pywin32\win32api.py", line 43, in LoadLibraryEx
return _dll._LoadLibraryEx(fileName, 0, flags)
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\win32ctypes\core\ctypes\_util.py", line 42, in check_null
raise make_error(function, function_name)
OSError: [WinError 2] The system cannot find the file specified.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\Chris\AppData\Local\Programs\Python\Python37\Lib\runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "C:\Users\Chris\AppData\Local\Programs\Python\Python37\Lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "C:\Users\Chris\Development\w3modmanager\.venv\Scripts\pyinstaller.exe\__main__.py", line 9, in <module>
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\__main__.py", line 112, in run
run_build(pyi_config, spec_file, **vars(args))
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\__main__.py", line 63, in run_build
PyInstaller.building.build_main.main(pyi_config, spec_file, **kwargs)
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\building\build_main.py", line 732, in main
build(specfile, kw.get('distpath'), kw.get('workpath'), kw.get('clean_build'))
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\building\build_main.py", line 679, in build
exec(code, spec_namespace)
File ".\w3modmanager.spec", line 25, in <module>
noarchive=False,
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\building\build_main.py", line 242, in __init__
self.__postinit__()
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\building\datastruct.py", line 158, in __postinit__
self.assemble()
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\building\build_main.py", line 468, in assemble
redirects=self.binding_redirects))
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\depend\bindepend.py", line 226, in Dependencies
for ftocnm, fn in getAssemblyFiles(pth, manifest, redirects):
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\depend\bindepend.py", line 402, in getAssemblyFiles
for assembly in getAssemblies(pth):
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\depend\bindepend.py", line 353, in getAssemblies
res = GetManifestResources(pth)
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\utils\win32\winmanifest.py", line 1005, in GetManifestResources
return winresource.GetResources(filename, [RT_MANIFEST], names, languages)
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\PyInstaller\utils\win32\winresource.py", line 168, in GetResources
hsrc = win32api.LoadLibraryEx(filename, 0, LOAD_LIBRARY_AS_DATAFILE)
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\win32ctypes\pywin32\win32api.py", line 43, in LoadLibraryEx
return _dll._LoadLibraryEx(fileName, 0, flags)
File "C:\Users\Chris\AppData\Local\Programs\Python\Python37\Lib\contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "c:\users\chris\development\w3modmanager\.venv\lib\site-packages\win32ctypes\pywin32\pywintypes.py", line 37, in pywin32error
raise error(exception.winerror, exception.function, exception.strerror)
win32ctypes.pywin32.pywintypes.error: (2, 'LoadLibraryExW', 'The system cannot find the file specified.')
```
</details>
By modifying `win32api.py` and catching the exception inside `LoadLibraryEx` , I noticed a lot of similar stacktraces prior to the crash of type `OSError: [WinError 193] <no description>` related to various different files (e.g. `.json`, `.pyc`, etc.), which seemed to indicate that PyInstaller tries to load all the files inside the subdirectories as libraries. The file or type that generates `WinError 2`, which isn't caught, is not deterministic and changes each invocation.
Adding some log output to `build_main.py` before `self.binaries.extend(bindepend.Dependencies(self.binaries, redirects=self.binding_redirects))` reveals that `self.binaries` contains every single non-`.py` file inside `.git` and `.venv`.
Looking at the log output a bit more, I suspected it might be related to the `hook-PySide2.QtNetwork.py` hook. I was able to narrow it down to these lines:
```python
binaries += [
# Per http://doc.qt.io/qt-5/ssl.html#enabling-and-disabling-ssl-support,
# the SSL libraries are dynamically loaded, implying they exist in
# the system path. Include these.
(getfullnameof('libeay32.dll'), os.path.join(*rel_data_path)),
(getfullnameof('ssleay32.dll'), os.path.join(*rel_data_path)),
]
```
Logging the result of these calls, I am seeing this output, which I am assuming is not a valid search path resulting in this issue.
```python
[
('C:\\Program Files\\Common Files\\Intel\\WirelessCommon\\libeay32.dll', '.'),
('', '.')
]
```
These lines seem to have been copied over verbatim from the PyQt5 hook in [this commit](https://github.com/pyinstaller/pyinstaller/commit/5b5791285332003940f4bc70c32bb0cf10a32917) by @bjones1. Removing these lines fixes the issue, however I am not sure about the relevancy of these libraries regarding PySide2.
[This commit](https://github.com/pyinstaller/pyinstaller/commit/47bd7f10de3b343270fd4d34b356f14617d5d3a1#diff-cfc3f87059223ae1b3d84abe9a90e8f0) changed the PyQt5.QtNetwork hook again, but the change didn't make it into the PySide2.QtNetwork hook. Porting this change over fixes the build as well.
As it stands, PySide2 support is broken in the latest version. It might be a very simple fix as described above, but I don't know enough about either the PySide2 requirements nor PyInstaller to verify.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `PyInstaller/hooks/hook-PySide2.QtNetwork.py`
Content:
```
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2013-2019, PyInstaller Development Team.
3 #
4 # Distributed under the terms of the GNU General Public License with exception
5 # for distributing bootloader.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #-----------------------------------------------------------------------------
9 import os.path
10
11 from PyInstaller.utils.hooks import eval_statement
12 from PyInstaller.utils.hooks.qt import add_qt5_dependencies, \
13 pyside2_library_info
14 from PyInstaller.compat import is_win
15 from PyInstaller.depend.bindepend import getfullnameof
16
17 # Only proceed if PySide2 can be imported.
18 if pyside2_library_info.version:
19 hiddenimports, binaries, datas = add_qt5_dependencies(__file__)
20
21 # Add libraries needed for SSL if these are available. See issue #3520, #4048.
22 if (is_win and eval_statement("""
23 from PySide2.QtNetwork import QSslSocket
24 print(QSslSocket.supportsSsl())""")):
25
26 rel_data_path = ['.']
27 binaries += [
28 # Per http://doc.qt.io/qt-5/ssl.html#enabling-and-disabling-ssl-support,
29 # the SSL libraries are dynamically loaded, implying they exist in
30 # the system path. Include these.
31 (getfullnameof('libeay32.dll'), os.path.join(*rel_data_path)),
32 (getfullnameof('ssleay32.dll'), os.path.join(*rel_data_path)),
33 ]
34
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/PyInstaller/hooks/hook-PySide2.QtNetwork.py b/PyInstaller/hooks/hook-PySide2.QtNetwork.py
--- a/PyInstaller/hooks/hook-PySide2.QtNetwork.py
+++ b/PyInstaller/hooks/hook-PySide2.QtNetwork.py
@@ -12,7 +12,6 @@
from PyInstaller.utils.hooks.qt import add_qt5_dependencies, \
pyside2_library_info
from PyInstaller.compat import is_win
-from PyInstaller.depend.bindepend import getfullnameof
# Only proceed if PySide2 can be imported.
if pyside2_library_info.version:
@@ -23,11 +22,10 @@
from PySide2.QtNetwork import QSslSocket
print(QSslSocket.supportsSsl())""")):
- rel_data_path = ['.']
- binaries += [
- # Per http://doc.qt.io/qt-5/ssl.html#enabling-and-disabling-ssl-support,
- # the SSL libraries are dynamically loaded, implying they exist in
- # the system path. Include these.
- (getfullnameof('libeay32.dll'), os.path.join(*rel_data_path)),
- (getfullnameof('ssleay32.dll'), os.path.join(*rel_data_path)),
- ]
+ binaries = []
+ for dll in ('libeay32.dll', 'ssleay32.dll', 'libssl-1_1-x64.dll',
+ 'libcrypto-1_1-x64.dllx'):
+ dll_path = os.path.join(
+ pyside2_library_info.location['BinariesPath'], dll)
+ if os.path.exists(dll_path):
+ binaries.append((dll_path, '.'))
|
{"golden_diff": "diff --git a/PyInstaller/hooks/hook-PySide2.QtNetwork.py b/PyInstaller/hooks/hook-PySide2.QtNetwork.py\n--- a/PyInstaller/hooks/hook-PySide2.QtNetwork.py\n+++ b/PyInstaller/hooks/hook-PySide2.QtNetwork.py\n@@ -12,7 +12,6 @@\n from PyInstaller.utils.hooks.qt import add_qt5_dependencies, \\\n pyside2_library_info\n from PyInstaller.compat import is_win\n-from PyInstaller.depend.bindepend import getfullnameof\n \n # Only proceed if PySide2 can be imported.\n if pyside2_library_info.version:\n@@ -23,11 +22,10 @@\n from PySide2.QtNetwork import QSslSocket\n print(QSslSocket.supportsSsl())\"\"\")):\n \n- rel_data_path = ['.']\n- binaries += [\n- # Per http://doc.qt.io/qt-5/ssl.html#enabling-and-disabling-ssl-support,\n- # the SSL libraries are dynamically loaded, implying they exist in\n- # the system path. Include these.\n- (getfullnameof('libeay32.dll'), os.path.join(*rel_data_path)),\n- (getfullnameof('ssleay32.dll'), os.path.join(*rel_data_path)),\n- ]\n+ binaries = []\n+ for dll in ('libeay32.dll', 'ssleay32.dll', 'libssl-1_1-x64.dll',\n+ 'libcrypto-1_1-x64.dllx'):\n+ dll_path = os.path.join(\n+ pyside2_library_info.location['BinariesPath'], dll)\n+ if os.path.exists(dll_path):\n+ binaries.append((dll_path, '.'))\n", "issue": "PyInstaller tries to load all project files as libraries and crashes with \"win32ctypes.pywin32.pywintypes.error: (2, 'LoadLibraryExW', 'The system cannot find the file specified.')\" when PySide2 is included\nPyInstaller 3.5 and later seems to try to load every single file in project subdirectories as a library and finally crashes with the exception mentioned in the title when PySide2 is included as a depencendy.\r\n\r\nI reproduced this with versions `3.5`, `4.0.dev0+3f57c9142`, and `4.0.dev0+g3f57c914` (current dev zip and current git head) and created [a minimal python project that exhibits this issue](https://github.com/Systemcluster/pyinstaller-crash).\r\n\r\nThis has previously been reported under #3916 with additional info posted in [this stackoverflow question](https://stackoverflow.com/questions/57932432/pyinstaller-win32ctypes-pywin32-pywintypes-error-2-loadlibraryexw-the-sys/58195896#58195896).\r\n\r\n<details>\r\n<summary>Here is a sample debug log output including traceback from one of the pyinstaller invokations.</summary>\r\n\r\n```cmd\r\n62 INFO: PyInstaller: 4.0.dev0+3f57c9142\r\n62 INFO: Python: 3.7.4\r\n73 INFO: Platform: Windows-10-10.0.18362-SP0\r\n...\r\nvarious similar outputs to the 4 following lines...\r\n...\r\n6530 DEBUG: Analyzing .venv\\Lib\\site-packages\\pip\\_vendor\\chardet\\__pycache__\\jpcntx.cpython-37.pyc\r\n6531 INFO: Cannot get manifest resource from non-PE file .venv\\Lib\\site-packages\\pip\\_vendor\\chardet\\__pycache__\\jpcntx.cpython-37.pyc\r\n6532 WARNING: Can not get binary dependencies for file: .venv\\Lib\\site-packages\\pip\\_vendor\\chardet\\__pycache__\\jpcntx.cpython-37.pyc\r\n6532 WARNING: Reason: 'DOS Header magic not found.'\r\n6532 DEBUG: Analyzing .git\\objects\\4f\\cbd50d0fa5061722eaf8c4aace8d7f77ad6784\r\nTraceback (most recent call last):\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\win32ctypes\\pywin32\\pywintypes.py\", line 35, in pywin32error\r\n yield\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\win32ctypes\\pywin32\\win32api.py\", line 43, in LoadLibraryEx\r\n return _dll._LoadLibraryEx(fileName, 0, flags)\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\win32ctypes\\core\\ctypes\\_util.py\", line 42, in check_null\r\n raise make_error(function, function_name)\r\nOSError: [WinError 2] The system cannot find the file specified.\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\Chris\\AppData\\Local\\Programs\\Python\\Python37\\Lib\\runpy.py\", line 193, in _run_module_as_main\r\n \"__main__\", mod_spec)\r\n File \"C:\\Users\\Chris\\AppData\\Local\\Programs\\Python\\Python37\\Lib\\runpy.py\", line 85, in _run_code\r\n exec(code, run_globals)\r\n File \"C:\\Users\\Chris\\Development\\w3modmanager\\.venv\\Scripts\\pyinstaller.exe\\__main__.py\", line 9, in <module>\r\n\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\__main__.py\", line 112, in run\r\n run_build(pyi_config, spec_file, **vars(args))\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\__main__.py\", line 63, in run_build\r\n PyInstaller.building.build_main.main(pyi_config, spec_file, **kwargs)\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\building\\build_main.py\", line 732, in main\r\n build(specfile, kw.get('distpath'), kw.get('workpath'), kw.get('clean_build'))\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\building\\build_main.py\", line 679, in build\r\n exec(code, spec_namespace)\r\n File \".\\w3modmanager.spec\", line 25, in <module>\r\n noarchive=False,\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\building\\build_main.py\", line 242, in __init__\r\n self.__postinit__()\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\building\\datastruct.py\", line 158, in __postinit__\r\n self.assemble()\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\building\\build_main.py\", line 468, in assemble\r\n redirects=self.binding_redirects))\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\depend\\bindepend.py\", line 226, in Dependencies\r\n for ftocnm, fn in getAssemblyFiles(pth, manifest, redirects):\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\depend\\bindepend.py\", line 402, in getAssemblyFiles\r\n for assembly in getAssemblies(pth):\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\depend\\bindepend.py\", line 353, in getAssemblies\r\n res = GetManifestResources(pth)\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\utils\\win32\\winmanifest.py\", line 1005, in GetManifestResources\r\n return winresource.GetResources(filename, [RT_MANIFEST], names, languages)\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\PyInstaller\\utils\\win32\\winresource.py\", line 168, in GetResources\r\n hsrc = win32api.LoadLibraryEx(filename, 0, LOAD_LIBRARY_AS_DATAFILE)\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\win32ctypes\\pywin32\\win32api.py\", line 43, in LoadLibraryEx\r\n return _dll._LoadLibraryEx(fileName, 0, flags)\r\n File \"C:\\Users\\Chris\\AppData\\Local\\Programs\\Python\\Python37\\Lib\\contextlib.py\", line 130, in __exit__\r\n self.gen.throw(type, value, traceback)\r\n File \"c:\\users\\chris\\development\\w3modmanager\\.venv\\lib\\site-packages\\win32ctypes\\pywin32\\pywintypes.py\", line 37, in pywin32error\r\n raise error(exception.winerror, exception.function, exception.strerror)\r\nwin32ctypes.pywin32.pywintypes.error: (2, 'LoadLibraryExW', 'The system cannot find the file specified.')\r\n\r\n```\r\n</details>\r\n\r\nBy modifying `win32api.py` and catching the exception inside `LoadLibraryEx` , I noticed a lot of similar stacktraces prior to the crash of type `OSError: [WinError 193] <no description>` related to various different files (e.g. `.json`, `.pyc`, etc.), which seemed to indicate that PyInstaller tries to load all the files inside the subdirectories as libraries. The file or type that generates `WinError 2`, which isn't caught, is not deterministic and changes each invocation.\r\n\r\nAdding some log output to `build_main.py` before `self.binaries.extend(bindepend.Dependencies(self.binaries, redirects=self.binding_redirects))` reveals that `self.binaries` contains every single non-`.py` file inside `.git` and `.venv`.\r\n\r\nLooking at the log output a bit more, I suspected it might be related to the `hook-PySide2.QtNetwork.py` hook. I was able to narrow it down to these lines:\r\n\r\n```python\r\nbinaries += [\r\n # Per http://doc.qt.io/qt-5/ssl.html#enabling-and-disabling-ssl-support,\r\n # the SSL libraries are dynamically loaded, implying they exist in\r\n # the system path. Include these.\r\n (getfullnameof('libeay32.dll'), os.path.join(*rel_data_path)),\r\n (getfullnameof('ssleay32.dll'), os.path.join(*rel_data_path)),\r\n]\r\n``` \r\n\r\nLogging the result of these calls, I am seeing this output, which I am assuming is not a valid search path resulting in this issue.\r\n\r\n```python\r\n[\r\n ('C:\\\\Program Files\\\\Common Files\\\\Intel\\\\WirelessCommon\\\\libeay32.dll', '.'), \r\n ('', '.')\r\n]\r\n```\r\n\r\nThese lines seem to have been copied over verbatim from the PyQt5 hook in [this commit](https://github.com/pyinstaller/pyinstaller/commit/5b5791285332003940f4bc70c32bb0cf10a32917) by @bjones1. Removing these lines fixes the issue, however I am not sure about the relevancy of these libraries regarding PySide2. \r\n\r\n[This commit](https://github.com/pyinstaller/pyinstaller/commit/47bd7f10de3b343270fd4d34b356f14617d5d3a1#diff-cfc3f87059223ae1b3d84abe9a90e8f0) changed the PyQt5.QtNetwork hook again, but the change didn't make it into the PySide2.QtNetwork hook. Porting this change over fixes the build as well.\r\n\r\nAs it stands, PySide2 support is broken in the latest version. It might be a very simple fix as described above, but I don't know enough about either the PySide2 requirements nor PyInstaller to verify.\r\n\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2013-2019, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\nimport os.path\n\nfrom PyInstaller.utils.hooks import eval_statement\nfrom PyInstaller.utils.hooks.qt import add_qt5_dependencies, \\\n pyside2_library_info\nfrom PyInstaller.compat import is_win\nfrom PyInstaller.depend.bindepend import getfullnameof\n\n# Only proceed if PySide2 can be imported.\nif pyside2_library_info.version:\n hiddenimports, binaries, datas = add_qt5_dependencies(__file__)\n\n # Add libraries needed for SSL if these are available. See issue #3520, #4048.\n if (is_win and eval_statement(\"\"\"\n from PySide2.QtNetwork import QSslSocket\n print(QSslSocket.supportsSsl())\"\"\")):\n\n rel_data_path = ['.']\n binaries += [\n # Per http://doc.qt.io/qt-5/ssl.html#enabling-and-disabling-ssl-support,\n # the SSL libraries are dynamically loaded, implying they exist in\n # the system path. Include these.\n (getfullnameof('libeay32.dll'), os.path.join(*rel_data_path)),\n (getfullnameof('ssleay32.dll'), os.path.join(*rel_data_path)),\n ]\n", "path": "PyInstaller/hooks/hook-PySide2.QtNetwork.py"}], "after_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2013-2019, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\nimport os.path\n\nfrom PyInstaller.utils.hooks import eval_statement\nfrom PyInstaller.utils.hooks.qt import add_qt5_dependencies, \\\n pyside2_library_info\nfrom PyInstaller.compat import is_win\n\n# Only proceed if PySide2 can be imported.\nif pyside2_library_info.version:\n hiddenimports, binaries, datas = add_qt5_dependencies(__file__)\n\n # Add libraries needed for SSL if these are available. See issue #3520, #4048.\n if (is_win and eval_statement(\"\"\"\n from PySide2.QtNetwork import QSslSocket\n print(QSslSocket.supportsSsl())\"\"\")):\n\n binaries = []\n for dll in ('libeay32.dll', 'ssleay32.dll', 'libssl-1_1-x64.dll',\n 'libcrypto-1_1-x64.dllx'):\n dll_path = os.path.join(\n pyside2_library_info.location['BinariesPath'], dll)\n if os.path.exists(dll_path):\n binaries.append((dll_path, '.'))\n", "path": "PyInstaller/hooks/hook-PySide2.QtNetwork.py"}]}
| 3,143 | 384 |
gh_patches_debug_37780
|
rasdani/github-patches
|
git_diff
|
apluslms__a-plus-651
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
In A+ it should be possible to set student's default language in a specific course
> **Migrated from Trello**
- [x] Add language to the userprofile - #617
- [x] When user is viewing material with different language, make it clear that it's not their default, e.g., show a warning. - #651
Additionally, buttons (links) should be shown to set that language as their preferred language (to enrollment object) and to switch back to their preferred language.
This is more about the language set via url (query parameter), so that when the user is directed to a material in a different language than in their enrollment object (or profile), then that would be pointed out to them.
- [x] Store language in the Enrollment object. Think about the case where system has 3 languages sv, en, and fi, user has set the language in profile to sv and course is provided in en and fi. In that case, A+ should be able to remember the language the user likes to read that course and not force to set profile language. - #632
----
**Comments**
>**Jaakko Kantojärvi:**
At least for a profile, so when student logs in, the language is returned to original one.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `course/viewbase.py`
Content:
```
1 from django.contrib import messages
2 from django.core.exceptions import PermissionDenied
3 from django.http import Http404
4 from django.shortcuts import get_object_or_404, render_to_response
5 from django.utils import translation
6 from django.utils.translation import ugettext_lazy as _
7 from django.utils.translation import get_language, get_language_info
8
9 from authorization.permissions import ACCESS
10 from exercise.cache.content import CachedContent
11 from lib.helpers import remove_query_param_from_url, update_url_params
12 from lib.viewbase import BaseTemplateView
13 from userprofile.viewbase import UserProfileMixin
14 from .cache.students import CachedStudent
15 from .exceptions import TranslationNotFound
16 from .permissions import (
17 CourseVisiblePermission,
18 CourseModulePermission,
19 )
20 from .models import Course, CourseInstance, CourseModule, UserTagging
21
22
23 class CourseMixin(UserProfileMixin):
24 course_kw = "course_slug"
25
26 def get_resource_objects(self):
27 super().get_resource_objects()
28 self.course = get_object_or_404(
29 Course,
30 url=self._get_kwarg(self.course_kw)
31 )
32 self.is_teacher = self.course.is_teacher(self.request.user)
33 self.note("course", "is_teacher")
34
35
36 class CourseBaseView(CourseMixin, BaseTemplateView):
37 pass
38
39
40 class CourseInstanceBaseMixin(object):
41 course_kw = CourseMixin.course_kw
42 instance_kw = "instance_slug"
43 course_permission_classes = (
44 CourseVisiblePermission,
45 )
46
47 def get_permissions(self):
48 perms = super().get_permissions()
49 perms.extend((Perm() for Perm in self.course_permission_classes))
50 return perms
51
52 # get_course_instance_object
53
54 def get_resource_objects(self):
55 super().get_resource_objects()
56 user = self.request.user
57 instance = self.get_course_instance_object()
58 if instance is not None:
59 self.instance = instance
60 self.course = self.instance.course
61 self.content = CachedContent(self.instance)
62 self.user_course_data = None
63 is_real_user = user.is_authenticated and not user.is_anonymous
64 if is_real_user:
65 self.user_course_data = self.instance.get_enrollment_for(user)
66 self.is_student = self.instance.is_student(user)
67 self.is_assistant = self.instance.is_assistant(user)
68 self.is_teacher = self.course.is_teacher(user)
69 self.is_course_staff = self.is_teacher or self.is_assistant
70 self.get_taggings = lambda: CachedStudent(instance, user.id).data['tag_slugs']
71
72 self.note(
73 "course", "instance", "content", "user_course_data", "is_student", "is_assistant",
74 "is_teacher", "is_course_staff", "get_taggings",
75 )
76
77 # Try to find a language that is defined for this course instance
78 # and apply it
79 if self.instance.language:
80 instance_languages = self.instance.language.strip('|').split('|')
81 instance_def_language = instance_languages[0]
82 instance_languages = set(instance_languages)
83
84 query_language = self.request.GET.get('hl')
85 if query_language:
86 if query_language[:2] in instance_languages:
87 language = query_language
88 else:
89 raise TranslationNotFound
90 else:
91 languages = []
92 if self.user_course_data and self.user_course_data.language:
93 languages.append(self.user_course_data.language)
94 if is_real_user and user.userprofile.language:
95 languages.append(user.userprofile.language)
96 languages.append(get_language())
97
98 for lang in languages:
99 if lang[:2] in instance_languages:
100 language = lang
101 break
102 else:
103 language = instance_def_language
104
105 translation.activate(language)
106
107 def get_access_mode(self):
108 access_mode = super().get_access_mode()
109
110 if hasattr(self, 'instance'):
111 # Loosen the access mode if instance is public
112 show_for = self.instance.view_content_to
113 is_public = show_for == CourseInstance.VIEW_ACCESS.PUBLIC
114 access_mode_student = access_mode in (ACCESS.STUDENT, ACCESS.ENROLL)
115 if is_public and access_mode_student:
116 access_mode = ACCESS.ANONYMOUS
117
118 return access_mode
119
120
121 class CourseInstanceMixin(CourseInstanceBaseMixin, UserProfileMixin):
122 def get_course_instance_object(self):
123 return get_object_or_404(
124 CourseInstance,
125 url=self.kwargs[self.instance_kw],
126 course__url=self.kwargs[self.course_kw],
127 )
128
129 def handle_exception(self, exc):
130 if isinstance(exc, TranslationNotFound):
131 instance_languages = self.instance.language.strip("|").split("|")
132 url = remove_query_param_from_url(self.request.get_full_path(), 'hl')
133 for i, lang in enumerate(instance_languages):
134 instance_languages[i] = {"name": get_language_info(lang)['name'], "url": update_url_params(url, {'hl' : lang})}
135 return render_to_response('404.html', {'error_msg': str(exc), 'languages': instance_languages}, status=404)
136 return super().handle_exception(exc)
137
138
139 class CourseInstanceBaseView(CourseInstanceMixin, BaseTemplateView):
140 pass
141
142
143 class EnrollableViewMixin(CourseInstanceMixin):
144 access_mode = ACCESS.ENROLL
145
146 def get_common_objects(self):
147 self.enrolled = self.is_student
148 self.enrollable = (
149 self.profile
150 and self.instance.is_enrollable(self.profile.user)
151 )
152 self.note('enrolled', 'enrollable')
153
154
155 class CourseModuleBaseMixin(object):
156 module_kw = "module_slug"
157 module_permissions_classes = (
158 CourseModulePermission,
159 )
160
161 def get_permissions(self):
162 perms = super().get_permissions()
163 perms.extend((Perm() for Perm in self.module_permissions_classes))
164 return perms
165
166 # get_course_module_object
167
168 def get_resource_objects(self):
169 super().get_resource_objects()
170 self.module = self.get_course_module_object()
171 self.note("module")
172
173
174 class CourseModuleMixin(CourseModuleBaseMixin, CourseInstanceMixin):
175 def get_course_module_object(self):
176 return get_object_or_404(
177 CourseModule,
178 url=self.kwargs[self.module_kw],
179 course_instance=self.instance
180 )
181
182
183 class CourseModuleBaseView(CourseModuleMixin, BaseTemplateView):
184 pass
185
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/course/viewbase.py b/course/viewbase.py
--- a/course/viewbase.py
+++ b/course/viewbase.py
@@ -68,10 +68,14 @@
self.is_teacher = self.course.is_teacher(user)
self.is_course_staff = self.is_teacher or self.is_assistant
self.get_taggings = lambda: CachedStudent(instance, user.id).data['tag_slugs']
+ self.url_without_language = remove_query_param_from_url(self.request.get_full_path(), 'hl')
+ self.query_language = None
+ self.user_language = None
self.note(
"course", "instance", "content", "user_course_data", "is_student", "is_assistant",
- "is_teacher", "is_course_staff", "get_taggings",
+ "is_teacher", "is_course_staff", "get_taggings", "url_without_language",
+ "query_language", "user_language"
)
# Try to find a language that is defined for this course instance
@@ -81,20 +85,24 @@
instance_def_language = instance_languages[0]
instance_languages = set(instance_languages)
+ languages = []
+ if self.user_course_data and self.user_course_data.language:
+ languages.append(self.user_course_data.language)
+ if is_real_user and user.userprofile.language:
+ languages.append(user.userprofile.language)
+ languages.append(get_language())
+
query_language = self.request.GET.get('hl')
if query_language:
if query_language[:2] in instance_languages:
language = query_language
+ if languages:
+ self.user_language = languages[0]
+ if self.user_language[:2] != query_language[:2]:
+ self.query_language = query_language
else:
raise TranslationNotFound
else:
- languages = []
- if self.user_course_data and self.user_course_data.language:
- languages.append(self.user_course_data.language)
- if is_real_user and user.userprofile.language:
- languages.append(user.userprofile.language)
- languages.append(get_language())
-
for lang in languages:
if lang[:2] in instance_languages:
language = lang
|
{"golden_diff": "diff --git a/course/viewbase.py b/course/viewbase.py\n--- a/course/viewbase.py\n+++ b/course/viewbase.py\n@@ -68,10 +68,14 @@\n self.is_teacher = self.course.is_teacher(user)\n self.is_course_staff = self.is_teacher or self.is_assistant\n self.get_taggings = lambda: CachedStudent(instance, user.id).data['tag_slugs']\n+ self.url_without_language = remove_query_param_from_url(self.request.get_full_path(), 'hl')\n+ self.query_language = None\n+ self.user_language = None\n \n self.note(\n \"course\", \"instance\", \"content\", \"user_course_data\", \"is_student\", \"is_assistant\",\n- \"is_teacher\", \"is_course_staff\", \"get_taggings\",\n+ \"is_teacher\", \"is_course_staff\", \"get_taggings\", \"url_without_language\",\n+ \"query_language\", \"user_language\"\n )\n \n # Try to find a language that is defined for this course instance\n@@ -81,20 +85,24 @@\n instance_def_language = instance_languages[0]\n instance_languages = set(instance_languages)\n \n+ languages = []\n+ if self.user_course_data and self.user_course_data.language:\n+ languages.append(self.user_course_data.language)\n+ if is_real_user and user.userprofile.language:\n+ languages.append(user.userprofile.language)\n+ languages.append(get_language())\n+\n query_language = self.request.GET.get('hl')\n if query_language:\n if query_language[:2] in instance_languages:\n language = query_language\n+ if languages:\n+ self.user_language = languages[0]\n+ if self.user_language[:2] != query_language[:2]:\n+ self.query_language = query_language\n else:\n raise TranslationNotFound\n else:\n- languages = []\n- if self.user_course_data and self.user_course_data.language:\n- languages.append(self.user_course_data.language)\n- if is_real_user and user.userprofile.language:\n- languages.append(user.userprofile.language)\n- languages.append(get_language())\n-\n for lang in languages:\n if lang[:2] in instance_languages:\n language = lang\n", "issue": "In A+ it should be possible to set student's default language in a specific course\n> **Migrated from Trello**\r\n\r\n- [x] Add language to the userprofile - #617\r\n- [x] When user is viewing material with different language, make it clear that it's not their default, e.g., show a warning. - #651 \r\n\r\n Additionally, buttons (links) should be shown to set that language as their preferred language (to enrollment object) and to switch back to their preferred language.\r\n\r\n This is more about the language set via url (query parameter), so that when the user is directed to a material in a different language than in their enrollment object (or profile), then that would be pointed out to them.\r\n\r\n- [x] Store language in the Enrollment object. Think about the case where system has 3 languages sv, en, and fi, user has set the language in profile to sv and course is provided in en and fi. In that case, A+ should be able to remember the language the user likes to read that course and not force to set profile language. - #632\r\n\r\n----\r\n**Comments**\r\n\r\n>**Jaakko Kantoj\u00e4rvi:**\r\nAt least for a profile, so when student logs in, the language is returned to original one.\r\n\r\n\n", "before_files": [{"content": "from django.contrib import messages\nfrom django.core.exceptions import PermissionDenied\nfrom django.http import Http404\nfrom django.shortcuts import get_object_or_404, render_to_response\nfrom django.utils import translation\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.utils.translation import get_language, get_language_info\n\nfrom authorization.permissions import ACCESS\nfrom exercise.cache.content import CachedContent\nfrom lib.helpers import remove_query_param_from_url, update_url_params\nfrom lib.viewbase import BaseTemplateView\nfrom userprofile.viewbase import UserProfileMixin\nfrom .cache.students import CachedStudent\nfrom .exceptions import TranslationNotFound\nfrom .permissions import (\n CourseVisiblePermission,\n CourseModulePermission,\n)\nfrom .models import Course, CourseInstance, CourseModule, UserTagging\n\n\nclass CourseMixin(UserProfileMixin):\n course_kw = \"course_slug\"\n\n def get_resource_objects(self):\n super().get_resource_objects()\n self.course = get_object_or_404(\n Course,\n url=self._get_kwarg(self.course_kw)\n )\n self.is_teacher = self.course.is_teacher(self.request.user)\n self.note(\"course\", \"is_teacher\")\n\n\nclass CourseBaseView(CourseMixin, BaseTemplateView):\n pass\n\n\nclass CourseInstanceBaseMixin(object):\n course_kw = CourseMixin.course_kw\n instance_kw = \"instance_slug\"\n course_permission_classes = (\n CourseVisiblePermission,\n )\n\n def get_permissions(self):\n perms = super().get_permissions()\n perms.extend((Perm() for Perm in self.course_permission_classes))\n return perms\n\n # get_course_instance_object\n\n def get_resource_objects(self):\n super().get_resource_objects()\n user = self.request.user\n instance = self.get_course_instance_object()\n if instance is not None:\n self.instance = instance\n self.course = self.instance.course\n self.content = CachedContent(self.instance)\n self.user_course_data = None\n is_real_user = user.is_authenticated and not user.is_anonymous\n if is_real_user:\n self.user_course_data = self.instance.get_enrollment_for(user)\n self.is_student = self.instance.is_student(user)\n self.is_assistant = self.instance.is_assistant(user)\n self.is_teacher = self.course.is_teacher(user)\n self.is_course_staff = self.is_teacher or self.is_assistant\n self.get_taggings = lambda: CachedStudent(instance, user.id).data['tag_slugs']\n\n self.note(\n \"course\", \"instance\", \"content\", \"user_course_data\", \"is_student\", \"is_assistant\",\n \"is_teacher\", \"is_course_staff\", \"get_taggings\",\n )\n\n # Try to find a language that is defined for this course instance\n # and apply it\n if self.instance.language:\n instance_languages = self.instance.language.strip('|').split('|')\n instance_def_language = instance_languages[0]\n instance_languages = set(instance_languages)\n\n query_language = self.request.GET.get('hl')\n if query_language:\n if query_language[:2] in instance_languages:\n language = query_language\n else:\n raise TranslationNotFound\n else:\n languages = []\n if self.user_course_data and self.user_course_data.language:\n languages.append(self.user_course_data.language)\n if is_real_user and user.userprofile.language:\n languages.append(user.userprofile.language)\n languages.append(get_language())\n\n for lang in languages:\n if lang[:2] in instance_languages:\n language = lang\n break\n else:\n language = instance_def_language\n\n translation.activate(language)\n\n def get_access_mode(self):\n access_mode = super().get_access_mode()\n\n if hasattr(self, 'instance'):\n # Loosen the access mode if instance is public\n show_for = self.instance.view_content_to\n is_public = show_for == CourseInstance.VIEW_ACCESS.PUBLIC\n access_mode_student = access_mode in (ACCESS.STUDENT, ACCESS.ENROLL)\n if is_public and access_mode_student:\n access_mode = ACCESS.ANONYMOUS\n\n return access_mode\n\n\nclass CourseInstanceMixin(CourseInstanceBaseMixin, UserProfileMixin):\n def get_course_instance_object(self):\n return get_object_or_404(\n CourseInstance,\n url=self.kwargs[self.instance_kw],\n course__url=self.kwargs[self.course_kw],\n )\n\n def handle_exception(self, exc):\n if isinstance(exc, TranslationNotFound):\n instance_languages = self.instance.language.strip(\"|\").split(\"|\")\n url = remove_query_param_from_url(self.request.get_full_path(), 'hl')\n for i, lang in enumerate(instance_languages):\n instance_languages[i] = {\"name\": get_language_info(lang)['name'], \"url\": update_url_params(url, {'hl' : lang})}\n return render_to_response('404.html', {'error_msg': str(exc), 'languages': instance_languages}, status=404)\n return super().handle_exception(exc)\n\n\nclass CourseInstanceBaseView(CourseInstanceMixin, BaseTemplateView):\n pass\n\n\nclass EnrollableViewMixin(CourseInstanceMixin):\n access_mode = ACCESS.ENROLL\n\n def get_common_objects(self):\n self.enrolled = self.is_student\n self.enrollable = (\n self.profile\n and self.instance.is_enrollable(self.profile.user)\n )\n self.note('enrolled', 'enrollable')\n\n\nclass CourseModuleBaseMixin(object):\n module_kw = \"module_slug\"\n module_permissions_classes = (\n CourseModulePermission,\n )\n\n def get_permissions(self):\n perms = super().get_permissions()\n perms.extend((Perm() for Perm in self.module_permissions_classes))\n return perms\n\n # get_course_module_object\n\n def get_resource_objects(self):\n super().get_resource_objects()\n self.module = self.get_course_module_object()\n self.note(\"module\")\n\n\nclass CourseModuleMixin(CourseModuleBaseMixin, CourseInstanceMixin):\n def get_course_module_object(self):\n return get_object_or_404(\n CourseModule,\n url=self.kwargs[self.module_kw],\n course_instance=self.instance\n )\n\n\nclass CourseModuleBaseView(CourseModuleMixin, BaseTemplateView):\n pass\n", "path": "course/viewbase.py"}], "after_files": [{"content": "from django.contrib import messages\nfrom django.core.exceptions import PermissionDenied\nfrom django.http import Http404\nfrom django.shortcuts import get_object_or_404, render_to_response\nfrom django.utils import translation\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.utils.translation import get_language, get_language_info\n\nfrom authorization.permissions import ACCESS\nfrom exercise.cache.content import CachedContent\nfrom lib.helpers import remove_query_param_from_url, update_url_params\nfrom lib.viewbase import BaseTemplateView\nfrom userprofile.viewbase import UserProfileMixin\nfrom .cache.students import CachedStudent\nfrom .exceptions import TranslationNotFound\nfrom .permissions import (\n CourseVisiblePermission,\n CourseModulePermission,\n)\nfrom .models import Course, CourseInstance, CourseModule, UserTagging\n\n\nclass CourseMixin(UserProfileMixin):\n course_kw = \"course_slug\"\n\n def get_resource_objects(self):\n super().get_resource_objects()\n self.course = get_object_or_404(\n Course,\n url=self._get_kwarg(self.course_kw)\n )\n self.is_teacher = self.course.is_teacher(self.request.user)\n self.note(\"course\", \"is_teacher\")\n\n\nclass CourseBaseView(CourseMixin, BaseTemplateView):\n pass\n\n\nclass CourseInstanceBaseMixin(object):\n course_kw = CourseMixin.course_kw\n instance_kw = \"instance_slug\"\n course_permission_classes = (\n CourseVisiblePermission,\n )\n\n def get_permissions(self):\n perms = super().get_permissions()\n perms.extend((Perm() for Perm in self.course_permission_classes))\n return perms\n\n # get_course_instance_object\n\n def get_resource_objects(self):\n super().get_resource_objects()\n user = self.request.user\n instance = self.get_course_instance_object()\n if instance is not None:\n self.instance = instance\n self.course = self.instance.course\n self.content = CachedContent(self.instance)\n self.user_course_data = None\n is_real_user = user.is_authenticated and not user.is_anonymous\n if is_real_user:\n self.user_course_data = self.instance.get_enrollment_for(user)\n self.is_student = self.instance.is_student(user)\n self.is_assistant = self.instance.is_assistant(user)\n self.is_teacher = self.course.is_teacher(user)\n self.is_course_staff = self.is_teacher or self.is_assistant\n self.get_taggings = lambda: CachedStudent(instance, user.id).data['tag_slugs']\n self.url_without_language = remove_query_param_from_url(self.request.get_full_path(), 'hl')\n self.query_language = None\n self.user_language = None\n\n self.note(\n \"course\", \"instance\", \"content\", \"user_course_data\", \"is_student\", \"is_assistant\",\n \"is_teacher\", \"is_course_staff\", \"get_taggings\", \"url_without_language\",\n \"query_language\", \"user_language\"\n )\n\n # Try to find a language that is defined for this course instance\n # and apply it\n if self.instance.language:\n instance_languages = self.instance.language.strip('|').split('|')\n instance_def_language = instance_languages[0]\n instance_languages = set(instance_languages)\n\n languages = []\n if self.user_course_data and self.user_course_data.language:\n languages.append(self.user_course_data.language)\n if is_real_user and user.userprofile.language:\n languages.append(user.userprofile.language)\n languages.append(get_language())\n\n query_language = self.request.GET.get('hl')\n if query_language:\n if query_language[:2] in instance_languages:\n language = query_language\n if languages:\n self.user_language = languages[0]\n if self.user_language[:2] != query_language[:2]:\n self.query_language = query_language\n else:\n raise TranslationNotFound\n else:\n for lang in languages:\n if lang[:2] in instance_languages:\n language = lang\n break\n else:\n language = instance_def_language\n\n translation.activate(language)\n\n def get_access_mode(self):\n access_mode = super().get_access_mode()\n\n if hasattr(self, 'instance'):\n # Loosen the access mode if instance is public\n show_for = self.instance.view_content_to\n is_public = show_for == CourseInstance.VIEW_ACCESS.PUBLIC\n access_mode_student = access_mode in (ACCESS.STUDENT, ACCESS.ENROLL)\n if is_public and access_mode_student:\n access_mode = ACCESS.ANONYMOUS\n\n return access_mode\n\n\nclass CourseInstanceMixin(CourseInstanceBaseMixin, UserProfileMixin):\n def get_course_instance_object(self):\n return get_object_or_404(\n CourseInstance,\n url=self.kwargs[self.instance_kw],\n course__url=self.kwargs[self.course_kw],\n )\n\n def handle_exception(self, exc):\n if isinstance(exc, TranslationNotFound):\n instance_languages = self.instance.language.strip(\"|\").split(\"|\")\n url = remove_query_param_from_url(self.request.get_full_path(), 'hl')\n for i, lang in enumerate(instance_languages):\n instance_languages[i] = {\"name\": get_language_info(lang)['name'], \"url\": update_url_params(url, {'hl' : lang})}\n return render_to_response('404.html', {'error_msg': str(exc), 'languages': instance_languages}, status=404)\n return super().handle_exception(exc)\n\n\nclass CourseInstanceBaseView(CourseInstanceMixin, BaseTemplateView):\n pass\n\n\nclass EnrollableViewMixin(CourseInstanceMixin):\n access_mode = ACCESS.ENROLL\n\n def get_common_objects(self):\n self.enrolled = self.is_student\n self.enrollable = (\n self.profile\n and self.instance.is_enrollable(self.profile.user)\n )\n self.note('enrolled', 'enrollable')\n\n\nclass CourseModuleBaseMixin(object):\n module_kw = \"module_slug\"\n module_permissions_classes = (\n CourseModulePermission,\n )\n\n def get_permissions(self):\n perms = super().get_permissions()\n perms.extend((Perm() for Perm in self.module_permissions_classes))\n return perms\n\n # get_course_module_object\n\n def get_resource_objects(self):\n super().get_resource_objects()\n self.module = self.get_course_module_object()\n self.note(\"module\")\n\n\nclass CourseModuleMixin(CourseModuleBaseMixin, CourseInstanceMixin):\n def get_course_module_object(self):\n return get_object_or_404(\n CourseModule,\n url=self.kwargs[self.module_kw],\n course_instance=self.instance\n )\n\n\nclass CourseModuleBaseView(CourseModuleMixin, BaseTemplateView):\n pass\n", "path": "course/viewbase.py"}]}
| 2,294 | 472 |
gh_patches_debug_14097
|
rasdani/github-patches
|
git_diff
|
pretix__pretix-1443
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Required file question breaks validation on edit
Editing an order with a product with a required file question breaks when the customer tries to edit the order. In that case even if the customer already provided a file earlier the validation for the question fails on edit.
Required file question breaks validation on edit
Editing an order with a product with a required file question breaks when the customer tries to edit the order. In that case even if the customer already provided a file earlier the validation for the question fails on edit.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/pretix/base/forms/widgets.py`
Content:
```
1 import os
2
3 from django import forms
4 from django.utils.formats import get_format
5 from django.utils.functional import lazy
6 from django.utils.timezone import now
7 from django.utils.translation import ugettext_lazy as _
8
9 from pretix.base.models import OrderPosition
10 from pretix.multidomain.urlreverse import eventreverse
11
12
13 class DatePickerWidget(forms.DateInput):
14 def __init__(self, attrs=None, date_format=None):
15 attrs = attrs or {}
16 if 'placeholder' in attrs:
17 del attrs['placeholder']
18 date_attrs = dict(attrs)
19 date_attrs.setdefault('class', 'form-control')
20 date_attrs['class'] += ' datepickerfield'
21
22 df = date_format or get_format('DATE_INPUT_FORMATS')[0]
23 date_attrs['placeholder'] = now().replace(
24 year=2000, month=12, day=31, hour=18, minute=0, second=0, microsecond=0
25 ).strftime(df)
26
27 forms.DateInput.__init__(self, date_attrs, date_format)
28
29
30 class TimePickerWidget(forms.TimeInput):
31 def __init__(self, attrs=None, time_format=None):
32 attrs = attrs or {}
33 if 'placeholder' in attrs:
34 del attrs['placeholder']
35 time_attrs = dict(attrs)
36 time_attrs.setdefault('class', 'form-control')
37 time_attrs['class'] += ' timepickerfield'
38
39 tf = time_format or get_format('TIME_INPUT_FORMATS')[0]
40 time_attrs['placeholder'] = now().replace(
41 year=2000, month=12, day=31, hour=18, minute=0, second=0, microsecond=0
42 ).strftime(tf)
43
44 forms.TimeInput.__init__(self, time_attrs, time_format)
45
46
47 class UploadedFileWidget(forms.ClearableFileInput):
48 def __init__(self, *args, **kwargs):
49 self.position = kwargs.pop('position')
50 self.event = kwargs.pop('event')
51 self.answer = kwargs.pop('answer')
52 super().__init__(*args, **kwargs)
53
54 class FakeFile:
55 def __init__(self, file, position, event, answer):
56 self.file = file
57 self.position = position
58 self.event = event
59 self.answer = answer
60
61 def __str__(self):
62 return os.path.basename(self.file.name).split('.', 1)[-1]
63
64 @property
65 def url(self):
66 if isinstance(self.position, OrderPosition):
67 return eventreverse(self.event, 'presale:event.order.download.answer', kwargs={
68 'order': self.position.order.code,
69 'secret': self.position.order.secret,
70 'answer': self.answer.pk,
71 })
72 else:
73 return eventreverse(self.event, 'presale:event.cart.download.answer', kwargs={
74 'answer': self.answer.pk,
75 })
76
77 def format_value(self, value):
78 if self.is_initial(value):
79 return self.FakeFile(value, self.position, self.event, self.answer)
80
81
82 class SplitDateTimePickerWidget(forms.SplitDateTimeWidget):
83 template_name = 'pretixbase/forms/widgets/splitdatetime.html'
84
85 def __init__(self, attrs=None, date_format=None, time_format=None):
86 attrs = attrs or {}
87 if 'placeholder' in attrs:
88 del attrs['placeholder']
89 date_attrs = dict(attrs)
90 time_attrs = dict(attrs)
91 date_attrs.setdefault('class', 'form-control splitdatetimepart')
92 time_attrs.setdefault('class', 'form-control splitdatetimepart')
93 date_attrs.setdefault('autocomplete', 'off')
94 time_attrs.setdefault('autocomplete', 'off')
95 date_attrs['class'] += ' datepickerfield'
96 time_attrs['class'] += ' timepickerfield'
97
98 def date_placeholder():
99 df = date_format or get_format('DATE_INPUT_FORMATS')[0]
100 return now().replace(
101 year=2000, month=12, day=31, hour=18, minute=0, second=0, microsecond=0
102 ).strftime(df)
103
104 def time_placeholder():
105 tf = time_format or get_format('TIME_INPUT_FORMATS')[0]
106 return now().replace(
107 year=2000, month=1, day=1, hour=0, minute=0, second=0, microsecond=0
108 ).strftime(tf)
109
110 date_attrs['placeholder'] = lazy(date_placeholder, str)
111 time_attrs['placeholder'] = lazy(time_placeholder, str)
112
113 widgets = (
114 forms.DateInput(attrs=date_attrs, format=date_format),
115 forms.TimeInput(attrs=time_attrs, format=time_format),
116 )
117 # Skip one hierarchy level
118 forms.MultiWidget.__init__(self, widgets, attrs)
119
120
121 class BusinessBooleanRadio(forms.RadioSelect):
122 def __init__(self, require_business=False, attrs=None):
123 self.require_business = require_business
124 if self.require_business:
125 choices = (
126 ('business', _('Business customer')),
127 )
128 else:
129 choices = (
130 ('individual', _('Individual customer')),
131 ('business', _('Business customer')),
132 )
133 super().__init__(attrs, choices)
134
135 def format_value(self, value):
136 if self.require_business:
137 return 'business'
138 try:
139 return {True: 'business', False: 'individual'}[value]
140 except KeyError:
141 return 'individual'
142
143 def value_from_datadict(self, data, files, name):
144 value = data.get(name)
145 if self.require_business:
146 return True
147 return {
148 'business': True,
149 True: True,
150 'True': True,
151 'individual': False,
152 'False': False,
153 False: False,
154 }.get(value)
155
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/pretix/base/forms/widgets.py b/src/pretix/base/forms/widgets.py
--- a/src/pretix/base/forms/widgets.py
+++ b/src/pretix/base/forms/widgets.py
@@ -46,6 +46,14 @@
class UploadedFileWidget(forms.ClearableFileInput):
def __init__(self, *args, **kwargs):
+ # Browsers can't recognize that the server already has a file uploaded
+ # Don't mark this input as being required if we already have an answer
+ # (this needs to be done via the attrs, otherwise we wouldn't get the "required" star on the field label)
+ attrs = kwargs.get('attrs', {})
+ if kwargs.get('required') and kwargs.get('initial'):
+ attrs.update({'required': None})
+ kwargs.update({'attrs': attrs})
+
self.position = kwargs.pop('position')
self.event = kwargs.pop('event')
self.answer = kwargs.pop('answer')
|
{"golden_diff": "diff --git a/src/pretix/base/forms/widgets.py b/src/pretix/base/forms/widgets.py\n--- a/src/pretix/base/forms/widgets.py\n+++ b/src/pretix/base/forms/widgets.py\n@@ -46,6 +46,14 @@\n \n class UploadedFileWidget(forms.ClearableFileInput):\n def __init__(self, *args, **kwargs):\n+ # Browsers can't recognize that the server already has a file uploaded\n+ # Don't mark this input as being required if we already have an answer\n+ # (this needs to be done via the attrs, otherwise we wouldn't get the \"required\" star on the field label)\n+ attrs = kwargs.get('attrs', {})\n+ if kwargs.get('required') and kwargs.get('initial'):\n+ attrs.update({'required': None})\n+ kwargs.update({'attrs': attrs})\n+\n self.position = kwargs.pop('position')\n self.event = kwargs.pop('event')\n self.answer = kwargs.pop('answer')\n", "issue": "Required file question breaks validation on edit\nEditing an order with a product with a required file question breaks when the customer tries to edit the order. In that case even if the customer already provided a file earlier the validation for the question fails on edit.\nRequired file question breaks validation on edit\nEditing an order with a product with a required file question breaks when the customer tries to edit the order. In that case even if the customer already provided a file earlier the validation for the question fails on edit.\n", "before_files": [{"content": "import os\n\nfrom django import forms\nfrom django.utils.formats import get_format\nfrom django.utils.functional import lazy\nfrom django.utils.timezone import now\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom pretix.base.models import OrderPosition\nfrom pretix.multidomain.urlreverse import eventreverse\n\n\nclass DatePickerWidget(forms.DateInput):\n def __init__(self, attrs=None, date_format=None):\n attrs = attrs or {}\n if 'placeholder' in attrs:\n del attrs['placeholder']\n date_attrs = dict(attrs)\n date_attrs.setdefault('class', 'form-control')\n date_attrs['class'] += ' datepickerfield'\n\n df = date_format or get_format('DATE_INPUT_FORMATS')[0]\n date_attrs['placeholder'] = now().replace(\n year=2000, month=12, day=31, hour=18, minute=0, second=0, microsecond=0\n ).strftime(df)\n\n forms.DateInput.__init__(self, date_attrs, date_format)\n\n\nclass TimePickerWidget(forms.TimeInput):\n def __init__(self, attrs=None, time_format=None):\n attrs = attrs or {}\n if 'placeholder' in attrs:\n del attrs['placeholder']\n time_attrs = dict(attrs)\n time_attrs.setdefault('class', 'form-control')\n time_attrs['class'] += ' timepickerfield'\n\n tf = time_format or get_format('TIME_INPUT_FORMATS')[0]\n time_attrs['placeholder'] = now().replace(\n year=2000, month=12, day=31, hour=18, minute=0, second=0, microsecond=0\n ).strftime(tf)\n\n forms.TimeInput.__init__(self, time_attrs, time_format)\n\n\nclass UploadedFileWidget(forms.ClearableFileInput):\n def __init__(self, *args, **kwargs):\n self.position = kwargs.pop('position')\n self.event = kwargs.pop('event')\n self.answer = kwargs.pop('answer')\n super().__init__(*args, **kwargs)\n\n class FakeFile:\n def __init__(self, file, position, event, answer):\n self.file = file\n self.position = position\n self.event = event\n self.answer = answer\n\n def __str__(self):\n return os.path.basename(self.file.name).split('.', 1)[-1]\n\n @property\n def url(self):\n if isinstance(self.position, OrderPosition):\n return eventreverse(self.event, 'presale:event.order.download.answer', kwargs={\n 'order': self.position.order.code,\n 'secret': self.position.order.secret,\n 'answer': self.answer.pk,\n })\n else:\n return eventreverse(self.event, 'presale:event.cart.download.answer', kwargs={\n 'answer': self.answer.pk,\n })\n\n def format_value(self, value):\n if self.is_initial(value):\n return self.FakeFile(value, self.position, self.event, self.answer)\n\n\nclass SplitDateTimePickerWidget(forms.SplitDateTimeWidget):\n template_name = 'pretixbase/forms/widgets/splitdatetime.html'\n\n def __init__(self, attrs=None, date_format=None, time_format=None):\n attrs = attrs or {}\n if 'placeholder' in attrs:\n del attrs['placeholder']\n date_attrs = dict(attrs)\n time_attrs = dict(attrs)\n date_attrs.setdefault('class', 'form-control splitdatetimepart')\n time_attrs.setdefault('class', 'form-control splitdatetimepart')\n date_attrs.setdefault('autocomplete', 'off')\n time_attrs.setdefault('autocomplete', 'off')\n date_attrs['class'] += ' datepickerfield'\n time_attrs['class'] += ' timepickerfield'\n\n def date_placeholder():\n df = date_format or get_format('DATE_INPUT_FORMATS')[0]\n return now().replace(\n year=2000, month=12, day=31, hour=18, minute=0, second=0, microsecond=0\n ).strftime(df)\n\n def time_placeholder():\n tf = time_format or get_format('TIME_INPUT_FORMATS')[0]\n return now().replace(\n year=2000, month=1, day=1, hour=0, minute=0, second=0, microsecond=0\n ).strftime(tf)\n\n date_attrs['placeholder'] = lazy(date_placeholder, str)\n time_attrs['placeholder'] = lazy(time_placeholder, str)\n\n widgets = (\n forms.DateInput(attrs=date_attrs, format=date_format),\n forms.TimeInput(attrs=time_attrs, format=time_format),\n )\n # Skip one hierarchy level\n forms.MultiWidget.__init__(self, widgets, attrs)\n\n\nclass BusinessBooleanRadio(forms.RadioSelect):\n def __init__(self, require_business=False, attrs=None):\n self.require_business = require_business\n if self.require_business:\n choices = (\n ('business', _('Business customer')),\n )\n else:\n choices = (\n ('individual', _('Individual customer')),\n ('business', _('Business customer')),\n )\n super().__init__(attrs, choices)\n\n def format_value(self, value):\n if self.require_business:\n return 'business'\n try:\n return {True: 'business', False: 'individual'}[value]\n except KeyError:\n return 'individual'\n\n def value_from_datadict(self, data, files, name):\n value = data.get(name)\n if self.require_business:\n return True\n return {\n 'business': True,\n True: True,\n 'True': True,\n 'individual': False,\n 'False': False,\n False: False,\n }.get(value)\n", "path": "src/pretix/base/forms/widgets.py"}], "after_files": [{"content": "import os\n\nfrom django import forms\nfrom django.utils.formats import get_format\nfrom django.utils.functional import lazy\nfrom django.utils.timezone import now\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom pretix.base.models import OrderPosition\nfrom pretix.multidomain.urlreverse import eventreverse\n\n\nclass DatePickerWidget(forms.DateInput):\n def __init__(self, attrs=None, date_format=None):\n attrs = attrs or {}\n if 'placeholder' in attrs:\n del attrs['placeholder']\n date_attrs = dict(attrs)\n date_attrs.setdefault('class', 'form-control')\n date_attrs['class'] += ' datepickerfield'\n\n df = date_format or get_format('DATE_INPUT_FORMATS')[0]\n date_attrs['placeholder'] = now().replace(\n year=2000, month=12, day=31, hour=18, minute=0, second=0, microsecond=0\n ).strftime(df)\n\n forms.DateInput.__init__(self, date_attrs, date_format)\n\n\nclass TimePickerWidget(forms.TimeInput):\n def __init__(self, attrs=None, time_format=None):\n attrs = attrs or {}\n if 'placeholder' in attrs:\n del attrs['placeholder']\n time_attrs = dict(attrs)\n time_attrs.setdefault('class', 'form-control')\n time_attrs['class'] += ' timepickerfield'\n\n tf = time_format or get_format('TIME_INPUT_FORMATS')[0]\n time_attrs['placeholder'] = now().replace(\n year=2000, month=12, day=31, hour=18, minute=0, second=0, microsecond=0\n ).strftime(tf)\n\n forms.TimeInput.__init__(self, time_attrs, time_format)\n\n\nclass UploadedFileWidget(forms.ClearableFileInput):\n def __init__(self, *args, **kwargs):\n # Browsers can't recognize that the server already has a file uploaded\n # Don't mark this input as being required if we already have an answer\n # (this needs to be done via the attrs, otherwise we wouldn't get the \"required\" star on the field label)\n attrs = kwargs.get('attrs', {})\n if kwargs.get('required') and kwargs.get('initial'):\n attrs.update({'required': None})\n kwargs.update({'attrs': attrs})\n\n self.position = kwargs.pop('position')\n self.event = kwargs.pop('event')\n self.answer = kwargs.pop('answer')\n super().__init__(*args, **kwargs)\n\n class FakeFile:\n def __init__(self, file, position, event, answer):\n self.file = file\n self.position = position\n self.event = event\n self.answer = answer\n\n def __str__(self):\n return os.path.basename(self.file.name).split('.', 1)[-1]\n\n @property\n def url(self):\n if isinstance(self.position, OrderPosition):\n return eventreverse(self.event, 'presale:event.order.download.answer', kwargs={\n 'order': self.position.order.code,\n 'secret': self.position.order.secret,\n 'answer': self.answer.pk,\n })\n else:\n return eventreverse(self.event, 'presale:event.cart.download.answer', kwargs={\n 'answer': self.answer.pk,\n })\n\n def format_value(self, value):\n if self.is_initial(value):\n return self.FakeFile(value, self.position, self.event, self.answer)\n\n\nclass SplitDateTimePickerWidget(forms.SplitDateTimeWidget):\n template_name = 'pretixbase/forms/widgets/splitdatetime.html'\n\n def __init__(self, attrs=None, date_format=None, time_format=None):\n attrs = attrs or {}\n if 'placeholder' in attrs:\n del attrs['placeholder']\n date_attrs = dict(attrs)\n time_attrs = dict(attrs)\n date_attrs.setdefault('class', 'form-control splitdatetimepart')\n time_attrs.setdefault('class', 'form-control splitdatetimepart')\n date_attrs.setdefault('autocomplete', 'off')\n time_attrs.setdefault('autocomplete', 'off')\n date_attrs['class'] += ' datepickerfield'\n time_attrs['class'] += ' timepickerfield'\n\n def date_placeholder():\n df = date_format or get_format('DATE_INPUT_FORMATS')[0]\n return now().replace(\n year=2000, month=12, day=31, hour=18, minute=0, second=0, microsecond=0\n ).strftime(df)\n\n def time_placeholder():\n tf = time_format or get_format('TIME_INPUT_FORMATS')[0]\n return now().replace(\n year=2000, month=1, day=1, hour=0, minute=0, second=0, microsecond=0\n ).strftime(tf)\n\n date_attrs['placeholder'] = lazy(date_placeholder, str)\n time_attrs['placeholder'] = lazy(time_placeholder, str)\n\n widgets = (\n forms.DateInput(attrs=date_attrs, format=date_format),\n forms.TimeInput(attrs=time_attrs, format=time_format),\n )\n # Skip one hierarchy level\n forms.MultiWidget.__init__(self, widgets, attrs)\n\n\nclass BusinessBooleanRadio(forms.RadioSelect):\n def __init__(self, require_business=False, attrs=None):\n self.require_business = require_business\n if self.require_business:\n choices = (\n ('business', _('Business customer')),\n )\n else:\n choices = (\n ('individual', _('Individual customer')),\n ('business', _('Business customer')),\n )\n super().__init__(attrs, choices)\n\n def format_value(self, value):\n if self.require_business:\n return 'business'\n try:\n return {True: 'business', False: 'individual'}[value]\n except KeyError:\n return 'individual'\n\n def value_from_datadict(self, data, files, name):\n value = data.get(name)\n if self.require_business:\n return True\n return {\n 'business': True,\n True: True,\n 'True': True,\n 'individual': False,\n 'False': False,\n False: False,\n }.get(value)\n", "path": "src/pretix/base/forms/widgets.py"}]}
| 1,933 | 214 |
gh_patches_debug_31598
|
rasdani/github-patches
|
git_diff
|
getsentry__sentry-3191
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Infinite loop in sqlquerycount for MySQL
```
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/debug/utils/patch_context.py", line 27, in wrapped
May 05 14:36:41 sentry-1-001 sentry[39317]: return self.callback(self.func, *args, **kwargs)
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/utils/performance/sqlquerycount.py", line 57, in cursor
May 05 14:36:41 sentry-1-001 sentry[39317]: result = func(self, *args, **kwargs)
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/debug/utils/patch_context.py", line 27, in wrapped
May 05 14:36:41 sentry-1-001 sentry[39317]: return self.callback(self.func, *args, **kwargs)
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/utils/performance/sqlquerycount.py", line 57, in cursor
May 05 14:36:41 sentry-1-001 sentry[39317]: result = func(self, *args, **kwargs)
(patch_context.py)
22 def patch(self):
23 func = getattr(self.target, self.attr)
24
25 def wrapped(*args, **kwargs):
26 __traceback_hide__ = True # NOQA
27 return self.callback(self.func, *args, **kwargs)
28
29 wrapped.__name__ = func.__name__
30 if hasattr(func, '__doc__'):
31 wrapped.__doc__ = func.__doc__
32 if hasattr(func, '__module__'):
33 wrapped.__module__ = func.__module__
(sqlquerycount.py)
53
54
55 def get_cursor_wrapper(state):
56 def cursor(func, self, *args, **kwargs):
57 result = func(self, *args, **kwargs)
58
59 return CursorWrapper(result, self, state)
60 return cursor
61
```
```
May 05 14:36:41 sentry-1-001 sentry[39317]: [ERROR] maximum recursion depth exceeded in __instancecheck__
May 05 14:36:41 sentry-1-001 sentry[39317]: Traceback (most recent call last):
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/options/store.py", line 165, in get_store
May 05 14:36:41 sentry-1-001 sentry[39317]: value = self.model.objects.get(key=key.name).value
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/manager.py", line 151, in get
May 05 14:36:41 sentry-1-001 sentry[39317]: return self.get_queryset().get(*args, **kwargs)
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/query.py", line 304, in get
May 05 14:36:41 sentry-1-001 sentry[39317]: num = len(clone)
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/query.py", line 77, in __len__
May 05 14:36:41 sentry-1-001 sentry[39317]: self._fetch_all()
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/query.py", line 857, in _fetch_all
May 05 14:36:41 sentry-1-001 sentry[39317]: self._result_cache = list(self.iterator())
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/query.py", line 220, in iterator
May 05 14:36:41 sentry-1-001 sentry[39317]: for row in compiler.results_iter():
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/sql/compiler.py", line 713, in results_iter
May 05 14:36:41 sentry-1-001 sentry[39317]: for rows in self.execute_sql(MULTI):
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/sql/compiler.py", line 785, in execute_sql
May 05 14:36:41 sentry-1-001 sentry[39317]: cursor = self.connection.cursor()
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/debug/utils/patch_context.py", line 27, in wrapped
May 05 14:36:41 sentry-1-001 sentry[39317]: return self.callback(self.func, *args, **kwargs)
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/utils/performance/sqlquerycount.py", line 57, in cursor
May 05 14:36:41 sentry-1-001 sentry[39317]: result = func(self, *args, **kwargs)
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/debug/utils/patch_context.py", line 27, in wrapped
May 05 14:36:41 sentry-1-001 sentry[39317]: return self.callback(self.func, *args, **kwargs)
May 05 14:36:41 sentry-1-001 sentry[39317]: File "/opt/sentry/venv/lib/python2.7/site-packages/sentry/utils/performance/sqlquerycount.py", line 57, in cursor
May 05 14:36:41 sentry-1-001 sentry[39317]: result = func(self, *args, **kwargs)
```
@mitsuhiko @dcramer
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/sentry/debug/utils/patch_context.py`
Content:
```
1 from __future__ import absolute_import
2
3 from sentry.utils.imports import import_string
4
5
6 class PatchContext(object):
7 def __init__(self, target, callback):
8 target, attr = target.rsplit('.', 1)
9 target = import_string(target)
10 self.func = getattr(target, attr)
11 self.target = target
12 self.attr = attr
13 self.callback = callback
14
15 def __enter__(self):
16 self.patch()
17 return self
18
19 def __exit__(self, exc_type, exc_value, traceback):
20 self.unpatch()
21
22 def patch(self):
23 func = getattr(self.target, self.attr)
24
25 def wrapped(*args, **kwargs):
26 __traceback_hide__ = True # NOQA
27 return self.callback(self.func, *args, **kwargs)
28
29 wrapped.__name__ = func.__name__
30 if hasattr(func, '__doc__'):
31 wrapped.__doc__ = func.__doc__
32 if hasattr(func, '__module__'):
33 wrapped.__module__ = func.__module__
34
35 setattr(self.target, self.attr, wrapped)
36
37 def unpatch(self):
38 setattr(self.target, self.attr, self.func)
39
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/sentry/debug/utils/patch_context.py b/src/sentry/debug/utils/patch_context.py
--- a/src/sentry/debug/utils/patch_context.py
+++ b/src/sentry/debug/utils/patch_context.py
@@ -1,5 +1,6 @@
from __future__ import absolute_import
+from threading import Lock
from sentry.utils.imports import import_string
@@ -7,10 +8,12 @@
def __init__(self, target, callback):
target, attr = target.rsplit('.', 1)
target = import_string(target)
- self.func = getattr(target, attr)
self.target = target
self.attr = attr
self.callback = callback
+ self._lock = Lock()
+ with self._lock:
+ self.func = getattr(target, attr)
def __enter__(self):
self.patch()
@@ -20,19 +23,21 @@
self.unpatch()
def patch(self):
- func = getattr(self.target, self.attr)
+ with self._lock:
+ func = getattr(self.target, self.attr)
- def wrapped(*args, **kwargs):
- __traceback_hide__ = True # NOQA
- return self.callback(self.func, *args, **kwargs)
+ def wrapped(*args, **kwargs):
+ __traceback_hide__ = True # NOQA
+ return self.callback(self.func, *args, **kwargs)
- wrapped.__name__ = func.__name__
- if hasattr(func, '__doc__'):
- wrapped.__doc__ = func.__doc__
- if hasattr(func, '__module__'):
- wrapped.__module__ = func.__module__
+ wrapped.__name__ = func.__name__
+ if hasattr(func, '__doc__'):
+ wrapped.__doc__ = func.__doc__
+ if hasattr(func, '__module__'):
+ wrapped.__module__ = func.__module__
- setattr(self.target, self.attr, wrapped)
+ setattr(self.target, self.attr, wrapped)
def unpatch(self):
- setattr(self.target, self.attr, self.func)
+ with self._lock:
+ setattr(self.target, self.attr, self.func)
|
{"golden_diff": "diff --git a/src/sentry/debug/utils/patch_context.py b/src/sentry/debug/utils/patch_context.py\n--- a/src/sentry/debug/utils/patch_context.py\n+++ b/src/sentry/debug/utils/patch_context.py\n@@ -1,5 +1,6 @@\n from __future__ import absolute_import\n \n+from threading import Lock\n from sentry.utils.imports import import_string\n \n \n@@ -7,10 +8,12 @@\n def __init__(self, target, callback):\n target, attr = target.rsplit('.', 1)\n target = import_string(target)\n- self.func = getattr(target, attr)\n self.target = target\n self.attr = attr\n self.callback = callback\n+ self._lock = Lock()\n+ with self._lock:\n+ self.func = getattr(target, attr)\n \n def __enter__(self):\n self.patch()\n@@ -20,19 +23,21 @@\n self.unpatch()\n \n def patch(self):\n- func = getattr(self.target, self.attr)\n+ with self._lock:\n+ func = getattr(self.target, self.attr)\n \n- def wrapped(*args, **kwargs):\n- __traceback_hide__ = True # NOQA\n- return self.callback(self.func, *args, **kwargs)\n+ def wrapped(*args, **kwargs):\n+ __traceback_hide__ = True # NOQA\n+ return self.callback(self.func, *args, **kwargs)\n \n- wrapped.__name__ = func.__name__\n- if hasattr(func, '__doc__'):\n- wrapped.__doc__ = func.__doc__\n- if hasattr(func, '__module__'):\n- wrapped.__module__ = func.__module__\n+ wrapped.__name__ = func.__name__\n+ if hasattr(func, '__doc__'):\n+ wrapped.__doc__ = func.__doc__\n+ if hasattr(func, '__module__'):\n+ wrapped.__module__ = func.__module__\n \n- setattr(self.target, self.attr, wrapped)\n+ setattr(self.target, self.attr, wrapped)\n \n def unpatch(self):\n- setattr(self.target, self.attr, self.func)\n+ with self._lock:\n+ setattr(self.target, self.attr, self.func)\n", "issue": "Infinite loop in sqlquerycount for MySQL\n```\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/debug/utils/patch_context.py\", line 27, in wrapped \nMay 05 14:36:41 sentry-1-001 sentry[39317]: return self.callback(self.func, *args, **kwargs) \nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/utils/performance/sqlquerycount.py\", line 57, in cursor \nMay 05 14:36:41 sentry-1-001 sentry[39317]: result = func(self, *args, **kwargs) \nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/debug/utils/patch_context.py\", line 27, in wrapped \nMay 05 14:36:41 sentry-1-001 sentry[39317]: return self.callback(self.func, *args, **kwargs) \nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/utils/performance/sqlquerycount.py\", line 57, in cursor \nMay 05 14:36:41 sentry-1-001 sentry[39317]: result = func(self, *args, **kwargs)\n\n(patch_context.py)\n\n22 def patch(self): \n23 func = getattr(self.target, self.attr) \n24 \n25 def wrapped(*args, **kwargs): \n26 __traceback_hide__ = True # NOQA \n27 return self.callback(self.func, *args, **kwargs) \n28 \n29 wrapped.__name__ = func.__name__ \n30 if hasattr(func, '__doc__'): \n31 wrapped.__doc__ = func.__doc__ \n32 if hasattr(func, '__module__'): \n33 wrapped.__module__ = func.__module__\n\n(sqlquerycount.py)\n\n53 \n54 \n55 def get_cursor_wrapper(state): \n56 def cursor(func, self, *args, **kwargs): \n57 result = func(self, *args, **kwargs) \n58 \n59 return CursorWrapper(result, self, state) \n60 return cursor \n61\n```\n\n```\nMay 05 14:36:41 sentry-1-001 sentry[39317]: [ERROR] maximum recursion depth exceeded in __instancecheck__\nMay 05 14:36:41 sentry-1-001 sentry[39317]: Traceback (most recent call last):\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/options/store.py\", line 165, in get_store\nMay 05 14:36:41 sentry-1-001 sentry[39317]: value = self.model.objects.get(key=key.name).value\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/manager.py\", line 151, in get\nMay 05 14:36:41 sentry-1-001 sentry[39317]: return self.get_queryset().get(*args, **kwargs)\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/query.py\", line 304, in get\nMay 05 14:36:41 sentry-1-001 sentry[39317]: num = len(clone)\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/query.py\", line 77, in __len__\nMay 05 14:36:41 sentry-1-001 sentry[39317]: self._fetch_all()\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/query.py\", line 857, in _fetch_all\nMay 05 14:36:41 sentry-1-001 sentry[39317]: self._result_cache = list(self.iterator())\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/query.py\", line 220, in iterator\nMay 05 14:36:41 sentry-1-001 sentry[39317]: for row in compiler.results_iter():\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/sql/compiler.py\", line 713, in results_iter\nMay 05 14:36:41 sentry-1-001 sentry[39317]: for rows in self.execute_sql(MULTI):\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/../django/db/models/sql/compiler.py\", line 785, in execute_sql\nMay 05 14:36:41 sentry-1-001 sentry[39317]: cursor = self.connection.cursor()\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/debug/utils/patch_context.py\", line 27, in wrapped\nMay 05 14:36:41 sentry-1-001 sentry[39317]: return self.callback(self.func, *args, **kwargs)\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/utils/performance/sqlquerycount.py\", line 57, in cursor\nMay 05 14:36:41 sentry-1-001 sentry[39317]: result = func(self, *args, **kwargs)\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/debug/utils/patch_context.py\", line 27, in wrapped\nMay 05 14:36:41 sentry-1-001 sentry[39317]: return self.callback(self.func, *args, **kwargs)\nMay 05 14:36:41 sentry-1-001 sentry[39317]: File \"/opt/sentry/venv/lib/python2.7/site-packages/sentry/utils/performance/sqlquerycount.py\", line 57, in cursor\nMay 05 14:36:41 sentry-1-001 sentry[39317]: result = func(self, *args, **kwargs)\n```\n\n@mitsuhiko @dcramer \n\n", "before_files": [{"content": "from __future__ import absolute_import\n\nfrom sentry.utils.imports import import_string\n\n\nclass PatchContext(object):\n def __init__(self, target, callback):\n target, attr = target.rsplit('.', 1)\n target = import_string(target)\n self.func = getattr(target, attr)\n self.target = target\n self.attr = attr\n self.callback = callback\n\n def __enter__(self):\n self.patch()\n return self\n\n def __exit__(self, exc_type, exc_value, traceback):\n self.unpatch()\n\n def patch(self):\n func = getattr(self.target, self.attr)\n\n def wrapped(*args, **kwargs):\n __traceback_hide__ = True # NOQA\n return self.callback(self.func, *args, **kwargs)\n\n wrapped.__name__ = func.__name__\n if hasattr(func, '__doc__'):\n wrapped.__doc__ = func.__doc__\n if hasattr(func, '__module__'):\n wrapped.__module__ = func.__module__\n\n setattr(self.target, self.attr, wrapped)\n\n def unpatch(self):\n setattr(self.target, self.attr, self.func)\n", "path": "src/sentry/debug/utils/patch_context.py"}], "after_files": [{"content": "from __future__ import absolute_import\n\nfrom threading import Lock\nfrom sentry.utils.imports import import_string\n\n\nclass PatchContext(object):\n def __init__(self, target, callback):\n target, attr = target.rsplit('.', 1)\n target = import_string(target)\n self.target = target\n self.attr = attr\n self.callback = callback\n self._lock = Lock()\n with self._lock:\n self.func = getattr(target, attr)\n\n def __enter__(self):\n self.patch()\n return self\n\n def __exit__(self, exc_type, exc_value, traceback):\n self.unpatch()\n\n def patch(self):\n with self._lock:\n func = getattr(self.target, self.attr)\n\n def wrapped(*args, **kwargs):\n __traceback_hide__ = True # NOQA\n return self.callback(self.func, *args, **kwargs)\n\n wrapped.__name__ = func.__name__\n if hasattr(func, '__doc__'):\n wrapped.__doc__ = func.__doc__\n if hasattr(func, '__module__'):\n wrapped.__module__ = func.__module__\n\n setattr(self.target, self.attr, wrapped)\n\n def unpatch(self):\n with self._lock:\n setattr(self.target, self.attr, self.func)\n", "path": "src/sentry/debug/utils/patch_context.py"}]}
| 2,535 | 489 |
gh_patches_debug_4496
|
rasdani/github-patches
|
git_diff
|
pycontribs__jira-92
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
jiraclient is broken for non-oauth logins by commit 292597c
See https://github.com/pycontribs/jira/commit/292597c573e976c7da42ce570a81086fae301166#diff-30139c45274d7869e2ee349578b0e30cR247
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `jira/jirashell.py`
Content:
```
1 #!/usr/bin/env python
2
3 """
4 Starts an interactive JIRA session in an ipython terminal. Script arguments
5 support changing the server and a persistent authentication over HTTP BASIC.
6 """
7
8 import sys
9 try:
10 import configparser
11 except:
12 from six.moves import configparser
13 from six.moves import input
14
15 from six.moves.urllib.parse import parse_qsl
16
17 import argparse
18 from getpass import getpass
19 from sys import exit
20 import os
21 import requests
22 from oauthlib.oauth1 import SIGNATURE_RSA
23 from requests_oauthlib import OAuth1
24
25 import webbrowser
26 from jira import JIRA, __version__
27
28 CONFIG_PATH = os.path.join(
29 os.path.expanduser('~'), '.jira-python', 'jirashell.ini')
30
31
32 def oauth_dance(server, consumer_key, key_cert_data, print_tokens=False, verify=None):
33 if verify is None:
34 verify = server.startswith('https')
35
36 # step 1: get request tokens
37 oauth = OAuth1(
38 consumer_key, signature_method=SIGNATURE_RSA, rsa_key=key_cert_data)
39 r = requests.post(
40 server + '/plugins/servlet/oauth/request-token', verify=verify, auth=oauth)
41 request = dict(parse_qsl(r.text))
42 request_token = request['oauth_token']
43 request_token_secret = request['oauth_token_secret']
44 if print_tokens:
45 print("Request tokens received.")
46 print(" Request token: {}".format(request_token))
47 print(" Request token secret: {}".format(request_token_secret))
48
49 # step 2: prompt user to validate
50 auth_url = '{}/plugins/servlet/oauth/authorize?oauth_token={}'.format(
51 server, request_token)
52 if print_tokens:
53 print(
54 "Please visit this URL to authorize the OAuth request:\n\t{}".format(auth_url))
55 else:
56 webbrowser.open_new(auth_url)
57 print(
58 "Your browser is opening the OAuth authorization for this client session.")
59
60 approved = input(
61 'Have you authorized this program to connect on your behalf to {}? (y/n)'.format(server))
62
63 if approved.lower() != 'y':
64 exit(
65 'Abandoning OAuth dance. Your partner faceplants. The audience boos. You feel shame.')
66
67 # step 3: get access tokens for validated user
68 oauth = OAuth1(consumer_key,
69 signature_method=SIGNATURE_RSA,
70 rsa_key=key_cert_data,
71 resource_owner_key=request_token,
72 resource_owner_secret=request_token_secret
73 )
74 r = requests.post(
75 server + '/plugins/servlet/oauth/access-token', verify=verify, auth=oauth)
76 access = dict(parse_qsl(r.text))
77
78 if print_tokens:
79 print("Access tokens received.")
80 print(" Access token: {}".format(access['oauth_token']))
81 print(" Access token secret: {}".format(
82 access['oauth_token_secret']))
83
84 return {
85 'access_token': access['oauth_token'],
86 'access_token_secret': access['oauth_token_secret'],
87 'consumer_key': consumer_key,
88 'key_cert': key_cert_data,
89 }
90
91
92 def process_config():
93 if not os.path.exists(CONFIG_PATH):
94 return {}, {}, {}
95
96 parser = configparser.ConfigParser()
97 try:
98 parser.read(CONFIG_PATH)
99 except configparser.ParsingError as err:
100 print("Couldn't read config file at path: {}".format(
101 CONFIG_PATH))
102 raise
103
104 if parser.has_section('options'):
105 options = {}
106 for option, value in parser.items('options'):
107 if option in ("verify", "async"):
108 value = parser.getboolean('options', option)
109 options[option] = value
110 else:
111 options = {}
112
113 if parser.has_section('basic_auth'):
114 basic_auth = dict(parser.items('basic_auth'))
115 else:
116 basic_auth = {}
117
118 if parser.has_section('oauth'):
119 oauth = dict(parser.items('oauth'))
120 else:
121 oauth = {}
122
123 return options, basic_auth, oauth
124
125
126 def process_command_line():
127 parser = argparse.ArgumentParser(
128 description='Start an interactive JIRA shell with the REST API.')
129 jira_group = parser.add_argument_group('JIRA server connection options')
130 jira_group.add_argument('-s', '--server',
131 help='The JIRA instance to connect to, including context path.')
132 jira_group.add_argument('-r', '--rest-path',
133 help='The root path of the REST API to use.')
134 jira_group.add_argument('-v', '--rest-api-version',
135 help='The version of the API under the specified name.')
136
137 jira_group.add_argument('--no-verify', action='store_true',
138 help='do not verify the ssl certificate')
139
140 basic_auth_group = parser.add_argument_group('BASIC auth options')
141 basic_auth_group.add_argument('-u', '--username',
142 help='The username to connect to this JIRA instance with.')
143 basic_auth_group.add_argument('-p', '--password',
144 help='The password associated with this user.')
145 basic_auth_group.add_argument('-P', '--prompt-for-password', action='store_true',
146 help='Prompt for the password at the command line.')
147
148 oauth_group = parser.add_argument_group('OAuth options')
149 oauth_group.add_argument('-od', '--oauth-dance', action='store_true',
150 help='Start a 3-legged OAuth authentication dance with JIRA.')
151 oauth_group.add_argument('-ck', '--consumer-key',
152 help='OAuth consumer key.')
153 oauth_group.add_argument('-k', '--key-cert',
154 help='Private key to sign OAuth requests with (should be the pair of the public key\
155 configured in the JIRA application link)')
156 oauth_group.add_argument('-pt', '--print-tokens', action='store_true',
157 help='Print the negotiated OAuth tokens as they are retrieved.')
158
159 oauth_already_group = parser.add_argument_group(
160 'OAuth options for already-authenticated access tokens')
161 oauth_already_group.add_argument('-at', '--access-token',
162 help='OAuth access token for the user.')
163 oauth_already_group.add_argument('-ats', '--access-token-secret',
164 help='Secret for the OAuth access token.')
165
166 args = parser.parse_args()
167
168 options = {}
169 if args.server:
170 options['server'] = args.server
171
172 if args.rest_path:
173 options['rest_path'] = args.rest_path
174
175 if args.rest_api_version:
176 options['rest_api_version'] = args.rest_api_version
177
178 options['verify'] = True
179 if args.no_verify:
180 options['verify'] = False
181
182 if args.prompt_for_password:
183 args.password = getpass()
184
185 basic_auth = {}
186 if args.username:
187 basic_auth['username'] = args.username
188
189 if args.password:
190 basic_auth['password'] = args.password
191
192 key_cert_data = None
193 if args.key_cert:
194 with open(args.key_cert, 'r') as key_cert_file:
195 key_cert_data = key_cert_file.read()
196
197 oauth = {
198 'oauth_dance': False,
199 }
200 if args.oauth_dance:
201 oauth = {
202 'oauth_dance': True,
203 'consumer_key': args.consumer_key,
204 'key_cert': key_cert_data,
205 'print_tokens': args.print_tokens,
206 }
207 elif args.access_token and args.access_token_secret and args.consumer_key and args.key_cert:
208 oauth = {
209 'access_token': args.access_token,
210 'oauth_dance': False,
211 'access_token_secret': args.access_token_secret,
212 'consumer_key': args.consumer_key,
213 'key_cert': key_cert_data,
214 }
215
216 return options, basic_auth, oauth
217
218
219 def get_config():
220 options, basic_auth, oauth = process_config()
221
222 cmd_options, cmd_basic_auth, cmd_oauth = process_command_line()
223
224 options.update(cmd_options)
225 basic_auth.update(cmd_basic_auth)
226 oauth.update(cmd_oauth)
227
228 return options, basic_auth, oauth
229
230
231 def main():
232 try:
233 get_ipython
234 except NameError:
235 pass
236 else:
237 exit("Running ipython inside ipython isn't supported. :(")
238
239 options, basic_auth, oauth = get_config()
240
241 if basic_auth:
242 basic_auth = (basic_auth['username'], basic_auth['password'])
243
244 if oauth['oauth_dance']:
245 oauth = oauth_dance(
246 options['server'], oauth['consumer_key'], oauth['key_cert'], oauth['print_tokens'], options['verify'])
247
248 jira = JIRA(options=options, basic_auth=basic_auth, oauth=oauth)
249
250 from IPython.frontend.terminal.embed import InteractiveShellEmbed
251
252 ipshell = InteractiveShellEmbed(
253 banner1='<JIRA Shell ' + __version__ + ' (' + jira.client_info() + ')>')
254 ipshell("*** JIRA shell active; client is in 'jira'."
255 ' Press Ctrl-D to exit.')
256
257 if __name__ == '__main__':
258 status = main()
259 exit(status)
260
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/jira/jirashell.py b/jira/jirashell.py
--- a/jira/jirashell.py
+++ b/jira/jirashell.py
@@ -244,6 +244,8 @@
if oauth['oauth_dance']:
oauth = oauth_dance(
options['server'], oauth['consumer_key'], oauth['key_cert'], oauth['print_tokens'], options['verify'])
+ else:
+ oauth = None
jira = JIRA(options=options, basic_auth=basic_auth, oauth=oauth)
|
{"golden_diff": "diff --git a/jira/jirashell.py b/jira/jirashell.py\n--- a/jira/jirashell.py\n+++ b/jira/jirashell.py\n@@ -244,6 +244,8 @@\n if oauth['oauth_dance']:\n oauth = oauth_dance(\n options['server'], oauth['consumer_key'], oauth['key_cert'], oauth['print_tokens'], options['verify'])\n+ else:\n+ oauth = None\n \n jira = JIRA(options=options, basic_auth=basic_auth, oauth=oauth)\n", "issue": "jiraclient is broken for non-oauth logins by commit 292597c\nSee https://github.com/pycontribs/jira/commit/292597c573e976c7da42ce570a81086fae301166#diff-30139c45274d7869e2ee349578b0e30cR247\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\n\"\"\"\nStarts an interactive JIRA session in an ipython terminal. Script arguments\nsupport changing the server and a persistent authentication over HTTP BASIC.\n\"\"\"\n\nimport sys\ntry:\n import configparser\nexcept:\n from six.moves import configparser\n from six.moves import input\n\nfrom six.moves.urllib.parse import parse_qsl\n\nimport argparse\nfrom getpass import getpass\nfrom sys import exit\nimport os\nimport requests\nfrom oauthlib.oauth1 import SIGNATURE_RSA\nfrom requests_oauthlib import OAuth1\n\nimport webbrowser\nfrom jira import JIRA, __version__\n\nCONFIG_PATH = os.path.join(\n os.path.expanduser('~'), '.jira-python', 'jirashell.ini')\n\n\ndef oauth_dance(server, consumer_key, key_cert_data, print_tokens=False, verify=None):\n if verify is None:\n verify = server.startswith('https')\n\n # step 1: get request tokens\n oauth = OAuth1(\n consumer_key, signature_method=SIGNATURE_RSA, rsa_key=key_cert_data)\n r = requests.post(\n server + '/plugins/servlet/oauth/request-token', verify=verify, auth=oauth)\n request = dict(parse_qsl(r.text))\n request_token = request['oauth_token']\n request_token_secret = request['oauth_token_secret']\n if print_tokens:\n print(\"Request tokens received.\")\n print(\" Request token: {}\".format(request_token))\n print(\" Request token secret: {}\".format(request_token_secret))\n\n # step 2: prompt user to validate\n auth_url = '{}/plugins/servlet/oauth/authorize?oauth_token={}'.format(\n server, request_token)\n if print_tokens:\n print(\n \"Please visit this URL to authorize the OAuth request:\\n\\t{}\".format(auth_url))\n else:\n webbrowser.open_new(auth_url)\n print(\n \"Your browser is opening the OAuth authorization for this client session.\")\n\n approved = input(\n 'Have you authorized this program to connect on your behalf to {}? (y/n)'.format(server))\n\n if approved.lower() != 'y':\n exit(\n 'Abandoning OAuth dance. Your partner faceplants. The audience boos. You feel shame.')\n\n # step 3: get access tokens for validated user\n oauth = OAuth1(consumer_key,\n signature_method=SIGNATURE_RSA,\n rsa_key=key_cert_data,\n resource_owner_key=request_token,\n resource_owner_secret=request_token_secret\n )\n r = requests.post(\n server + '/plugins/servlet/oauth/access-token', verify=verify, auth=oauth)\n access = dict(parse_qsl(r.text))\n\n if print_tokens:\n print(\"Access tokens received.\")\n print(\" Access token: {}\".format(access['oauth_token']))\n print(\" Access token secret: {}\".format(\n access['oauth_token_secret']))\n\n return {\n 'access_token': access['oauth_token'],\n 'access_token_secret': access['oauth_token_secret'],\n 'consumer_key': consumer_key,\n 'key_cert': key_cert_data,\n }\n\n\ndef process_config():\n if not os.path.exists(CONFIG_PATH):\n return {}, {}, {}\n\n parser = configparser.ConfigParser()\n try:\n parser.read(CONFIG_PATH)\n except configparser.ParsingError as err:\n print(\"Couldn't read config file at path: {}\".format(\n CONFIG_PATH))\n raise\n\n if parser.has_section('options'):\n options = {}\n for option, value in parser.items('options'):\n if option in (\"verify\", \"async\"):\n value = parser.getboolean('options', option)\n options[option] = value\n else:\n options = {}\n\n if parser.has_section('basic_auth'):\n basic_auth = dict(parser.items('basic_auth'))\n else:\n basic_auth = {}\n\n if parser.has_section('oauth'):\n oauth = dict(parser.items('oauth'))\n else:\n oauth = {}\n\n return options, basic_auth, oauth\n\n\ndef process_command_line():\n parser = argparse.ArgumentParser(\n description='Start an interactive JIRA shell with the REST API.')\n jira_group = parser.add_argument_group('JIRA server connection options')\n jira_group.add_argument('-s', '--server',\n help='The JIRA instance to connect to, including context path.')\n jira_group.add_argument('-r', '--rest-path',\n help='The root path of the REST API to use.')\n jira_group.add_argument('-v', '--rest-api-version',\n help='The version of the API under the specified name.')\n\n jira_group.add_argument('--no-verify', action='store_true',\n help='do not verify the ssl certificate')\n\n basic_auth_group = parser.add_argument_group('BASIC auth options')\n basic_auth_group.add_argument('-u', '--username',\n help='The username to connect to this JIRA instance with.')\n basic_auth_group.add_argument('-p', '--password',\n help='The password associated with this user.')\n basic_auth_group.add_argument('-P', '--prompt-for-password', action='store_true',\n help='Prompt for the password at the command line.')\n\n oauth_group = parser.add_argument_group('OAuth options')\n oauth_group.add_argument('-od', '--oauth-dance', action='store_true',\n help='Start a 3-legged OAuth authentication dance with JIRA.')\n oauth_group.add_argument('-ck', '--consumer-key',\n help='OAuth consumer key.')\n oauth_group.add_argument('-k', '--key-cert',\n help='Private key to sign OAuth requests with (should be the pair of the public key\\\n configured in the JIRA application link)')\n oauth_group.add_argument('-pt', '--print-tokens', action='store_true',\n help='Print the negotiated OAuth tokens as they are retrieved.')\n\n oauth_already_group = parser.add_argument_group(\n 'OAuth options for already-authenticated access tokens')\n oauth_already_group.add_argument('-at', '--access-token',\n help='OAuth access token for the user.')\n oauth_already_group.add_argument('-ats', '--access-token-secret',\n help='Secret for the OAuth access token.')\n\n args = parser.parse_args()\n\n options = {}\n if args.server:\n options['server'] = args.server\n\n if args.rest_path:\n options['rest_path'] = args.rest_path\n\n if args.rest_api_version:\n options['rest_api_version'] = args.rest_api_version\n\n options['verify'] = True\n if args.no_verify:\n options['verify'] = False\n\n if args.prompt_for_password:\n args.password = getpass()\n\n basic_auth = {}\n if args.username:\n basic_auth['username'] = args.username\n\n if args.password:\n basic_auth['password'] = args.password\n\n key_cert_data = None\n if args.key_cert:\n with open(args.key_cert, 'r') as key_cert_file:\n key_cert_data = key_cert_file.read()\n\n oauth = {\n 'oauth_dance': False,\n }\n if args.oauth_dance:\n oauth = {\n 'oauth_dance': True,\n 'consumer_key': args.consumer_key,\n 'key_cert': key_cert_data,\n 'print_tokens': args.print_tokens,\n }\n elif args.access_token and args.access_token_secret and args.consumer_key and args.key_cert:\n oauth = {\n 'access_token': args.access_token,\n 'oauth_dance': False,\n 'access_token_secret': args.access_token_secret,\n 'consumer_key': args.consumer_key,\n 'key_cert': key_cert_data,\n }\n\n return options, basic_auth, oauth\n\n\ndef get_config():\n options, basic_auth, oauth = process_config()\n\n cmd_options, cmd_basic_auth, cmd_oauth = process_command_line()\n\n options.update(cmd_options)\n basic_auth.update(cmd_basic_auth)\n oauth.update(cmd_oauth)\n\n return options, basic_auth, oauth\n\n\ndef main():\n try:\n get_ipython\n except NameError:\n pass\n else:\n exit(\"Running ipython inside ipython isn't supported. :(\")\n\n options, basic_auth, oauth = get_config()\n\n if basic_auth:\n basic_auth = (basic_auth['username'], basic_auth['password'])\n\n if oauth['oauth_dance']:\n oauth = oauth_dance(\n options['server'], oauth['consumer_key'], oauth['key_cert'], oauth['print_tokens'], options['verify'])\n\n jira = JIRA(options=options, basic_auth=basic_auth, oauth=oauth)\n\n from IPython.frontend.terminal.embed import InteractiveShellEmbed\n\n ipshell = InteractiveShellEmbed(\n banner1='<JIRA Shell ' + __version__ + ' (' + jira.client_info() + ')>')\n ipshell(\"*** JIRA shell active; client is in 'jira'.\"\n ' Press Ctrl-D to exit.')\n\nif __name__ == '__main__':\n status = main()\n exit(status)\n", "path": "jira/jirashell.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n\"\"\"\nStarts an interactive JIRA session in an ipython terminal. Script arguments\nsupport changing the server and a persistent authentication over HTTP BASIC.\n\"\"\"\n\nimport sys\ntry:\n import configparser\nexcept:\n from six.moves import configparser\n from six.moves import input\n\nfrom six.moves.urllib.parse import parse_qsl\n\nimport argparse\nfrom getpass import getpass\nfrom sys import exit\nimport os\nimport requests\nfrom oauthlib.oauth1 import SIGNATURE_RSA\nfrom requests_oauthlib import OAuth1\n\nimport webbrowser\nfrom jira import JIRA, __version__\n\nCONFIG_PATH = os.path.join(\n os.path.expanduser('~'), '.jira-python', 'jirashell.ini')\n\n\ndef oauth_dance(server, consumer_key, key_cert_data, print_tokens=False, verify=None):\n if verify is None:\n verify = server.startswith('https')\n\n # step 1: get request tokens\n oauth = OAuth1(\n consumer_key, signature_method=SIGNATURE_RSA, rsa_key=key_cert_data)\n r = requests.post(\n server + '/plugins/servlet/oauth/request-token', verify=verify, auth=oauth)\n request = dict(parse_qsl(r.text))\n request_token = request['oauth_token']\n request_token_secret = request['oauth_token_secret']\n if print_tokens:\n print(\"Request tokens received.\")\n print(\" Request token: {}\".format(request_token))\n print(\" Request token secret: {}\".format(request_token_secret))\n\n # step 2: prompt user to validate\n auth_url = '{}/plugins/servlet/oauth/authorize?oauth_token={}'.format(\n server, request_token)\n if print_tokens:\n print(\n \"Please visit this URL to authorize the OAuth request:\\n\\t{}\".format(auth_url))\n else:\n webbrowser.open_new(auth_url)\n print(\n \"Your browser is opening the OAuth authorization for this client session.\")\n\n approved = input(\n 'Have you authorized this program to connect on your behalf to {}? (y/n)'.format(server))\n\n if approved.lower() != 'y':\n exit(\n 'Abandoning OAuth dance. Your partner faceplants. The audience boos. You feel shame.')\n\n # step 3: get access tokens for validated user\n oauth = OAuth1(consumer_key,\n signature_method=SIGNATURE_RSA,\n rsa_key=key_cert_data,\n resource_owner_key=request_token,\n resource_owner_secret=request_token_secret\n )\n r = requests.post(\n server + '/plugins/servlet/oauth/access-token', verify=verify, auth=oauth)\n access = dict(parse_qsl(r.text))\n\n if print_tokens:\n print(\"Access tokens received.\")\n print(\" Access token: {}\".format(access['oauth_token']))\n print(\" Access token secret: {}\".format(\n access['oauth_token_secret']))\n\n return {\n 'access_token': access['oauth_token'],\n 'access_token_secret': access['oauth_token_secret'],\n 'consumer_key': consumer_key,\n 'key_cert': key_cert_data,\n }\n\n\ndef process_config():\n if not os.path.exists(CONFIG_PATH):\n return {}, {}, {}\n\n parser = configparser.ConfigParser()\n try:\n parser.read(CONFIG_PATH)\n except configparser.ParsingError as err:\n print(\"Couldn't read config file at path: {}\".format(\n CONFIG_PATH))\n raise\n\n if parser.has_section('options'):\n options = {}\n for option, value in parser.items('options'):\n if option in (\"verify\", \"async\"):\n value = parser.getboolean('options', option)\n options[option] = value\n else:\n options = {}\n\n if parser.has_section('basic_auth'):\n basic_auth = dict(parser.items('basic_auth'))\n else:\n basic_auth = {}\n\n if parser.has_section('oauth'):\n oauth = dict(parser.items('oauth'))\n else:\n oauth = {}\n\n return options, basic_auth, oauth\n\n\ndef process_command_line():\n parser = argparse.ArgumentParser(\n description='Start an interactive JIRA shell with the REST API.')\n jira_group = parser.add_argument_group('JIRA server connection options')\n jira_group.add_argument('-s', '--server',\n help='The JIRA instance to connect to, including context path.')\n jira_group.add_argument('-r', '--rest-path',\n help='The root path of the REST API to use.')\n jira_group.add_argument('-v', '--rest-api-version',\n help='The version of the API under the specified name.')\n\n jira_group.add_argument('--no-verify', action='store_true',\n help='do not verify the ssl certificate')\n\n basic_auth_group = parser.add_argument_group('BASIC auth options')\n basic_auth_group.add_argument('-u', '--username',\n help='The username to connect to this JIRA instance with.')\n basic_auth_group.add_argument('-p', '--password',\n help='The password associated with this user.')\n basic_auth_group.add_argument('-P', '--prompt-for-password', action='store_true',\n help='Prompt for the password at the command line.')\n\n oauth_group = parser.add_argument_group('OAuth options')\n oauth_group.add_argument('-od', '--oauth-dance', action='store_true',\n help='Start a 3-legged OAuth authentication dance with JIRA.')\n oauth_group.add_argument('-ck', '--consumer-key',\n help='OAuth consumer key.')\n oauth_group.add_argument('-k', '--key-cert',\n help='Private key to sign OAuth requests with (should be the pair of the public key\\\n configured in the JIRA application link)')\n oauth_group.add_argument('-pt', '--print-tokens', action='store_true',\n help='Print the negotiated OAuth tokens as they are retrieved.')\n\n oauth_already_group = parser.add_argument_group(\n 'OAuth options for already-authenticated access tokens')\n oauth_already_group.add_argument('-at', '--access-token',\n help='OAuth access token for the user.')\n oauth_already_group.add_argument('-ats', '--access-token-secret',\n help='Secret for the OAuth access token.')\n\n args = parser.parse_args()\n\n options = {}\n if args.server:\n options['server'] = args.server\n\n if args.rest_path:\n options['rest_path'] = args.rest_path\n\n if args.rest_api_version:\n options['rest_api_version'] = args.rest_api_version\n\n options['verify'] = True\n if args.no_verify:\n options['verify'] = False\n\n if args.prompt_for_password:\n args.password = getpass()\n\n basic_auth = {}\n if args.username:\n basic_auth['username'] = args.username\n\n if args.password:\n basic_auth['password'] = args.password\n\n key_cert_data = None\n if args.key_cert:\n with open(args.key_cert, 'r') as key_cert_file:\n key_cert_data = key_cert_file.read()\n\n oauth = {\n 'oauth_dance': False,\n }\n if args.oauth_dance:\n oauth = {\n 'oauth_dance': True,\n 'consumer_key': args.consumer_key,\n 'key_cert': key_cert_data,\n 'print_tokens': args.print_tokens,\n }\n elif args.access_token and args.access_token_secret and args.consumer_key and args.key_cert:\n oauth = {\n 'access_token': args.access_token,\n 'oauth_dance': False,\n 'access_token_secret': args.access_token_secret,\n 'consumer_key': args.consumer_key,\n 'key_cert': key_cert_data,\n }\n\n return options, basic_auth, oauth\n\n\ndef get_config():\n options, basic_auth, oauth = process_config()\n\n cmd_options, cmd_basic_auth, cmd_oauth = process_command_line()\n\n options.update(cmd_options)\n basic_auth.update(cmd_basic_auth)\n oauth.update(cmd_oauth)\n\n return options, basic_auth, oauth\n\n\ndef main():\n try:\n get_ipython\n except NameError:\n pass\n else:\n exit(\"Running ipython inside ipython isn't supported. :(\")\n\n options, basic_auth, oauth = get_config()\n\n if basic_auth:\n basic_auth = (basic_auth['username'], basic_auth['password'])\n\n if oauth['oauth_dance']:\n oauth = oauth_dance(\n options['server'], oauth['consumer_key'], oauth['key_cert'], oauth['print_tokens'], options['verify'])\n else:\n oauth = None\n\n jira = JIRA(options=options, basic_auth=basic_auth, oauth=oauth)\n\n from IPython.frontend.terminal.embed import InteractiveShellEmbed\n\n ipshell = InteractiveShellEmbed(\n banner1='<JIRA Shell ' + __version__ + ' (' + jira.client_info() + ')>')\n ipshell(\"*** JIRA shell active; client is in 'jira'.\"\n ' Press Ctrl-D to exit.')\n\nif __name__ == '__main__':\n status = main()\n exit(status)\n", "path": "jira/jirashell.py"}]}
| 2,957 | 122 |
gh_patches_debug_28274
|
rasdani/github-patches
|
git_diff
|
certbot__certbot-427
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
nginxparser does not recognize 'if' statements
E.g., this is unparseable by nginxparser:
```
if ($http_origin ~* ^https://www\.example\.com) {
add_header Access-Control-Allow-Origin "$http_origin";
}
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `letsencrypt_nginx/nginxparser.py`
Content:
```
1 """Very low-level nginx config parser based on pyparsing."""
2 import string
3
4 from pyparsing import (
5 Literal, White, Word, alphanums, CharsNotIn, Forward, Group,
6 Optional, OneOrMore, ZeroOrMore, pythonStyleComment)
7
8
9 class RawNginxParser(object):
10 # pylint: disable=expression-not-assigned
11 """A class that parses nginx configuration with pyparsing."""
12
13 # constants
14 left_bracket = Literal("{").suppress()
15 right_bracket = Literal("}").suppress()
16 semicolon = Literal(";").suppress()
17 space = White().suppress()
18 key = Word(alphanums + "_/")
19 value = CharsNotIn("{};,")
20 location = CharsNotIn("{};," + string.whitespace)
21 # modifier for location uri [ = | ~ | ~* | ^~ ]
22 modifier = Literal("=") | Literal("~*") | Literal("~") | Literal("^~")
23
24 # rules
25 assignment = (key + Optional(space + value) + semicolon)
26 block = Forward()
27
28 block << Group(
29 Group(key + Optional(space + modifier) + Optional(space + location))
30 + left_bracket
31 + Group(ZeroOrMore(Group(assignment) | block))
32 + right_bracket)
33
34 script = OneOrMore(Group(assignment) | block).ignore(pythonStyleComment)
35
36 def __init__(self, source):
37 self.source = source
38
39 def parse(self):
40 """Returns the parsed tree."""
41 return self.script.parseString(self.source)
42
43 def as_list(self):
44 """Returns the parsed tree as a list."""
45 return self.parse().asList()
46
47
48 class RawNginxDumper(object):
49 # pylint: disable=too-few-public-methods
50 """A class that dumps nginx configuration from the provided tree."""
51 def __init__(self, blocks, indentation=4):
52 self.blocks = blocks
53 self.indentation = indentation
54
55 def __iter__(self, blocks=None, current_indent=0, spacer=' '):
56 """Iterates the dumped nginx content."""
57 blocks = blocks or self.blocks
58 for key, values in blocks:
59 if current_indent:
60 yield spacer
61 indentation = spacer * current_indent
62 if isinstance(key, list):
63 yield indentation + spacer.join(key) + ' {'
64 for parameter in values:
65 if isinstance(parameter[0], list):
66 dumped = self.__iter__(
67 [parameter],
68 current_indent + self.indentation)
69 for line in dumped:
70 yield line
71 else:
72 dumped = spacer.join(parameter) + ';'
73 yield spacer * (
74 current_indent + self.indentation) + dumped
75
76 yield indentation + '}'
77 else:
78 yield spacer * current_indent + key + spacer + values + ';'
79
80 def as_string(self):
81 """Return the parsed block as a string."""
82 return '\n'.join(self)
83
84
85 # Shortcut functions to respect Python's serialization interface
86 # (like pyyaml, picker or json)
87
88 def loads(source):
89 """Parses from a string.
90
91 :param str souce: The string to parse
92 :returns: The parsed tree
93 :rtype: list
94
95 """
96 return RawNginxParser(source).as_list()
97
98
99 def load(_file):
100 """Parses from a file.
101
102 :param file _file: The file to parse
103 :returns: The parsed tree
104 :rtype: list
105
106 """
107 return loads(_file.read())
108
109
110 def dumps(blocks, indentation=4):
111 """Dump to a string.
112
113 :param list block: The parsed tree
114 :param int indentation: The number of spaces to indent
115 :rtype: str
116
117 """
118 return RawNginxDumper(blocks, indentation).as_string()
119
120
121 def dump(blocks, _file, indentation=4):
122 """Dump to a file.
123
124 :param list block: The parsed tree
125 :param file _file: The file to dump to
126 :param int indentation: The number of spaces to indent
127 :rtype: NoneType
128
129 """
130 return _file.write(dumps(blocks, indentation))
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/letsencrypt_nginx/nginxparser.py b/letsencrypt_nginx/nginxparser.py
--- a/letsencrypt_nginx/nginxparser.py
+++ b/letsencrypt_nginx/nginxparser.py
@@ -3,7 +3,7 @@
from pyparsing import (
Literal, White, Word, alphanums, CharsNotIn, Forward, Group,
- Optional, OneOrMore, ZeroOrMore, pythonStyleComment)
+ Optional, OneOrMore, Regex, ZeroOrMore, pythonStyleComment)
class RawNginxParser(object):
@@ -16,17 +16,21 @@
semicolon = Literal(";").suppress()
space = White().suppress()
key = Word(alphanums + "_/")
- value = CharsNotIn("{};,")
+ # Matches anything that is not a special character AND any chars in single
+ # or double quotes
+ value = Regex(r"((\".*\")?(\'.*\')?[^\{\};,]?)+")
location = CharsNotIn("{};," + string.whitespace)
# modifier for location uri [ = | ~ | ~* | ^~ ]
modifier = Literal("=") | Literal("~*") | Literal("~") | Literal("^~")
# rules
assignment = (key + Optional(space + value) + semicolon)
+ location_statement = Optional(space + modifier) + Optional(space + location)
+ if_statement = Literal("if") + space + Regex(r"\(.+\)") + space
block = Forward()
block << Group(
- Group(key + Optional(space + modifier) + Optional(space + location))
+ (Group(key + location_statement) ^ Group(if_statement))
+ left_bracket
+ Group(ZeroOrMore(Group(assignment) | block))
+ right_bracket)
|
{"golden_diff": "diff --git a/letsencrypt_nginx/nginxparser.py b/letsencrypt_nginx/nginxparser.py\n--- a/letsencrypt_nginx/nginxparser.py\n+++ b/letsencrypt_nginx/nginxparser.py\n@@ -3,7 +3,7 @@\n \n from pyparsing import (\n Literal, White, Word, alphanums, CharsNotIn, Forward, Group,\n- Optional, OneOrMore, ZeroOrMore, pythonStyleComment)\n+ Optional, OneOrMore, Regex, ZeroOrMore, pythonStyleComment)\n \n \n class RawNginxParser(object):\n@@ -16,17 +16,21 @@\n semicolon = Literal(\";\").suppress()\n space = White().suppress()\n key = Word(alphanums + \"_/\")\n- value = CharsNotIn(\"{};,\")\n+ # Matches anything that is not a special character AND any chars in single\n+ # or double quotes\n+ value = Regex(r\"((\\\".*\\\")?(\\'.*\\')?[^\\{\\};,]?)+\")\n location = CharsNotIn(\"{};,\" + string.whitespace)\n # modifier for location uri [ = | ~ | ~* | ^~ ]\n modifier = Literal(\"=\") | Literal(\"~*\") | Literal(\"~\") | Literal(\"^~\")\n \n # rules\n assignment = (key + Optional(space + value) + semicolon)\n+ location_statement = Optional(space + modifier) + Optional(space + location)\n+ if_statement = Literal(\"if\") + space + Regex(r\"\\(.+\\)\") + space\n block = Forward()\n \n block << Group(\n- Group(key + Optional(space + modifier) + Optional(space + location))\n+ (Group(key + location_statement) ^ Group(if_statement))\n + left_bracket\n + Group(ZeroOrMore(Group(assignment) | block))\n + right_bracket)\n", "issue": "nginxparser does not recognize 'if' statements\nE.g., this is unparseable by nginxparser:\n\n```\nif ($http_origin ~* ^https://www\\.example\\.com) {\n add_header Access-Control-Allow-Origin \"$http_origin\";\n}\n```\n\n", "before_files": [{"content": "\"\"\"Very low-level nginx config parser based on pyparsing.\"\"\"\nimport string\n\nfrom pyparsing import (\n Literal, White, Word, alphanums, CharsNotIn, Forward, Group,\n Optional, OneOrMore, ZeroOrMore, pythonStyleComment)\n\n\nclass RawNginxParser(object):\n # pylint: disable=expression-not-assigned\n \"\"\"A class that parses nginx configuration with pyparsing.\"\"\"\n\n # constants\n left_bracket = Literal(\"{\").suppress()\n right_bracket = Literal(\"}\").suppress()\n semicolon = Literal(\";\").suppress()\n space = White().suppress()\n key = Word(alphanums + \"_/\")\n value = CharsNotIn(\"{};,\")\n location = CharsNotIn(\"{};,\" + string.whitespace)\n # modifier for location uri [ = | ~ | ~* | ^~ ]\n modifier = Literal(\"=\") | Literal(\"~*\") | Literal(\"~\") | Literal(\"^~\")\n\n # rules\n assignment = (key + Optional(space + value) + semicolon)\n block = Forward()\n\n block << Group(\n Group(key + Optional(space + modifier) + Optional(space + location))\n + left_bracket\n + Group(ZeroOrMore(Group(assignment) | block))\n + right_bracket)\n\n script = OneOrMore(Group(assignment) | block).ignore(pythonStyleComment)\n\n def __init__(self, source):\n self.source = source\n\n def parse(self):\n \"\"\"Returns the parsed tree.\"\"\"\n return self.script.parseString(self.source)\n\n def as_list(self):\n \"\"\"Returns the parsed tree as a list.\"\"\"\n return self.parse().asList()\n\n\nclass RawNginxDumper(object):\n # pylint: disable=too-few-public-methods\n \"\"\"A class that dumps nginx configuration from the provided tree.\"\"\"\n def __init__(self, blocks, indentation=4):\n self.blocks = blocks\n self.indentation = indentation\n\n def __iter__(self, blocks=None, current_indent=0, spacer=' '):\n \"\"\"Iterates the dumped nginx content.\"\"\"\n blocks = blocks or self.blocks\n for key, values in blocks:\n if current_indent:\n yield spacer\n indentation = spacer * current_indent\n if isinstance(key, list):\n yield indentation + spacer.join(key) + ' {'\n for parameter in values:\n if isinstance(parameter[0], list):\n dumped = self.__iter__(\n [parameter],\n current_indent + self.indentation)\n for line in dumped:\n yield line\n else:\n dumped = spacer.join(parameter) + ';'\n yield spacer * (\n current_indent + self.indentation) + dumped\n\n yield indentation + '}'\n else:\n yield spacer * current_indent + key + spacer + values + ';'\n\n def as_string(self):\n \"\"\"Return the parsed block as a string.\"\"\"\n return '\\n'.join(self)\n\n\n# Shortcut functions to respect Python's serialization interface\n# (like pyyaml, picker or json)\n\ndef loads(source):\n \"\"\"Parses from a string.\n\n :param str souce: The string to parse\n :returns: The parsed tree\n :rtype: list\n\n \"\"\"\n return RawNginxParser(source).as_list()\n\n\ndef load(_file):\n \"\"\"Parses from a file.\n\n :param file _file: The file to parse\n :returns: The parsed tree\n :rtype: list\n\n \"\"\"\n return loads(_file.read())\n\n\ndef dumps(blocks, indentation=4):\n \"\"\"Dump to a string.\n\n :param list block: The parsed tree\n :param int indentation: The number of spaces to indent\n :rtype: str\n\n \"\"\"\n return RawNginxDumper(blocks, indentation).as_string()\n\n\ndef dump(blocks, _file, indentation=4):\n \"\"\"Dump to a file.\n\n :param list block: The parsed tree\n :param file _file: The file to dump to\n :param int indentation: The number of spaces to indent\n :rtype: NoneType\n\n \"\"\"\n return _file.write(dumps(blocks, indentation))\n", "path": "letsencrypt_nginx/nginxparser.py"}], "after_files": [{"content": "\"\"\"Very low-level nginx config parser based on pyparsing.\"\"\"\nimport string\n\nfrom pyparsing import (\n Literal, White, Word, alphanums, CharsNotIn, Forward, Group,\n Optional, OneOrMore, Regex, ZeroOrMore, pythonStyleComment)\n\n\nclass RawNginxParser(object):\n # pylint: disable=expression-not-assigned\n \"\"\"A class that parses nginx configuration with pyparsing.\"\"\"\n\n # constants\n left_bracket = Literal(\"{\").suppress()\n right_bracket = Literal(\"}\").suppress()\n semicolon = Literal(\";\").suppress()\n space = White().suppress()\n key = Word(alphanums + \"_/\")\n # Matches anything that is not a special character AND any chars in single\n # or double quotes\n value = Regex(r\"((\\\".*\\\")?(\\'.*\\')?[^\\{\\};,]?)+\")\n location = CharsNotIn(\"{};,\" + string.whitespace)\n # modifier for location uri [ = | ~ | ~* | ^~ ]\n modifier = Literal(\"=\") | Literal(\"~*\") | Literal(\"~\") | Literal(\"^~\")\n\n # rules\n assignment = (key + Optional(space + value) + semicolon)\n location_statement = Optional(space + modifier) + Optional(space + location)\n if_statement = Literal(\"if\") + space + Regex(r\"\\(.+\\)\") + space\n block = Forward()\n\n block << Group(\n (Group(key + location_statement) ^ Group(if_statement))\n + left_bracket\n + Group(ZeroOrMore(Group(assignment) | block))\n + right_bracket)\n\n script = OneOrMore(Group(assignment) | block).ignore(pythonStyleComment)\n\n def __init__(self, source):\n self.source = source\n\n def parse(self):\n \"\"\"Returns the parsed tree.\"\"\"\n return self.script.parseString(self.source)\n\n def as_list(self):\n \"\"\"Returns the parsed tree as a list.\"\"\"\n return self.parse().asList()\n\n\nclass RawNginxDumper(object):\n # pylint: disable=too-few-public-methods\n \"\"\"A class that dumps nginx configuration from the provided tree.\"\"\"\n def __init__(self, blocks, indentation=4):\n self.blocks = blocks\n self.indentation = indentation\n\n def __iter__(self, blocks=None, current_indent=0, spacer=' '):\n \"\"\"Iterates the dumped nginx content.\"\"\"\n blocks = blocks or self.blocks\n for key, values in blocks:\n if current_indent:\n yield spacer\n indentation = spacer * current_indent\n if isinstance(key, list):\n yield indentation + spacer.join(key) + ' {'\n for parameter in values:\n if isinstance(parameter[0], list):\n dumped = self.__iter__(\n [parameter],\n current_indent + self.indentation)\n for line in dumped:\n yield line\n else:\n dumped = spacer.join(parameter) + ';'\n yield spacer * (\n current_indent + self.indentation) + dumped\n\n yield indentation + '}'\n else:\n yield spacer * current_indent + key + spacer + values + ';'\n\n def as_string(self):\n \"\"\"Return the parsed block as a string.\"\"\"\n return '\\n'.join(self)\n\n\n# Shortcut functions to respect Python's serialization interface\n# (like pyyaml, picker or json)\n\ndef loads(source):\n \"\"\"Parses from a string.\n\n :param str souce: The string to parse\n :returns: The parsed tree\n :rtype: list\n\n \"\"\"\n return RawNginxParser(source).as_list()\n\n\ndef load(_file):\n \"\"\"Parses from a file.\n\n :param file _file: The file to parse\n :returns: The parsed tree\n :rtype: list\n\n \"\"\"\n return loads(_file.read())\n\n\ndef dumps(blocks, indentation=4):\n \"\"\"Dump to a string.\n\n :param list block: The parsed tree\n :param int indentation: The number of spaces to indent\n :rtype: str\n\n \"\"\"\n return RawNginxDumper(blocks, indentation).as_string()\n\n\ndef dump(blocks, _file, indentation=4):\n \"\"\"Dump to a file.\n\n :param list block: The parsed tree\n :param file _file: The file to dump to\n :param int indentation: The number of spaces to indent\n :rtype: NoneType\n\n \"\"\"\n return _file.write(dumps(blocks, indentation))\n", "path": "letsencrypt_nginx/nginxparser.py"}]}
| 1,501 | 399 |
gh_patches_debug_238
|
rasdani/github-patches
|
git_diff
|
mitmproxy__mitmproxy-6117
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Warn new users about the lazy creation of connections (when requests are expected to be served in the script fully and only)
#### Problem Description
The [example script](https://docs.mitmproxy.org/stable/addons-examples/#http-reply-from-proxy) for not sending any data to the server does not prevent mitmproxy from **establishing a connection** to the server.
For which reason is said connection established when no data has to be sent to this host right away and possibly never in the future?
I trusted mitmproxy to **not send _any_ data, as stated**, but I had to discover (the hard way) that **that's not the case**.
I used mitmproxy in an environment where it required to stay silent, but it wasn't compliant.
Could you please consider warning new users about this behavior?
<strike>Is there an easy way to prevent establishing connections?
Is it planned to do so on default in this case?</strike>
*EDIT*: Trying to prevent connections by rerouting the connection to a closed port killed the flow for the client. Routing to a different host with invalid certificate worked though, warning me in the event log and suggesting setting connection strategy to lazy and it worked.
#### Steps to reproduce the behavior:
1. Load the example script
2. Have the client request examle.com
3. View the event log
#### System Information
Mitmproxy: 9.0.1
Python: 3.10.6
OpenSSL: OpenSSL 3.0.7 1 Nov 2022
Platform: Linux-5.15.0-71-generic-x86_64-with-glibc2.35
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/addons/http-reply-from-proxy.py`
Content:
```
1 """Send a reply from the proxy without sending any data to the remote server."""
2 from mitmproxy import http
3
4
5 def request(flow: http.HTTPFlow) -> None:
6 if flow.request.pretty_url == "http://example.com/path":
7 flow.response = http.Response.make(
8 200, # (optional) status code
9 b"Hello World", # (optional) content
10 {"Content-Type": "text/html"}, # (optional) headers
11 )
12
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/addons/http-reply-from-proxy.py b/examples/addons/http-reply-from-proxy.py
--- a/examples/addons/http-reply-from-proxy.py
+++ b/examples/addons/http-reply-from-proxy.py
@@ -1,4 +1,4 @@
-"""Send a reply from the proxy without sending any data to the remote server."""
+"""Send a reply from the proxy without sending the request to the remote server."""
from mitmproxy import http
|
{"golden_diff": "diff --git a/examples/addons/http-reply-from-proxy.py b/examples/addons/http-reply-from-proxy.py\n--- a/examples/addons/http-reply-from-proxy.py\n+++ b/examples/addons/http-reply-from-proxy.py\n@@ -1,4 +1,4 @@\n-\"\"\"Send a reply from the proxy without sending any data to the remote server.\"\"\"\n+\"\"\"Send a reply from the proxy without sending the request to the remote server.\"\"\"\n from mitmproxy import http\n", "issue": "Warn new users about the lazy creation of connections (when requests are expected to be served in the script fully and only)\n#### Problem Description\r\nThe [example script](https://docs.mitmproxy.org/stable/addons-examples/#http-reply-from-proxy) for not sending any data to the server does not prevent mitmproxy from **establishing a connection** to the server.\r\nFor which reason is said connection established when no data has to be sent to this host right away and possibly never in the future?\r\nI trusted mitmproxy to **not send _any_ data, as stated**, but I had to discover (the hard way) that **that's not the case**.\r\nI used mitmproxy in an environment where it required to stay silent, but it wasn't compliant.\r\n\r\nCould you please consider warning new users about this behavior?\r\n<strike>Is there an easy way to prevent establishing connections?\r\nIs it planned to do so on default in this case?</strike>\r\n*EDIT*: Trying to prevent connections by rerouting the connection to a closed port killed the flow for the client. Routing to a different host with invalid certificate worked though, warning me in the event log and suggesting setting connection strategy to lazy and it worked.\r\n\r\n#### Steps to reproduce the behavior:\r\n1. Load the example script\r\n2. Have the client request examle.com\r\n3. View the event log\r\n\r\n#### System Information\r\nMitmproxy: 9.0.1\r\nPython: 3.10.6\r\nOpenSSL: OpenSSL 3.0.7 1 Nov 2022\r\nPlatform: Linux-5.15.0-71-generic-x86_64-with-glibc2.35\r\n\r\n\n", "before_files": [{"content": "\"\"\"Send a reply from the proxy without sending any data to the remote server.\"\"\"\nfrom mitmproxy import http\n\n\ndef request(flow: http.HTTPFlow) -> None:\n if flow.request.pretty_url == \"http://example.com/path\":\n flow.response = http.Response.make(\n 200, # (optional) status code\n b\"Hello World\", # (optional) content\n {\"Content-Type\": \"text/html\"}, # (optional) headers\n )\n", "path": "examples/addons/http-reply-from-proxy.py"}], "after_files": [{"content": "\"\"\"Send a reply from the proxy without sending the request to the remote server.\"\"\"\nfrom mitmproxy import http\n\n\ndef request(flow: http.HTTPFlow) -> None:\n if flow.request.pretty_url == \"http://example.com/path\":\n flow.response = http.Response.make(\n 200, # (optional) status code\n b\"Hello World\", # (optional) content\n {\"Content-Type\": \"text/html\"}, # (optional) headers\n )\n", "path": "examples/addons/http-reply-from-proxy.py"}]}
| 738 | 95 |
gh_patches_debug_65366
|
rasdani/github-patches
|
git_diff
|
PaddlePaddle__models-399
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
使用 generate_sequence_by_rnn_lm 进行train的时候报错
在 generate_sequence_by_rnn_lm 这个模型下运行 train.py 的时候,当测试文件的路径不存在的时候会报错。错误的原因是把conf写成了config。错误行数是train.py 的112行
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `generate_sequence_by_rnn_lm/train.py`
Content:
```
1 import os
2 import sys
3 import gzip
4
5 import paddle.v2 as paddle
6 import config as conf
7 import reader
8 from network_conf import rnn_lm
9 from utils import logger, build_dict, load_dict
10
11
12 def train(topology,
13 train_reader,
14 test_reader,
15 model_save_dir="models",
16 num_passes=10):
17 """
18 train model.
19
20 :param topology: cost layer of the model to train.
21 :type topology: LayerOuput
22 :param train_reader: train data reader.
23 :type trainer_reader: collections.Iterable
24 :param test_reader: test data reader.
25 :type test_reader: collections.Iterable
26 :param model_save_dir: path to save the trained model
27 :type model_save_dir: str
28 :param num_passes: number of epoch
29 :type num_passes: int
30 """
31 if not os.path.exists(model_save_dir):
32 os.mkdir(model_save_dir)
33
34 # initialize PaddlePaddle
35 paddle.init(use_gpu=conf.use_gpu, trainer_count=conf.trainer_count)
36
37 # create optimizer
38 adam_optimizer = paddle.optimizer.Adam(
39 learning_rate=1e-3,
40 regularization=paddle.optimizer.L2Regularization(rate=1e-3),
41 model_average=paddle.optimizer.ModelAverage(
42 average_window=0.5, max_average_window=10000))
43
44 # create parameters
45 parameters = paddle.parameters.create(topology)
46 # create trainer
47 trainer = paddle.trainer.SGD(
48 cost=topology, parameters=parameters, update_equation=adam_optimizer)
49
50 # define the event_handler callback
51 def event_handler(event):
52 if isinstance(event, paddle.event.EndIteration):
53 if not event.batch_id % conf.log_period:
54 logger.info("Pass %d, Batch %d, Cost %f, %s" % (
55 event.pass_id, event.batch_id, event.cost, event.metrics))
56
57 if (not event.batch_id %
58 conf.save_period_by_batches) and event.batch_id:
59 save_name = os.path.join(model_save_dir,
60 "rnn_lm_pass_%05d_batch_%03d.tar.gz" %
61 (event.pass_id, event.batch_id))
62 with gzip.open(save_name, "w") as f:
63 trainer.save_parameter_to_tar(f)
64
65 if isinstance(event, paddle.event.EndPass):
66 if test_reader is not None:
67 result = trainer.test(reader=test_reader)
68 logger.info("Test with Pass %d, %s" %
69 (event.pass_id, result.metrics))
70 save_name = os.path.join(model_save_dir, "rnn_lm_pass_%05d.tar.gz" %
71 (event.pass_id))
72 with gzip.open(save_name, "w") as f:
73 trainer.save_parameter_to_tar(f)
74
75 logger.info("start training...")
76 trainer.train(
77 reader=train_reader, event_handler=event_handler, num_passes=num_passes)
78
79 logger.info("Training is finished.")
80
81
82 def main():
83 # prepare vocab
84 if not (os.path.exists(conf.vocab_file) and
85 os.path.getsize(conf.vocab_file)):
86 logger.info(("word dictionary does not exist, "
87 "build it from the training data"))
88 build_dict(conf.train_file, conf.vocab_file, conf.max_word_num,
89 conf.cutoff_word_fre)
90 logger.info("load word dictionary.")
91 word_dict = load_dict(conf.vocab_file)
92 logger.info("dictionay size = %d" % (len(word_dict)))
93
94 cost = rnn_lm(
95 len(word_dict), conf.emb_dim, conf.hidden_size, conf.stacked_rnn_num,
96 conf.rnn_type)
97
98 # define reader
99 reader_args = {
100 "file_name": conf.train_file,
101 "word_dict": word_dict,
102 }
103 train_reader = paddle.batch(
104 paddle.reader.shuffle(
105 reader.rnn_reader(**reader_args), buf_size=102400),
106 batch_size=conf.batch_size)
107 test_reader = None
108 if os.path.exists(conf.test_file) and os.path.getsize(conf.test_file):
109 test_reader = paddle.batch(
110 paddle.reader.shuffle(
111 reader.rnn_reader(**reader_args), buf_size=65536),
112 batch_size=config.batch_size)
113
114 train(
115 topology=cost,
116 train_reader=train_reader,
117 test_reader=test_reader,
118 model_save_dir=conf.model_save_dir,
119 num_passes=conf.num_passes)
120
121
122 if __name__ == "__main__":
123 main()
124
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/generate_sequence_by_rnn_lm/train.py b/generate_sequence_by_rnn_lm/train.py
--- a/generate_sequence_by_rnn_lm/train.py
+++ b/generate_sequence_by_rnn_lm/train.py
@@ -109,7 +109,7 @@
test_reader = paddle.batch(
paddle.reader.shuffle(
reader.rnn_reader(**reader_args), buf_size=65536),
- batch_size=config.batch_size)
+ batch_size=conf.batch_size)
train(
topology=cost,
|
{"golden_diff": "diff --git a/generate_sequence_by_rnn_lm/train.py b/generate_sequence_by_rnn_lm/train.py\n--- a/generate_sequence_by_rnn_lm/train.py\n+++ b/generate_sequence_by_rnn_lm/train.py\n@@ -109,7 +109,7 @@\n test_reader = paddle.batch(\n paddle.reader.shuffle(\n reader.rnn_reader(**reader_args), buf_size=65536),\n- batch_size=config.batch_size)\n+ batch_size=conf.batch_size)\n \n train(\n topology=cost,\n", "issue": "\u4f7f\u7528 generate_sequence_by_rnn_lm \u8fdb\u884ctrain\u7684\u65f6\u5019\u62a5\u9519\n\u5728 generate_sequence_by_rnn_lm \u8fd9\u4e2a\u6a21\u578b\u4e0b\u8fd0\u884c train.py \u7684\u65f6\u5019\uff0c\u5f53\u6d4b\u8bd5\u6587\u4ef6\u7684\u8def\u5f84\u4e0d\u5b58\u5728\u7684\u65f6\u5019\u4f1a\u62a5\u9519\u3002\u9519\u8bef\u7684\u539f\u56e0\u662f\u628aconf\u5199\u6210\u4e86config\u3002\u9519\u8bef\u884c\u6570\u662ftrain.py \u7684112\u884c\n", "before_files": [{"content": "import os\nimport sys\nimport gzip\n\nimport paddle.v2 as paddle\nimport config as conf\nimport reader\nfrom network_conf import rnn_lm\nfrom utils import logger, build_dict, load_dict\n\n\ndef train(topology,\n train_reader,\n test_reader,\n model_save_dir=\"models\",\n num_passes=10):\n \"\"\"\n train model.\n\n :param topology: cost layer of the model to train.\n :type topology: LayerOuput\n :param train_reader: train data reader.\n :type trainer_reader: collections.Iterable\n :param test_reader: test data reader.\n :type test_reader: collections.Iterable\n :param model_save_dir: path to save the trained model\n :type model_save_dir: str\n :param num_passes: number of epoch\n :type num_passes: int\n \"\"\"\n if not os.path.exists(model_save_dir):\n os.mkdir(model_save_dir)\n\n # initialize PaddlePaddle\n paddle.init(use_gpu=conf.use_gpu, trainer_count=conf.trainer_count)\n\n # create optimizer\n adam_optimizer = paddle.optimizer.Adam(\n learning_rate=1e-3,\n regularization=paddle.optimizer.L2Regularization(rate=1e-3),\n model_average=paddle.optimizer.ModelAverage(\n average_window=0.5, max_average_window=10000))\n\n # create parameters\n parameters = paddle.parameters.create(topology)\n # create trainer\n trainer = paddle.trainer.SGD(\n cost=topology, parameters=parameters, update_equation=adam_optimizer)\n\n # define the event_handler callback\n def event_handler(event):\n if isinstance(event, paddle.event.EndIteration):\n if not event.batch_id % conf.log_period:\n logger.info(\"Pass %d, Batch %d, Cost %f, %s\" % (\n event.pass_id, event.batch_id, event.cost, event.metrics))\n\n if (not event.batch_id %\n conf.save_period_by_batches) and event.batch_id:\n save_name = os.path.join(model_save_dir,\n \"rnn_lm_pass_%05d_batch_%03d.tar.gz\" %\n (event.pass_id, event.batch_id))\n with gzip.open(save_name, \"w\") as f:\n trainer.save_parameter_to_tar(f)\n\n if isinstance(event, paddle.event.EndPass):\n if test_reader is not None:\n result = trainer.test(reader=test_reader)\n logger.info(\"Test with Pass %d, %s\" %\n (event.pass_id, result.metrics))\n save_name = os.path.join(model_save_dir, \"rnn_lm_pass_%05d.tar.gz\" %\n (event.pass_id))\n with gzip.open(save_name, \"w\") as f:\n trainer.save_parameter_to_tar(f)\n\n logger.info(\"start training...\")\n trainer.train(\n reader=train_reader, event_handler=event_handler, num_passes=num_passes)\n\n logger.info(\"Training is finished.\")\n\n\ndef main():\n # prepare vocab\n if not (os.path.exists(conf.vocab_file) and\n os.path.getsize(conf.vocab_file)):\n logger.info((\"word dictionary does not exist, \"\n \"build it from the training data\"))\n build_dict(conf.train_file, conf.vocab_file, conf.max_word_num,\n conf.cutoff_word_fre)\n logger.info(\"load word dictionary.\")\n word_dict = load_dict(conf.vocab_file)\n logger.info(\"dictionay size = %d\" % (len(word_dict)))\n\n cost = rnn_lm(\n len(word_dict), conf.emb_dim, conf.hidden_size, conf.stacked_rnn_num,\n conf.rnn_type)\n\n # define reader\n reader_args = {\n \"file_name\": conf.train_file,\n \"word_dict\": word_dict,\n }\n train_reader = paddle.batch(\n paddle.reader.shuffle(\n reader.rnn_reader(**reader_args), buf_size=102400),\n batch_size=conf.batch_size)\n test_reader = None\n if os.path.exists(conf.test_file) and os.path.getsize(conf.test_file):\n test_reader = paddle.batch(\n paddle.reader.shuffle(\n reader.rnn_reader(**reader_args), buf_size=65536),\n batch_size=config.batch_size)\n\n train(\n topology=cost,\n train_reader=train_reader,\n test_reader=test_reader,\n model_save_dir=conf.model_save_dir,\n num_passes=conf.num_passes)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "generate_sequence_by_rnn_lm/train.py"}], "after_files": [{"content": "import os\nimport sys\nimport gzip\n\nimport paddle.v2 as paddle\nimport config as conf\nimport reader\nfrom network_conf import rnn_lm\nfrom utils import logger, build_dict, load_dict\n\n\ndef train(topology,\n train_reader,\n test_reader,\n model_save_dir=\"models\",\n num_passes=10):\n \"\"\"\n train model.\n\n :param topology: cost layer of the model to train.\n :type topology: LayerOuput\n :param train_reader: train data reader.\n :type trainer_reader: collections.Iterable\n :param test_reader: test data reader.\n :type test_reader: collections.Iterable\n :param model_save_dir: path to save the trained model\n :type model_save_dir: str\n :param num_passes: number of epoch\n :type num_passes: int\n \"\"\"\n if not os.path.exists(model_save_dir):\n os.mkdir(model_save_dir)\n\n # initialize PaddlePaddle\n paddle.init(use_gpu=conf.use_gpu, trainer_count=conf.trainer_count)\n\n # create optimizer\n adam_optimizer = paddle.optimizer.Adam(\n learning_rate=1e-3,\n regularization=paddle.optimizer.L2Regularization(rate=1e-3),\n model_average=paddle.optimizer.ModelAverage(\n average_window=0.5, max_average_window=10000))\n\n # create parameters\n parameters = paddle.parameters.create(topology)\n # create trainer\n trainer = paddle.trainer.SGD(\n cost=topology, parameters=parameters, update_equation=adam_optimizer)\n\n # define the event_handler callback\n def event_handler(event):\n if isinstance(event, paddle.event.EndIteration):\n if not event.batch_id % conf.log_period:\n logger.info(\"Pass %d, Batch %d, Cost %f, %s\" % (\n event.pass_id, event.batch_id, event.cost, event.metrics))\n\n if (not event.batch_id %\n conf.save_period_by_batches) and event.batch_id:\n save_name = os.path.join(model_save_dir,\n \"rnn_lm_pass_%05d_batch_%03d.tar.gz\" %\n (event.pass_id, event.batch_id))\n with gzip.open(save_name, \"w\") as f:\n trainer.save_parameter_to_tar(f)\n\n if isinstance(event, paddle.event.EndPass):\n if test_reader is not None:\n result = trainer.test(reader=test_reader)\n logger.info(\"Test with Pass %d, %s\" %\n (event.pass_id, result.metrics))\n save_name = os.path.join(model_save_dir, \"rnn_lm_pass_%05d.tar.gz\" %\n (event.pass_id))\n with gzip.open(save_name, \"w\") as f:\n trainer.save_parameter_to_tar(f)\n\n logger.info(\"start training...\")\n trainer.train(\n reader=train_reader, event_handler=event_handler, num_passes=num_passes)\n\n logger.info(\"Training is finished.\")\n\n\ndef main():\n # prepare vocab\n if not (os.path.exists(conf.vocab_file) and\n os.path.getsize(conf.vocab_file)):\n logger.info((\"word dictionary does not exist, \"\n \"build it from the training data\"))\n build_dict(conf.train_file, conf.vocab_file, conf.max_word_num,\n conf.cutoff_word_fre)\n logger.info(\"load word dictionary.\")\n word_dict = load_dict(conf.vocab_file)\n logger.info(\"dictionay size = %d\" % (len(word_dict)))\n\n cost = rnn_lm(\n len(word_dict), conf.emb_dim, conf.hidden_size, conf.stacked_rnn_num,\n conf.rnn_type)\n\n # define reader\n reader_args = {\n \"file_name\": conf.train_file,\n \"word_dict\": word_dict,\n }\n train_reader = paddle.batch(\n paddle.reader.shuffle(\n reader.rnn_reader(**reader_args), buf_size=102400),\n batch_size=conf.batch_size)\n test_reader = None\n if os.path.exists(conf.test_file) and os.path.getsize(conf.test_file):\n test_reader = paddle.batch(\n paddle.reader.shuffle(\n reader.rnn_reader(**reader_args), buf_size=65536),\n batch_size=conf.batch_size)\n\n train(\n topology=cost,\n train_reader=train_reader,\n test_reader=test_reader,\n model_save_dir=conf.model_save_dir,\n num_passes=conf.num_passes)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "generate_sequence_by_rnn_lm/train.py"}]}
| 1,550 | 113 |
gh_patches_debug_4986
|
rasdani/github-patches
|
git_diff
|
apluslms__a-plus-802
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Edit course menu has outdated link to Glyphicons
A+ edit course - menu - add menu item. The menu item has an icon. The hint in the icon field has outdated link to the Bootstrap documentation. You can not find the right icons in that link anymore. The link must be fixed. That is very easy to do.
https://github.com/apluslms/a-plus/blob/57031587030092ac4c482fde53ff9d9577da0f0f/external_services/models.py#L212
We still use Bootstrap 3. The right link is this:
https://getbootstrap.com/docs/3.4/components/#glyphicons
The old link lands in the latest Bootstrap version.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `external_services/models.py`
Content:
```
1 from django.core.exceptions import ValidationError
2 from django.urls import reverse
3 from django.db import models
4 from django.utils.functional import cached_property
5 from django.utils.translation import gettext_lazy as _
6 from urllib.parse import urljoin, urlsplit
7
8 from course.models import CourseInstance
9 from inheritance.models import ModelWithInheritance
10 from lib.helpers import Enum
11 from lib.models import UrlMixin
12
13
14 def validate_no_domain(value):
15 if value and '://' in value:
16 raise ValidationError(_("Url can not contain scheme or domain part."))
17
18
19 class LinkService(ModelWithInheritance):
20 '''
21 A link to an external service.
22 '''
23 DESTINATION_REGION = Enum([
24 ('INTERNAL', 0, _('Destination is hosted internally. Link to internal privacy notice.')),
25 ('ORGANIZATION', 1, _('Destination is hosted in the same organization. Link to a privacy notice.')),
26 ('EEA', 3, _('Destination is hosted in the European Economic Area. Link to a privacy notice.')),
27 ('PRIVACYSHIELD', 5, _('Destination is hosted outside of the European Economic Area and used to be certified under the now invalid EU-US Privacy Shield. Link to an extended privacy notice.')),
28 ('GLOBAL', 6, _('Destination is hosted outside of the European Economic Area. Link to an extended privacy notice.')),
29 ])
30 url = models.CharField(
31 max_length=256,
32 help_text=_("The service URL")
33 )
34 destination_region = models.PositiveSmallIntegerField(
35 choices=DESTINATION_REGION.choices,
36 default=DESTINATION_REGION.GLOBAL,
37 help_text=_("The geographical area of the destination. Will display correct user notice."),
38 )
39 privacy_notice_url = models.CharField(
40 max_length=512,
41 blank=True,
42 help_text=_("A link to the service privacy notice. This is mandatory for services outside organization!"))
43 menu_label = models.CharField(
44 max_length=255,
45 help_text=_("A default label to show in the course menu.")
46 )
47 menu_icon_class = models.CharField(
48 max_length=32,
49 default="globe",
50 help_text=_("A default menu icon style name, see http://getbootstrap.com/components/#glyphicons-glyphs")
51 )
52 enabled = models.BooleanField(
53 default=True,
54 help_text=_("If not enabled, the service is disabled for all course instances.")
55 )
56
57 class Meta:
58 ordering = ["menu_label"]
59
60 def __str__(self):
61 out = "{}: {}".format(self.menu_label, self.url)
62 if not self.enabled:
63 return "[Disabled] " + out
64 return out
65
66 def clean(self):
67 errors = {}
68 if self.destination_region > self.DESTINATION_REGION.ORGANIZATION and not self.privacy_notice_url:
69 errors['privacy_notice_url'] = ValidationError(_('Privacy notice URL is mandatory for services outside organization.'))
70 if errors:
71 raise ValidationError(errors)
72
73 @property
74 def url_parts(self):
75 return urlsplit(self.url)
76
77 @property
78 def method(self):
79 return 'GET'
80
81 @property
82 def sends_user_info(self):
83 return False
84
85 def get_url(self, replace=None, kwargs={}):
86 '''Return the URL to the launch page of this service.'''
87 if self.destination_region > self.DESTINATION_REGION.INTERNAL:
88 return reverse('external-service-link', kwargs=kwargs)
89 return self.get_final_url(replace)
90
91 def get_final_url(self, replace=None):
92 '''Return the launch URL for this service.
93
94 The optional replace parameter may be a relative URL that is joined to
95 the URL path of this service. The relative URL must not include a domain.
96 '''
97 url = self.url
98 if replace:
99 assert '://' not in replace and not replace.startswith('//'), "Replace can't include domain"
100 url = urljoin(url, replace)
101 return url
102
103
104 class LTIService(LinkService):
105 '''
106 A provider of an LTI service.
107 '''
108 LTI_ACCESS = Enum([
109 ('ANON_API_NO', 0, _('Anonymous service, no API access')),
110 ('PUBLIC_API_NO', 5, _('Public service, no API access')),
111 ('PUBLIC_API_YES', 10, _('Public service, allow API access')),
112 ])
113 access_settings = models.IntegerField(
114 choices=LTI_ACCESS.choices,
115 default=LTI_ACCESS.ANON_API_NO,
116 help_text=_("Select whether to pass pseudonymised user data to the LTI service.<br>Public services can also enable sharing the user's API token and course API URL in the LTI launch request. This grants the LTI tool API access with the user's privileges.")
117 )
118 consumer_key = models.CharField(
119 max_length=128,
120 help_text=_("The consumer key provided by the LTI service.")
121 )
122 consumer_secret = models.CharField(
123 max_length=128,
124 help_text=_("The consumer secret provided by the LTI service.")
125 )
126
127 def __str__(self):
128 out = "(LTI) {}: {}".format(self.menu_label, self.url)
129 if not self.enabled:
130 return "[Disabled] " + out
131 return out
132
133 @property
134 def method(self):
135 return 'POST'
136
137 @property
138 def sends_user_info(self):
139 return True
140
141 @property
142 def is_anonymous(self):
143 return self.access_settings == self.LTI_ACCESS.ANON_API_NO
144
145 @property
146 def api_access(self):
147 return self.access_settings == self.LTI_ACCESS.PUBLIC_API_YES
148
149 def get_url(self, replace=None, kwargs={}):
150 return reverse('lti-login', kwargs=kwargs)
151
152
153 class MenuItemManager(models.Manager):
154
155 def get_queryset(self):
156 return super().get_queryset().select_related(
157 'course_instance', 'course_instance__course')
158
159
160 class MenuItem(UrlMixin, models.Model):
161 '''
162 Attaches link to course menu.
163 '''
164 ACCESS = Enum([
165 ('STUDENT', 0, _("All students, assistants and teachers can access.")),
166 ('ASSISTANT', 5, _("Only assistants and teachers can access.")),
167 ('TEACHER', 10, _("Only teachers can access.")),
168 ])
169 course_instance = models.ForeignKey(
170 CourseInstance,
171 on_delete=models.CASCADE,
172 related_name="ext_services",
173 help_text=_("A course where the menu item exists.")
174 )
175 access = models.IntegerField(
176 choices=ACCESS.choices,
177 default=ACCESS.STUDENT,
178 )
179 service = models.ForeignKey(
180 LinkService,
181 on_delete=models.CASCADE,
182 blank=True,
183 null=True,
184 help_text=_("An external service to link to. These are configured by administrators.")
185 )
186 menu_url = models.CharField(
187 max_length=256,
188 blank=True,
189 null=True,
190 validators=[validate_no_domain],
191 help_text=_("""URL that is a) relative to the service URL or b) this course if no service is selected.
192 Case a: url starting with / overwrites path in service url and extends it otherwise.
193 case b: url starting with / is absolute within this service and relative to the course path otherwise.
194 Note that URL entered here can not include scheme or domain.""")
195 )
196 menu_group_label = models.CharField(
197 max_length=255,
198 blank=True,
199 null=True,
200 help_text=_("Places menu item under a group label.")
201 )
202 menu_label = models.CharField(
203 max_length=255,
204 blank=True,
205 null=True,
206 help_text=_("Label for the menu link (else service default).")
207 )
208 menu_icon_class = models.CharField(
209 max_length=32,
210 null=True,
211 blank=True,
212 help_text=_("Menu icon style name (else service default), e.g. star see http://getbootstrap.com/components/#glyphicons-glyphs")
213 )
214 menu_weight = models.IntegerField(
215 default=0,
216 help_text=_("Heavier menu entries are placed after lighter ones.")
217 )
218 enabled = models.BooleanField(default=True)
219
220 class Meta:
221 ordering = ["course_instance", "menu_weight", "menu_label"]
222
223 def __str__(self):
224 out = self.label
225 if not self.is_enabled:
226 return "[Disabled] " + out
227 return out
228
229 def clean(self):
230 errors = {}
231 if not self.service:
232 if not self.menu_url:
233 errors['menu_url'] = ValidationError(_('Relative URL is required when there is no preconfigured service selected.'))
234 if not self.menu_label:
235 errors['menu_label'] = ValidationError(_('Menu label is required when there is no preconfigured service selected.'))
236 if errors:
237 raise ValidationError(errors)
238
239 @cached_property
240 def is_enabled(self):
241 if self.service:
242 return self.service.enabled and self.enabled
243 return self.enabled
244
245 @cached_property
246 def label(self):
247 if self.menu_label:
248 return self.menu_label
249 if self.service:
250 return self.service.menu_label
251 return ""
252
253 @cached_property
254 def icon_class(self):
255 if self.menu_icon_class:
256 return self.menu_icon_class
257 if self.service:
258 return self.service.menu_icon_class
259 return ""
260
261 @cached_property
262 def url(self):
263 if self.service:
264 kwargs = {
265 "course_slug": self.course_instance.course.url,
266 "instance_slug": self.course_instance.url,
267 "menu_id": self.id,
268 }
269 return self.service.as_leaf_class().get_url(replace=self.menu_url, kwargs=kwargs)
270 if '://' in self.menu_url:
271 # Deprecated, but DB can have old urls
272 return self.menu_url
273 return urljoin(self.course_instance.get_absolute_url(), self.menu_url)
274
275 @cached_property
276 def final_url(self):
277 if self.service:
278 return self.service.as_leaf_class().get_final_url(self.menu_url)
279 else:
280 return urljoin(self.course_instance.get_absolute_url(), self.menu_url)
281
282 def get_url_kwargs(self):
283 return dict(menu_id=self.id, **self.course_instance.get_url_kwargs())
284
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/external_services/models.py b/external_services/models.py
--- a/external_services/models.py
+++ b/external_services/models.py
@@ -209,7 +209,7 @@
max_length=32,
null=True,
blank=True,
- help_text=_("Menu icon style name (else service default), e.g. star see http://getbootstrap.com/components/#glyphicons-glyphs")
+ help_text=_("Menu icon style name (else service default), e.g. star see https://getbootstrap.com/docs/3.4/components/#glyphicons")
)
menu_weight = models.IntegerField(
default=0,
|
{"golden_diff": "diff --git a/external_services/models.py b/external_services/models.py\n--- a/external_services/models.py\n+++ b/external_services/models.py\n@@ -209,7 +209,7 @@\n max_length=32,\n null=True,\n blank=True,\n- help_text=_(\"Menu icon style name (else service default), e.g. star see http://getbootstrap.com/components/#glyphicons-glyphs\")\n+ help_text=_(\"Menu icon style name (else service default), e.g. star see https://getbootstrap.com/docs/3.4/components/#glyphicons\")\n )\n menu_weight = models.IntegerField(\n default=0,\n", "issue": "Edit course menu has outdated link to Glyphicons\nA+ edit course - menu - add menu item. The menu item has an icon. The hint in the icon field has outdated link to the Bootstrap documentation. You can not find the right icons in that link anymore. The link must be fixed. That is very easy to do.\r\n\r\nhttps://github.com/apluslms/a-plus/blob/57031587030092ac4c482fde53ff9d9577da0f0f/external_services/models.py#L212\r\n\r\nWe still use Bootstrap 3. The right link is this: \r\nhttps://getbootstrap.com/docs/3.4/components/#glyphicons\r\n\r\nThe old link lands in the latest Bootstrap version.\n", "before_files": [{"content": "from django.core.exceptions import ValidationError\nfrom django.urls import reverse\nfrom django.db import models\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import gettext_lazy as _\nfrom urllib.parse import urljoin, urlsplit\n\nfrom course.models import CourseInstance\nfrom inheritance.models import ModelWithInheritance\nfrom lib.helpers import Enum\nfrom lib.models import UrlMixin\n\n\ndef validate_no_domain(value):\n if value and '://' in value:\n raise ValidationError(_(\"Url can not contain scheme or domain part.\"))\n\n\nclass LinkService(ModelWithInheritance):\n '''\n A link to an external service.\n '''\n DESTINATION_REGION = Enum([\n ('INTERNAL', 0, _('Destination is hosted internally. Link to internal privacy notice.')),\n ('ORGANIZATION', 1, _('Destination is hosted in the same organization. Link to a privacy notice.')),\n ('EEA', 3, _('Destination is hosted in the European Economic Area. Link to a privacy notice.')),\n ('PRIVACYSHIELD', 5, _('Destination is hosted outside of the European Economic Area and used to be certified under the now invalid EU-US Privacy Shield. Link to an extended privacy notice.')),\n ('GLOBAL', 6, _('Destination is hosted outside of the European Economic Area. Link to an extended privacy notice.')),\n ])\n url = models.CharField(\n max_length=256,\n help_text=_(\"The service URL\")\n )\n destination_region = models.PositiveSmallIntegerField(\n choices=DESTINATION_REGION.choices,\n default=DESTINATION_REGION.GLOBAL,\n help_text=_(\"The geographical area of the destination. Will display correct user notice.\"),\n )\n privacy_notice_url = models.CharField(\n max_length=512,\n blank=True,\n help_text=_(\"A link to the service privacy notice. This is mandatory for services outside organization!\"))\n menu_label = models.CharField(\n max_length=255,\n help_text=_(\"A default label to show in the course menu.\")\n )\n menu_icon_class = models.CharField(\n max_length=32,\n default=\"globe\",\n help_text=_(\"A default menu icon style name, see http://getbootstrap.com/components/#glyphicons-glyphs\")\n )\n enabled = models.BooleanField(\n default=True,\n help_text=_(\"If not enabled, the service is disabled for all course instances.\")\n )\n\n class Meta:\n ordering = [\"menu_label\"]\n\n def __str__(self):\n out = \"{}: {}\".format(self.menu_label, self.url)\n if not self.enabled:\n return \"[Disabled] \" + out\n return out\n\n def clean(self):\n errors = {}\n if self.destination_region > self.DESTINATION_REGION.ORGANIZATION and not self.privacy_notice_url:\n errors['privacy_notice_url'] = ValidationError(_('Privacy notice URL is mandatory for services outside organization.'))\n if errors:\n raise ValidationError(errors)\n\n @property\n def url_parts(self):\n return urlsplit(self.url)\n\n @property\n def method(self):\n return 'GET'\n\n @property\n def sends_user_info(self):\n return False\n\n def get_url(self, replace=None, kwargs={}):\n '''Return the URL to the launch page of this service.'''\n if self.destination_region > self.DESTINATION_REGION.INTERNAL:\n return reverse('external-service-link', kwargs=kwargs)\n return self.get_final_url(replace)\n\n def get_final_url(self, replace=None):\n '''Return the launch URL for this service.\n\n The optional replace parameter may be a relative URL that is joined to\n the URL path of this service. The relative URL must not include a domain.\n '''\n url = self.url\n if replace:\n assert '://' not in replace and not replace.startswith('//'), \"Replace can't include domain\"\n url = urljoin(url, replace)\n return url\n\n\nclass LTIService(LinkService):\n '''\n A provider of an LTI service.\n '''\n LTI_ACCESS = Enum([\n ('ANON_API_NO', 0, _('Anonymous service, no API access')),\n ('PUBLIC_API_NO', 5, _('Public service, no API access')),\n ('PUBLIC_API_YES', 10, _('Public service, allow API access')),\n ])\n access_settings = models.IntegerField(\n choices=LTI_ACCESS.choices,\n default=LTI_ACCESS.ANON_API_NO,\n help_text=_(\"Select whether to pass pseudonymised user data to the LTI service.<br>Public services can also enable sharing the user's API token and course API URL in the LTI launch request. This grants the LTI tool API access with the user's privileges.\")\n )\n consumer_key = models.CharField(\n max_length=128,\n help_text=_(\"The consumer key provided by the LTI service.\")\n )\n consumer_secret = models.CharField(\n max_length=128,\n help_text=_(\"The consumer secret provided by the LTI service.\")\n )\n\n def __str__(self):\n out = \"(LTI) {}: {}\".format(self.menu_label, self.url)\n if not self.enabled:\n return \"[Disabled] \" + out\n return out\n\n @property\n def method(self):\n return 'POST'\n\n @property\n def sends_user_info(self):\n return True\n\n @property\n def is_anonymous(self):\n return self.access_settings == self.LTI_ACCESS.ANON_API_NO\n\n @property\n def api_access(self):\n return self.access_settings == self.LTI_ACCESS.PUBLIC_API_YES\n\n def get_url(self, replace=None, kwargs={}):\n return reverse('lti-login', kwargs=kwargs)\n\n\nclass MenuItemManager(models.Manager):\n\n def get_queryset(self):\n return super().get_queryset().select_related(\n 'course_instance', 'course_instance__course')\n\n\nclass MenuItem(UrlMixin, models.Model):\n '''\n Attaches link to course menu.\n '''\n ACCESS = Enum([\n ('STUDENT', 0, _(\"All students, assistants and teachers can access.\")),\n ('ASSISTANT', 5, _(\"Only assistants and teachers can access.\")),\n ('TEACHER', 10, _(\"Only teachers can access.\")),\n ])\n course_instance = models.ForeignKey(\n CourseInstance,\n on_delete=models.CASCADE,\n related_name=\"ext_services\",\n help_text=_(\"A course where the menu item exists.\")\n )\n access = models.IntegerField(\n choices=ACCESS.choices,\n default=ACCESS.STUDENT,\n )\n service = models.ForeignKey(\n LinkService,\n on_delete=models.CASCADE,\n blank=True,\n null=True,\n help_text=_(\"An external service to link to. These are configured by administrators.\")\n )\n menu_url = models.CharField(\n max_length=256,\n blank=True,\n null=True,\n validators=[validate_no_domain],\n help_text=_(\"\"\"URL that is a) relative to the service URL or b) this course if no service is selected.\nCase a: url starting with / overwrites path in service url and extends it otherwise.\ncase b: url starting with / is absolute within this service and relative to the course path otherwise.\nNote that URL entered here can not include scheme or domain.\"\"\")\n )\n menu_group_label = models.CharField(\n max_length=255,\n blank=True,\n null=True,\n help_text=_(\"Places menu item under a group label.\")\n )\n menu_label = models.CharField(\n max_length=255,\n blank=True,\n null=True,\n help_text=_(\"Label for the menu link (else service default).\")\n )\n menu_icon_class = models.CharField(\n max_length=32,\n null=True,\n blank=True,\n help_text=_(\"Menu icon style name (else service default), e.g. star see http://getbootstrap.com/components/#glyphicons-glyphs\")\n )\n menu_weight = models.IntegerField(\n default=0,\n help_text=_(\"Heavier menu entries are placed after lighter ones.\")\n )\n enabled = models.BooleanField(default=True)\n\n class Meta:\n ordering = [\"course_instance\", \"menu_weight\", \"menu_label\"]\n\n def __str__(self):\n out = self.label\n if not self.is_enabled:\n return \"[Disabled] \" + out\n return out\n\n def clean(self):\n errors = {}\n if not self.service:\n if not self.menu_url:\n errors['menu_url'] = ValidationError(_('Relative URL is required when there is no preconfigured service selected.'))\n if not self.menu_label:\n errors['menu_label'] = ValidationError(_('Menu label is required when there is no preconfigured service selected.'))\n if errors:\n raise ValidationError(errors)\n\n @cached_property\n def is_enabled(self):\n if self.service:\n return self.service.enabled and self.enabled\n return self.enabled\n\n @cached_property\n def label(self):\n if self.menu_label:\n return self.menu_label\n if self.service:\n return self.service.menu_label\n return \"\"\n\n @cached_property\n def icon_class(self):\n if self.menu_icon_class:\n return self.menu_icon_class\n if self.service:\n return self.service.menu_icon_class\n return \"\"\n\n @cached_property\n def url(self):\n if self.service:\n kwargs = {\n \"course_slug\": self.course_instance.course.url,\n \"instance_slug\": self.course_instance.url,\n \"menu_id\": self.id,\n }\n return self.service.as_leaf_class().get_url(replace=self.menu_url, kwargs=kwargs)\n if '://' in self.menu_url:\n # Deprecated, but DB can have old urls\n return self.menu_url\n return urljoin(self.course_instance.get_absolute_url(), self.menu_url)\n\n @cached_property\n def final_url(self):\n if self.service:\n return self.service.as_leaf_class().get_final_url(self.menu_url)\n else:\n return urljoin(self.course_instance.get_absolute_url(), self.menu_url)\n\n def get_url_kwargs(self):\n return dict(menu_id=self.id, **self.course_instance.get_url_kwargs())\n", "path": "external_services/models.py"}], "after_files": [{"content": "from django.core.exceptions import ValidationError\nfrom django.urls import reverse\nfrom django.db import models\nfrom django.utils.functional import cached_property\nfrom django.utils.translation import gettext_lazy as _\nfrom urllib.parse import urljoin, urlsplit\n\nfrom course.models import CourseInstance\nfrom inheritance.models import ModelWithInheritance\nfrom lib.helpers import Enum\nfrom lib.models import UrlMixin\n\n\ndef validate_no_domain(value):\n if value and '://' in value:\n raise ValidationError(_(\"Url can not contain scheme or domain part.\"))\n\n\nclass LinkService(ModelWithInheritance):\n '''\n A link to an external service.\n '''\n DESTINATION_REGION = Enum([\n ('INTERNAL', 0, _('Destination is hosted internally. Link to internal privacy notice.')),\n ('ORGANIZATION', 1, _('Destination is hosted in the same organization. Link to a privacy notice.')),\n ('EEA', 3, _('Destination is hosted in the European Economic Area. Link to a privacy notice.')),\n ('PRIVACYSHIELD', 5, _('Destination is hosted outside of the European Economic Area and used to be certified under the now invalid EU-US Privacy Shield. Link to an extended privacy notice.')),\n ('GLOBAL', 6, _('Destination is hosted outside of the European Economic Area. Link to an extended privacy notice.')),\n ])\n url = models.CharField(\n max_length=256,\n help_text=_(\"The service URL\")\n )\n destination_region = models.PositiveSmallIntegerField(\n choices=DESTINATION_REGION.choices,\n default=DESTINATION_REGION.GLOBAL,\n help_text=_(\"The geographical area of the destination. Will display correct user notice.\"),\n )\n privacy_notice_url = models.CharField(\n max_length=512,\n blank=True,\n help_text=_(\"A link to the service privacy notice. This is mandatory for services outside organization!\"))\n menu_label = models.CharField(\n max_length=255,\n help_text=_(\"A default label to show in the course menu.\")\n )\n menu_icon_class = models.CharField(\n max_length=32,\n default=\"globe\",\n help_text=_(\"A default menu icon style name, see http://getbootstrap.com/components/#glyphicons-glyphs\")\n )\n enabled = models.BooleanField(\n default=True,\n help_text=_(\"If not enabled, the service is disabled for all course instances.\")\n )\n\n class Meta:\n ordering = [\"menu_label\"]\n\n def __str__(self):\n out = \"{}: {}\".format(self.menu_label, self.url)\n if not self.enabled:\n return \"[Disabled] \" + out\n return out\n\n def clean(self):\n errors = {}\n if self.destination_region > self.DESTINATION_REGION.ORGANIZATION and not self.privacy_notice_url:\n errors['privacy_notice_url'] = ValidationError(_('Privacy notice URL is mandatory for services outside organization.'))\n if errors:\n raise ValidationError(errors)\n\n @property\n def url_parts(self):\n return urlsplit(self.url)\n\n @property\n def method(self):\n return 'GET'\n\n @property\n def sends_user_info(self):\n return False\n\n def get_url(self, replace=None, kwargs={}):\n '''Return the URL to the launch page of this service.'''\n if self.destination_region > self.DESTINATION_REGION.INTERNAL:\n return reverse('external-service-link', kwargs=kwargs)\n return self.get_final_url(replace)\n\n def get_final_url(self, replace=None):\n '''Return the launch URL for this service.\n\n The optional replace parameter may be a relative URL that is joined to\n the URL path of this service. The relative URL must not include a domain.\n '''\n url = self.url\n if replace:\n assert '://' not in replace and not replace.startswith('//'), \"Replace can't include domain\"\n url = urljoin(url, replace)\n return url\n\n\nclass LTIService(LinkService):\n '''\n A provider of an LTI service.\n '''\n LTI_ACCESS = Enum([\n ('ANON_API_NO', 0, _('Anonymous service, no API access')),\n ('PUBLIC_API_NO', 5, _('Public service, no API access')),\n ('PUBLIC_API_YES', 10, _('Public service, allow API access')),\n ])\n access_settings = models.IntegerField(\n choices=LTI_ACCESS.choices,\n default=LTI_ACCESS.ANON_API_NO,\n help_text=_(\"Select whether to pass pseudonymised user data to the LTI service.<br>Public services can also enable sharing the user's API token and course API URL in the LTI launch request. This grants the LTI tool API access with the user's privileges.\")\n )\n consumer_key = models.CharField(\n max_length=128,\n help_text=_(\"The consumer key provided by the LTI service.\")\n )\n consumer_secret = models.CharField(\n max_length=128,\n help_text=_(\"The consumer secret provided by the LTI service.\")\n )\n\n def __str__(self):\n out = \"(LTI) {}: {}\".format(self.menu_label, self.url)\n if not self.enabled:\n return \"[Disabled] \" + out\n return out\n\n @property\n def method(self):\n return 'POST'\n\n @property\n def sends_user_info(self):\n return True\n\n @property\n def is_anonymous(self):\n return self.access_settings == self.LTI_ACCESS.ANON_API_NO\n\n @property\n def api_access(self):\n return self.access_settings == self.LTI_ACCESS.PUBLIC_API_YES\n\n def get_url(self, replace=None, kwargs={}):\n return reverse('lti-login', kwargs=kwargs)\n\n\nclass MenuItemManager(models.Manager):\n\n def get_queryset(self):\n return super().get_queryset().select_related(\n 'course_instance', 'course_instance__course')\n\n\nclass MenuItem(UrlMixin, models.Model):\n '''\n Attaches link to course menu.\n '''\n ACCESS = Enum([\n ('STUDENT', 0, _(\"All students, assistants and teachers can access.\")),\n ('ASSISTANT', 5, _(\"Only assistants and teachers can access.\")),\n ('TEACHER', 10, _(\"Only teachers can access.\")),\n ])\n course_instance = models.ForeignKey(\n CourseInstance,\n on_delete=models.CASCADE,\n related_name=\"ext_services\",\n help_text=_(\"A course where the menu item exists.\")\n )\n access = models.IntegerField(\n choices=ACCESS.choices,\n default=ACCESS.STUDENT,\n )\n service = models.ForeignKey(\n LinkService,\n on_delete=models.CASCADE,\n blank=True,\n null=True,\n help_text=_(\"An external service to link to. These are configured by administrators.\")\n )\n menu_url = models.CharField(\n max_length=256,\n blank=True,\n null=True,\n validators=[validate_no_domain],\n help_text=_(\"\"\"URL that is a) relative to the service URL or b) this course if no service is selected.\nCase a: url starting with / overwrites path in service url and extends it otherwise.\ncase b: url starting with / is absolute within this service and relative to the course path otherwise.\nNote that URL entered here can not include scheme or domain.\"\"\")\n )\n menu_group_label = models.CharField(\n max_length=255,\n blank=True,\n null=True,\n help_text=_(\"Places menu item under a group label.\")\n )\n menu_label = models.CharField(\n max_length=255,\n blank=True,\n null=True,\n help_text=_(\"Label for the menu link (else service default).\")\n )\n menu_icon_class = models.CharField(\n max_length=32,\n null=True,\n blank=True,\n help_text=_(\"Menu icon style name (else service default), e.g. star see https://getbootstrap.com/docs/3.4/components/#glyphicons\")\n )\n menu_weight = models.IntegerField(\n default=0,\n help_text=_(\"Heavier menu entries are placed after lighter ones.\")\n )\n enabled = models.BooleanField(default=True)\n\n class Meta:\n ordering = [\"course_instance\", \"menu_weight\", \"menu_label\"]\n\n def __str__(self):\n out = self.label\n if not self.is_enabled:\n return \"[Disabled] \" + out\n return out\n\n def clean(self):\n errors = {}\n if not self.service:\n if not self.menu_url:\n errors['menu_url'] = ValidationError(_('Relative URL is required when there is no preconfigured service selected.'))\n if not self.menu_label:\n errors['menu_label'] = ValidationError(_('Menu label is required when there is no preconfigured service selected.'))\n if errors:\n raise ValidationError(errors)\n\n @cached_property\n def is_enabled(self):\n if self.service:\n return self.service.enabled and self.enabled\n return self.enabled\n\n @cached_property\n def label(self):\n if self.menu_label:\n return self.menu_label\n if self.service:\n return self.service.menu_label\n return \"\"\n\n @cached_property\n def icon_class(self):\n if self.menu_icon_class:\n return self.menu_icon_class\n if self.service:\n return self.service.menu_icon_class\n return \"\"\n\n @cached_property\n def url(self):\n if self.service:\n kwargs = {\n \"course_slug\": self.course_instance.course.url,\n \"instance_slug\": self.course_instance.url,\n \"menu_id\": self.id,\n }\n return self.service.as_leaf_class().get_url(replace=self.menu_url, kwargs=kwargs)\n if '://' in self.menu_url:\n # Deprecated, but DB can have old urls\n return self.menu_url\n return urljoin(self.course_instance.get_absolute_url(), self.menu_url)\n\n @cached_property\n def final_url(self):\n if self.service:\n return self.service.as_leaf_class().get_final_url(self.menu_url)\n else:\n return urljoin(self.course_instance.get_absolute_url(), self.menu_url)\n\n def get_url_kwargs(self):\n return dict(menu_id=self.id, **self.course_instance.get_url_kwargs())\n", "path": "external_services/models.py"}]}
| 3,303 | 141 |
gh_patches_debug_2394
|
rasdani/github-patches
|
git_diff
|
pyca__cryptography-1530
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release Automation Fixes for Seventh Release
The release script is not properly waiting for the wheel job it starts to finish before downloading. This causes it to download previous releases and attempt to upload them.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tasks.py`
Content:
```
1 # This file is dual licensed under the terms of the Apache License, Version
2 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3 # for complete details.
4
5 from __future__ import absolute_import, division, print_function
6
7 import getpass
8 import os
9 import time
10
11 import invoke
12
13 import requests
14
15
16 JENKINS_URL = "https://jenkins.cryptography.io/job/cryptography-wheel-builder"
17
18
19 def wait_for_build_completed(session):
20 while True:
21 response = session.get(
22 "{0}/lastBuild/api/json/".format(JENKINS_URL),
23 headers={
24 "Accept": "application/json",
25 }
26 )
27 response.raise_for_status()
28 if not response.json()["building"]:
29 assert response.json()["result"] == "SUCCESS"
30 break
31 time.sleep(0.1)
32
33
34 def download_artifacts(session):
35 response = session.get(
36 "{0}/lastBuild/api/json/".format(JENKINS_URL),
37 headers={
38 "Accept": "application/json"
39 }
40 )
41 response.raise_for_status()
42 assert not response.json()["building"]
43 assert response.json()["result"] == "SUCCESS"
44
45 paths = []
46
47 for run in response.json()["runs"]:
48 response = session.get(
49 run["url"] + "api/json/",
50 headers={
51 "Accept": "application/json",
52 }
53 )
54 response.raise_for_status()
55 for artifact in response.json()["artifacts"]:
56 response = session.get(
57 "{0}artifact/{1}".format(run["url"], artifact["relativePath"])
58 )
59 out_path = os.path.join(
60 os.path.dirname(__file__),
61 "dist",
62 artifact["fileName"],
63 )
64 with open(out_path, "wb") as f:
65 f.write(response.content)
66 paths.append(out_path)
67 return paths
68
69
70 @invoke.task
71 def release(version):
72 """
73 ``version`` should be a string like '0.4' or '1.0'.
74 """
75 invoke.run("git tag -s {0} -m '{0} release'".format(version))
76 invoke.run("git push --tags")
77
78 invoke.run("python setup.py sdist")
79 invoke.run("cd vectors/ && python setup.py sdist bdist_wheel")
80
81 invoke.run(
82 "twine upload -s dist/cryptography-{0}* "
83 "vectors/dist/cryptography_vectors-{0}*".format(version)
84 )
85
86 session = requests.Session()
87
88 # This tells the CDN to delete the cached response for the URL. We do this
89 # so that the Jenkins builders will see the new sdist immediately when they
90 # go to build the wheels.
91 response = session.request(
92 "PURGE", "https://pypi.python.org/simple/cryptography/"
93 )
94 response.raise_for_status()
95
96 username = getpass.getpass("Input the GitHub/Jenkins username: ")
97 token = getpass.getpass("Input the Jenkins token: ")
98 response = session.post(
99 "{0}/build".format(JENKINS_URL),
100 auth=requests.auth.HTTPBasicAuth(
101 username, token
102 ),
103 params={
104 "cause": "Building wheels for {0}".format(version)
105 }
106 )
107 response.raise_for_status()
108 wait_for_build_completed(session)
109 paths = download_artifacts(session)
110 invoke.run("twine upload {0}".format(" ".join(paths)))
111
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/tasks.py b/tasks.py
--- a/tasks.py
+++ b/tasks.py
@@ -17,6 +17,9 @@
def wait_for_build_completed(session):
+ # Wait 3 seconds before actually checking if the build is complete, to
+ # ensure that it had time to really start.
+ time.sleep(3)
while True:
response = session.get(
"{0}/lastBuild/api/json/".format(JENKINS_URL),
|
{"golden_diff": "diff --git a/tasks.py b/tasks.py\n--- a/tasks.py\n+++ b/tasks.py\n@@ -17,6 +17,9 @@\n \n \n def wait_for_build_completed(session):\n+ # Wait 3 seconds before actually checking if the build is complete, to\n+ # ensure that it had time to really start.\n+ time.sleep(3)\n while True:\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n", "issue": "Release Automation Fixes for Seventh Release\nThe release script is not properly waiting for the wheel job it starts to finish before downloading. This causes it to download previous releases and attempt to upload them.\n\n", "before_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport getpass\nimport os\nimport time\n\nimport invoke\n\nimport requests\n\n\nJENKINS_URL = \"https://jenkins.cryptography.io/job/cryptography-wheel-builder\"\n\n\ndef wait_for_build_completed(session):\n while True:\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n headers={\n \"Accept\": \"application/json\",\n }\n )\n response.raise_for_status()\n if not response.json()[\"building\"]:\n assert response.json()[\"result\"] == \"SUCCESS\"\n break\n time.sleep(0.1)\n\n\ndef download_artifacts(session):\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n headers={\n \"Accept\": \"application/json\"\n }\n )\n response.raise_for_status()\n assert not response.json()[\"building\"]\n assert response.json()[\"result\"] == \"SUCCESS\"\n\n paths = []\n\n for run in response.json()[\"runs\"]:\n response = session.get(\n run[\"url\"] + \"api/json/\",\n headers={\n \"Accept\": \"application/json\",\n }\n )\n response.raise_for_status()\n for artifact in response.json()[\"artifacts\"]:\n response = session.get(\n \"{0}artifact/{1}\".format(run[\"url\"], artifact[\"relativePath\"])\n )\n out_path = os.path.join(\n os.path.dirname(__file__),\n \"dist\",\n artifact[\"fileName\"],\n )\n with open(out_path, \"wb\") as f:\n f.write(response.content)\n paths.append(out_path)\n return paths\n\n\[email protected]\ndef release(version):\n \"\"\"\n ``version`` should be a string like '0.4' or '1.0'.\n \"\"\"\n invoke.run(\"git tag -s {0} -m '{0} release'\".format(version))\n invoke.run(\"git push --tags\")\n\n invoke.run(\"python setup.py sdist\")\n invoke.run(\"cd vectors/ && python setup.py sdist bdist_wheel\")\n\n invoke.run(\n \"twine upload -s dist/cryptography-{0}* \"\n \"vectors/dist/cryptography_vectors-{0}*\".format(version)\n )\n\n session = requests.Session()\n\n # This tells the CDN to delete the cached response for the URL. We do this\n # so that the Jenkins builders will see the new sdist immediately when they\n # go to build the wheels.\n response = session.request(\n \"PURGE\", \"https://pypi.python.org/simple/cryptography/\"\n )\n response.raise_for_status()\n\n username = getpass.getpass(\"Input the GitHub/Jenkins username: \")\n token = getpass.getpass(\"Input the Jenkins token: \")\n response = session.post(\n \"{0}/build\".format(JENKINS_URL),\n auth=requests.auth.HTTPBasicAuth(\n username, token\n ),\n params={\n \"cause\": \"Building wheels for {0}\".format(version)\n }\n )\n response.raise_for_status()\n wait_for_build_completed(session)\n paths = download_artifacts(session)\n invoke.run(\"twine upload {0}\".format(\" \".join(paths)))\n", "path": "tasks.py"}], "after_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import absolute_import, division, print_function\n\nimport getpass\nimport os\nimport time\n\nimport invoke\n\nimport requests\n\n\nJENKINS_URL = \"https://jenkins.cryptography.io/job/cryptography-wheel-builder\"\n\n\ndef wait_for_build_completed(session):\n # Wait 3 seconds before actually checking if the build is complete, to\n # ensure that it had time to really start.\n time.sleep(3)\n while True:\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n headers={\n \"Accept\": \"application/json\",\n }\n )\n response.raise_for_status()\n if not response.json()[\"building\"]:\n assert response.json()[\"result\"] == \"SUCCESS\"\n break\n time.sleep(0.1)\n\n\ndef download_artifacts(session):\n response = session.get(\n \"{0}/lastBuild/api/json/\".format(JENKINS_URL),\n headers={\n \"Accept\": \"application/json\"\n }\n )\n response.raise_for_status()\n assert not response.json()[\"building\"]\n assert response.json()[\"result\"] == \"SUCCESS\"\n\n paths = []\n\n for run in response.json()[\"runs\"]:\n response = session.get(\n run[\"url\"] + \"api/json/\",\n headers={\n \"Accept\": \"application/json\",\n }\n )\n response.raise_for_status()\n for artifact in response.json()[\"artifacts\"]:\n response = session.get(\n \"{0}artifact/{1}\".format(run[\"url\"], artifact[\"relativePath\"])\n )\n out_path = os.path.join(\n os.path.dirname(__file__),\n \"dist\",\n artifact[\"fileName\"],\n )\n with open(out_path, \"wb\") as f:\n f.write(response.content)\n paths.append(out_path)\n return paths\n\n\[email protected]\ndef release(version):\n \"\"\"\n ``version`` should be a string like '0.4' or '1.0'.\n \"\"\"\n invoke.run(\"git tag -s {0} -m '{0} release'\".format(version))\n invoke.run(\"git push --tags\")\n\n invoke.run(\"python setup.py sdist\")\n invoke.run(\"cd vectors/ && python setup.py sdist bdist_wheel\")\n\n invoke.run(\n \"twine upload -s dist/cryptography-{0}* \"\n \"vectors/dist/cryptography_vectors-{0}*\".format(version)\n )\n\n session = requests.Session()\n\n # This tells the CDN to delete the cached response for the URL. We do this\n # so that the Jenkins builders will see the new sdist immediately when they\n # go to build the wheels.\n response = session.request(\n \"PURGE\", \"https://pypi.python.org/simple/cryptography/\"\n )\n response.raise_for_status()\n\n username = getpass.getpass(\"Input the GitHub/Jenkins username: \")\n token = getpass.getpass(\"Input the Jenkins token: \")\n response = session.post(\n \"{0}/build\".format(JENKINS_URL),\n auth=requests.auth.HTTPBasicAuth(\n username, token\n ),\n params={\n \"cause\": \"Building wheels for {0}\".format(version)\n }\n )\n response.raise_for_status()\n wait_for_build_completed(session)\n paths = download_artifacts(session)\n invoke.run(\"twine upload {0}\".format(\" \".join(paths)))\n", "path": "tasks.py"}]}
| 1,253 | 104 |
gh_patches_debug_33110
|
rasdani/github-patches
|
git_diff
|
fossasia__open-event-server-934
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Functionality to Manage Events Dashboard and Specific Event Dashboard: Publish, Unpublish, Copy, Delete
- [x] Publish
- [x] Unpublish
- [x] Copy
- [x] Delete
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `open_event/views/admin/models_views/events.py`
Content:
```
1 import os
2
3 from flask import request, flash, url_for, redirect
4 from flask_admin import expose
5 from flask_admin.contrib.sqla import ModelView
6 from flask.ext import login
7
8 from open_event.helpers.helpers import fields_not_empty, string_empty
9 from ....helpers.data import DataManager, save_to_db
10 from ....helpers.data_getter import DataGetter
11 import datetime
12 from werkzeug.utils import secure_filename
13 from werkzeug.datastructures import ImmutableMultiDict
14
15 class EventsView(ModelView):
16 def is_accessible(self):
17 return login.current_user.is_authenticated
18
19 def _handle_view(self, name, **kwargs):
20 if not self.is_accessible():
21 return redirect(url_for('admin.login_view', next=request.url))
22
23 @expose('/')
24 def index_view(self):
25 live_events = DataGetter.get_live_events()
26 draft_events = DataGetter.get_draft_events()
27 past_events = DataGetter.get_past_events()
28 all_events = DataGetter.get_all_events()
29 return self.render('/gentelella/admin/event/index.html',
30 live_events=live_events, draft_events=draft_events, past_events=past_events,
31 all_events=all_events)
32
33 @expose('/create/', methods=('GET', 'POST'))
34 def create_view(self):
35 if request.method == 'POST':
36 imd = ImmutableMultiDict(request.files)
37 for img_file in imd.getlist('sponsors[logo]'):
38 if img_file.filename != '':
39 filename = secure_filename(img_file.filename)
40 img_file.save(os.path.join(os.path.realpath('.') + '/static/media/image/', filename))
41 event = DataManager.create_event(request.form, imd)
42 if request.form.get('state', u'Draft') == u'Published' and string_empty(event.location_name):
43 flash("Your event was saved. To publish your event please review the highlighted fields below.", "warning")
44 return redirect(url_for('.edit_view', event_id=event.id) + "#step=location_name")
45 if event:
46 return redirect(url_for('.details_view', event_id=event.id))
47 return redirect(url_for('.index_view'))
48 return self.render('/gentelella/admin/event/new/new.html',
49 start_date=datetime.datetime.now() + datetime.timedelta(days=10),
50 event_types=DataGetter.get_event_types(),
51 event_topics=DataGetter.get_event_topics())
52
53 @expose('/<int:event_id>/', methods=('GET', 'POST'))
54 def details_view(self, event_id):
55 event = DataGetter.get_event(event_id)
56
57 checklist = {"": ""}
58
59 if fields_not_empty(event, ['name', 'start_time', 'end_time', 'location_name', 'organizer_name',
60 'organizer_description']):
61 checklist["1"] = 'success'
62 elif fields_not_empty(event, ['name', 'start_time', 'end_time']):
63 checklist["1"] = 'missing_some'
64 else:
65 checklist["1"] = 'missing_main'
66
67 call_for_speakers = DataGetter.get_call_for_papers(event_id).first()
68 if call_for_speakers:
69 if fields_not_empty(call_for_speakers, ['announcement', 'start_date', 'end_date']):
70 checklist["4"] = "success"
71 elif fields_not_empty(call_for_speakers, ['start_date', 'end_date']):
72 checklist["4"] = "missing_some"
73 else:
74 checklist["4"] = 'missing_main'
75 else:
76 checklist["4"] = "optional"
77
78 sponsors = DataGetter.get_sponsors(event_id).all()
79 if not sponsors:
80 checklist["2"] = 'missing_main'
81 else:
82 for sponsor in sponsors:
83 if fields_not_empty(sponsor, ['name', 'description', 'url', 'level', 'logo']):
84 checklist["2"] = 'success'
85 break
86 else:
87 checklist["2"] = 'missing_some'
88
89 session_types = DataGetter.get_session_types_by_event_id(event_id)
90 tracks = DataGetter.get_tracks(event_id)
91 microlocations = DataGetter.get_microlocations(event_id)
92
93 if not session_types and not tracks and not microlocations:
94 checklist["3"] = 'optional'
95 elif not session_types or not tracks or not microlocations:
96 checklist["3"] = 'missing_main'
97 else:
98 for session_type in session_types:
99 if fields_not_empty(session_type, ['name', 'length']):
100 checklist["3"] = 'success'
101 break
102 else:
103 checklist["3"] = 'missing_some'
104 for microlocation in microlocations:
105 if fields_not_empty(microlocation, ['name']):
106 checklist["3"] = 'success'
107 break
108 else:
109 checklist["3"] = 'missing_some'
110 for tracks in tracks:
111 if fields_not_empty(tracks, ['name', 'color']):
112 checklist["3"] = 'success'
113 break
114 else:
115 checklist["3"] = 'missing_some'
116
117 checklist["5"] = 'success'
118 return self.render('/gentelella/admin/event/details/details.html', event=event, checklist=checklist)
119
120 @expose('/<int:event_id>/edit/', methods=('GET', 'POST'))
121 def edit_view(self, event_id):
122 event = DataGetter.get_event(event_id)
123 session_types = DataGetter.get_session_types_by_event_id(event_id).all()
124 tracks = DataGetter.get_tracks(event_id).all()
125 social_links = DataGetter.get_social_links_by_event_id(event_id)
126 microlocations = DataGetter.get_microlocations(event_id).all()
127 call_for_speakers = DataGetter.get_call_for_papers(event_id).first()
128 sponsors = DataGetter.get_sponsors(event_id)
129
130 if request.method == 'GET':
131 return self.render('/gentelella/admin/event/edit/edit.html', event=event, session_types=session_types,
132 tracks=tracks, social_links=social_links, microlocations=microlocations,
133 call_for_speakers=call_for_speakers, sponsors=sponsors, event_types=DataGetter.get_event_types(),
134 event_topics=DataGetter.get_event_topics())
135 if request.method == "POST":
136 event = DataManager.edit_event(request, event_id, event, session_types, tracks, social_links,
137 microlocations, call_for_speakers, sponsors)
138 if request.form.get('state', u'Draft') == u'Published' and string_empty(event.location_name):
139 flash("Your event was saved. To publish your event please review the highlighted fields below.",
140 "warning")
141 return redirect(url_for('.edit_view', event_id=event.id) + "#step=location_name")
142 return redirect(url_for('.details_view', event_id=event_id))
143
144 @expose('/<event_id>/delete/', methods=('GET',))
145 def delete_view(self, event_id):
146 if request.method == "GET":
147 DataManager.delete_event(event_id)
148 return redirect(url_for('.index_view'))
149
150 @expose('/<int:event_id>/update/', methods=('POST',))
151 def save_closing_date(self, event_id):
152 event = DataGetter.get_event(event_id)
153 event.closing_datetime = request.form['closing_datetime']
154 save_to_db(event, 'Closing Datetime Updated')
155 return self.render('/gentelella/admin/event/details/details.html', event=event)
156
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/open_event/views/admin/models_views/events.py b/open_event/views/admin/models_views/events.py
--- a/open_event/views/admin/models_views/events.py
+++ b/open_event/views/admin/models_views/events.py
@@ -145,6 +145,7 @@
def delete_view(self, event_id):
if request.method == "GET":
DataManager.delete_event(event_id)
+ flash("Your event has been deleted.", "danger")
return redirect(url_for('.index_view'))
@expose('/<int:event_id>/update/', methods=('POST',))
@@ -153,3 +154,33 @@
event.closing_datetime = request.form['closing_datetime']
save_to_db(event, 'Closing Datetime Updated')
return self.render('/gentelella/admin/event/details/details.html', event=event)
+
+ @expose('/<int:event_id>/publish/', methods=('GET',))
+ def publish_event(self, event_id):
+ event = DataGetter.get_event(event_id)
+ if string_empty(event.location_name):
+ flash("Your event was saved. To publish your event please review the highlighted fields below.", "warning")
+ return redirect(url_for('.edit_view', event_id=event.id) + "#step=location_name")
+ event.state = 'Published'
+ save_to_db(event, 'Event Published')
+ flash("Your event has been published.", "success")
+ return redirect(url_for('.details_view', event_id=event_id))
+
+ @expose('/<int:event_id>/unpublish/', methods=('GET',))
+ def unpublish_event(self, event_id):
+ event = DataGetter.get_event(event_id)
+ event.state = 'Draft'
+ save_to_db(event, 'Event Unpublished')
+ flash("Your event has been unpublished.", "warning")
+ return redirect(url_for('.details_view', event_id=event_id))
+
+ @expose('/<int:event_id>/copy/', methods=('GET',))
+ def copy_event(self, event_id):
+ event = DataGetter.get_event(event_id)
+ event.name = "Copy of " + event.name
+ return self.render('/gentelella/admin/event/new/new.html',
+ event=event,
+ is_copy=True,
+ start_date=datetime.datetime.now() + datetime.timedelta(days=10),
+ event_types=DataGetter.get_event_types(),
+ event_topics=DataGetter.get_event_topics())
|
{"golden_diff": "diff --git a/open_event/views/admin/models_views/events.py b/open_event/views/admin/models_views/events.py\n--- a/open_event/views/admin/models_views/events.py\n+++ b/open_event/views/admin/models_views/events.py\n@@ -145,6 +145,7 @@\n def delete_view(self, event_id):\n if request.method == \"GET\":\n DataManager.delete_event(event_id)\n+ flash(\"Your event has been deleted.\", \"danger\")\n return redirect(url_for('.index_view'))\n \n @expose('/<int:event_id>/update/', methods=('POST',))\n@@ -153,3 +154,33 @@\n event.closing_datetime = request.form['closing_datetime']\n save_to_db(event, 'Closing Datetime Updated')\n return self.render('/gentelella/admin/event/details/details.html', event=event)\n+\n+ @expose('/<int:event_id>/publish/', methods=('GET',))\n+ def publish_event(self, event_id):\n+ event = DataGetter.get_event(event_id)\n+ if string_empty(event.location_name):\n+ flash(\"Your event was saved. To publish your event please review the highlighted fields below.\", \"warning\")\n+ return redirect(url_for('.edit_view', event_id=event.id) + \"#step=location_name\")\n+ event.state = 'Published'\n+ save_to_db(event, 'Event Published')\n+ flash(\"Your event has been published.\", \"success\")\n+ return redirect(url_for('.details_view', event_id=event_id))\n+\n+ @expose('/<int:event_id>/unpublish/', methods=('GET',))\n+ def unpublish_event(self, event_id):\n+ event = DataGetter.get_event(event_id)\n+ event.state = 'Draft'\n+ save_to_db(event, 'Event Unpublished')\n+ flash(\"Your event has been unpublished.\", \"warning\")\n+ return redirect(url_for('.details_view', event_id=event_id))\n+\n+ @expose('/<int:event_id>/copy/', methods=('GET',))\n+ def copy_event(self, event_id):\n+ event = DataGetter.get_event(event_id)\n+ event.name = \"Copy of \" + event.name\n+ return self.render('/gentelella/admin/event/new/new.html',\n+ event=event,\n+ is_copy=True,\n+ start_date=datetime.datetime.now() + datetime.timedelta(days=10),\n+ event_types=DataGetter.get_event_types(),\n+ event_topics=DataGetter.get_event_topics())\n", "issue": "Functionality to Manage Events Dashboard and Specific Event Dashboard: Publish, Unpublish, Copy, Delete\n- [x] Publish\n- [x] Unpublish\n- [x] Copy\n- [x] Delete\n\n", "before_files": [{"content": "import os\n\nfrom flask import request, flash, url_for, redirect\nfrom flask_admin import expose\nfrom flask_admin.contrib.sqla import ModelView\nfrom flask.ext import login\n\nfrom open_event.helpers.helpers import fields_not_empty, string_empty\nfrom ....helpers.data import DataManager, save_to_db\nfrom ....helpers.data_getter import DataGetter\nimport datetime\nfrom werkzeug.utils import secure_filename\nfrom werkzeug.datastructures import ImmutableMultiDict\n\nclass EventsView(ModelView):\n def is_accessible(self):\n return login.current_user.is_authenticated\n\n def _handle_view(self, name, **kwargs):\n if not self.is_accessible():\n return redirect(url_for('admin.login_view', next=request.url))\n\n @expose('/')\n def index_view(self):\n live_events = DataGetter.get_live_events()\n draft_events = DataGetter.get_draft_events()\n past_events = DataGetter.get_past_events()\n all_events = DataGetter.get_all_events()\n return self.render('/gentelella/admin/event/index.html',\n live_events=live_events, draft_events=draft_events, past_events=past_events,\n all_events=all_events)\n\n @expose('/create/', methods=('GET', 'POST'))\n def create_view(self):\n if request.method == 'POST':\n imd = ImmutableMultiDict(request.files)\n for img_file in imd.getlist('sponsors[logo]'):\n if img_file.filename != '':\n filename = secure_filename(img_file.filename)\n img_file.save(os.path.join(os.path.realpath('.') + '/static/media/image/', filename))\n event = DataManager.create_event(request.form, imd)\n if request.form.get('state', u'Draft') == u'Published' and string_empty(event.location_name):\n flash(\"Your event was saved. To publish your event please review the highlighted fields below.\", \"warning\")\n return redirect(url_for('.edit_view', event_id=event.id) + \"#step=location_name\")\n if event:\n return redirect(url_for('.details_view', event_id=event.id))\n return redirect(url_for('.index_view'))\n return self.render('/gentelella/admin/event/new/new.html',\n start_date=datetime.datetime.now() + datetime.timedelta(days=10),\n event_types=DataGetter.get_event_types(),\n event_topics=DataGetter.get_event_topics())\n\n @expose('/<int:event_id>/', methods=('GET', 'POST'))\n def details_view(self, event_id):\n event = DataGetter.get_event(event_id)\n\n checklist = {\"\": \"\"}\n\n if fields_not_empty(event, ['name', 'start_time', 'end_time', 'location_name', 'organizer_name',\n 'organizer_description']):\n checklist[\"1\"] = 'success'\n elif fields_not_empty(event, ['name', 'start_time', 'end_time']):\n checklist[\"1\"] = 'missing_some'\n else:\n checklist[\"1\"] = 'missing_main'\n\n call_for_speakers = DataGetter.get_call_for_papers(event_id).first()\n if call_for_speakers:\n if fields_not_empty(call_for_speakers, ['announcement', 'start_date', 'end_date']):\n checklist[\"4\"] = \"success\"\n elif fields_not_empty(call_for_speakers, ['start_date', 'end_date']):\n checklist[\"4\"] = \"missing_some\"\n else:\n checklist[\"4\"] = 'missing_main'\n else:\n checklist[\"4\"] = \"optional\"\n\n sponsors = DataGetter.get_sponsors(event_id).all()\n if not sponsors:\n checklist[\"2\"] = 'missing_main'\n else:\n for sponsor in sponsors:\n if fields_not_empty(sponsor, ['name', 'description', 'url', 'level', 'logo']):\n checklist[\"2\"] = 'success'\n break\n else:\n checklist[\"2\"] = 'missing_some'\n\n session_types = DataGetter.get_session_types_by_event_id(event_id)\n tracks = DataGetter.get_tracks(event_id)\n microlocations = DataGetter.get_microlocations(event_id)\n\n if not session_types and not tracks and not microlocations:\n checklist[\"3\"] = 'optional'\n elif not session_types or not tracks or not microlocations:\n checklist[\"3\"] = 'missing_main'\n else:\n for session_type in session_types:\n if fields_not_empty(session_type, ['name', 'length']):\n checklist[\"3\"] = 'success'\n break\n else:\n checklist[\"3\"] = 'missing_some'\n for microlocation in microlocations:\n if fields_not_empty(microlocation, ['name']):\n checklist[\"3\"] = 'success'\n break\n else:\n checklist[\"3\"] = 'missing_some'\n for tracks in tracks:\n if fields_not_empty(tracks, ['name', 'color']):\n checklist[\"3\"] = 'success'\n break\n else:\n checklist[\"3\"] = 'missing_some'\n\n checklist[\"5\"] = 'success'\n return self.render('/gentelella/admin/event/details/details.html', event=event, checklist=checklist)\n\n @expose('/<int:event_id>/edit/', methods=('GET', 'POST'))\n def edit_view(self, event_id):\n event = DataGetter.get_event(event_id)\n session_types = DataGetter.get_session_types_by_event_id(event_id).all()\n tracks = DataGetter.get_tracks(event_id).all()\n social_links = DataGetter.get_social_links_by_event_id(event_id)\n microlocations = DataGetter.get_microlocations(event_id).all()\n call_for_speakers = DataGetter.get_call_for_papers(event_id).first()\n sponsors = DataGetter.get_sponsors(event_id)\n\n if request.method == 'GET':\n return self.render('/gentelella/admin/event/edit/edit.html', event=event, session_types=session_types,\n tracks=tracks, social_links=social_links, microlocations=microlocations,\n call_for_speakers=call_for_speakers, sponsors=sponsors, event_types=DataGetter.get_event_types(),\n event_topics=DataGetter.get_event_topics())\n if request.method == \"POST\":\n event = DataManager.edit_event(request, event_id, event, session_types, tracks, social_links,\n microlocations, call_for_speakers, sponsors)\n if request.form.get('state', u'Draft') == u'Published' and string_empty(event.location_name):\n flash(\"Your event was saved. To publish your event please review the highlighted fields below.\",\n \"warning\")\n return redirect(url_for('.edit_view', event_id=event.id) + \"#step=location_name\")\n return redirect(url_for('.details_view', event_id=event_id))\n\n @expose('/<event_id>/delete/', methods=('GET',))\n def delete_view(self, event_id):\n if request.method == \"GET\":\n DataManager.delete_event(event_id)\n return redirect(url_for('.index_view'))\n\n @expose('/<int:event_id>/update/', methods=('POST',))\n def save_closing_date(self, event_id):\n event = DataGetter.get_event(event_id)\n event.closing_datetime = request.form['closing_datetime']\n save_to_db(event, 'Closing Datetime Updated')\n return self.render('/gentelella/admin/event/details/details.html', event=event)\n", "path": "open_event/views/admin/models_views/events.py"}], "after_files": [{"content": "import os\n\nfrom flask import request, flash, url_for, redirect\nfrom flask_admin import expose\nfrom flask_admin.contrib.sqla import ModelView\nfrom flask.ext import login\n\nfrom open_event.helpers.helpers import fields_not_empty, string_empty\nfrom ....helpers.data import DataManager, save_to_db\nfrom ....helpers.data_getter import DataGetter\nimport datetime\nfrom werkzeug.utils import secure_filename\nfrom werkzeug.datastructures import ImmutableMultiDict\n\nclass EventsView(ModelView):\n def is_accessible(self):\n return login.current_user.is_authenticated\n\n def _handle_view(self, name, **kwargs):\n if not self.is_accessible():\n return redirect(url_for('admin.login_view', next=request.url))\n\n @expose('/')\n def index_view(self):\n live_events = DataGetter.get_live_events()\n draft_events = DataGetter.get_draft_events()\n past_events = DataGetter.get_past_events()\n all_events = DataGetter.get_all_events()\n return self.render('/gentelella/admin/event/index.html',\n live_events=live_events, draft_events=draft_events, past_events=past_events,\n all_events=all_events)\n\n @expose('/create/', methods=('GET', 'POST'))\n def create_view(self):\n if request.method == 'POST':\n imd = ImmutableMultiDict(request.files)\n for img_file in imd.getlist('sponsors[logo]'):\n if img_file.filename != '':\n filename = secure_filename(img_file.filename)\n img_file.save(os.path.join(os.path.realpath('.') + '/static/media/image/', filename))\n event = DataManager.create_event(request.form, imd)\n if request.form.get('state', u'Draft') == u'Published' and string_empty(event.location_name):\n flash(\"Your event was saved. To publish your event please review the highlighted fields below.\", \"warning\")\n return redirect(url_for('.edit_view', event_id=event.id) + \"#step=location_name\")\n if event:\n return redirect(url_for('.details_view', event_id=event.id))\n return redirect(url_for('.index_view'))\n return self.render('/gentelella/admin/event/new/new.html',\n start_date=datetime.datetime.now() + datetime.timedelta(days=10),\n event_types=DataGetter.get_event_types(),\n event_topics=DataGetter.get_event_topics())\n\n @expose('/<int:event_id>/', methods=('GET', 'POST'))\n def details_view(self, event_id):\n event = DataGetter.get_event(event_id)\n\n checklist = {\"\": \"\"}\n\n if fields_not_empty(event, ['name', 'start_time', 'end_time', 'location_name', 'organizer_name',\n 'organizer_description']):\n checklist[\"1\"] = 'success'\n elif fields_not_empty(event, ['name', 'start_time', 'end_time']):\n checklist[\"1\"] = 'missing_some'\n else:\n checklist[\"1\"] = 'missing_main'\n\n call_for_speakers = DataGetter.get_call_for_papers(event_id).first()\n if call_for_speakers:\n if fields_not_empty(call_for_speakers, ['announcement', 'start_date', 'end_date']):\n checklist[\"4\"] = \"success\"\n elif fields_not_empty(call_for_speakers, ['start_date', 'end_date']):\n checklist[\"4\"] = \"missing_some\"\n else:\n checklist[\"4\"] = 'missing_main'\n else:\n checklist[\"4\"] = \"optional\"\n\n sponsors = DataGetter.get_sponsors(event_id).all()\n if not sponsors:\n checklist[\"2\"] = 'missing_main'\n else:\n for sponsor in sponsors:\n if fields_not_empty(sponsor, ['name', 'description', 'url', 'level', 'logo']):\n checklist[\"2\"] = 'success'\n break\n else:\n checklist[\"2\"] = 'missing_some'\n\n session_types = DataGetter.get_session_types_by_event_id(event_id)\n tracks = DataGetter.get_tracks(event_id)\n microlocations = DataGetter.get_microlocations(event_id)\n\n if not session_types and not tracks and not microlocations:\n checklist[\"3\"] = 'optional'\n elif not session_types or not tracks or not microlocations:\n checklist[\"3\"] = 'missing_main'\n else:\n for session_type in session_types:\n if fields_not_empty(session_type, ['name', 'length']):\n checklist[\"3\"] = 'success'\n break\n else:\n checklist[\"3\"] = 'missing_some'\n for microlocation in microlocations:\n if fields_not_empty(microlocation, ['name']):\n checklist[\"3\"] = 'success'\n break\n else:\n checklist[\"3\"] = 'missing_some'\n for tracks in tracks:\n if fields_not_empty(tracks, ['name', 'color']):\n checklist[\"3\"] = 'success'\n break\n else:\n checklist[\"3\"] = 'missing_some'\n\n checklist[\"5\"] = 'success'\n return self.render('/gentelella/admin/event/details/details.html', event=event, checklist=checklist)\n\n @expose('/<int:event_id>/edit/', methods=('GET', 'POST'))\n def edit_view(self, event_id):\n event = DataGetter.get_event(event_id)\n session_types = DataGetter.get_session_types_by_event_id(event_id).all()\n tracks = DataGetter.get_tracks(event_id).all()\n social_links = DataGetter.get_social_links_by_event_id(event_id)\n microlocations = DataGetter.get_microlocations(event_id).all()\n call_for_speakers = DataGetter.get_call_for_papers(event_id).first()\n sponsors = DataGetter.get_sponsors(event_id)\n\n if request.method == 'GET':\n return self.render('/gentelella/admin/event/edit/edit.html', event=event, session_types=session_types,\n tracks=tracks, social_links=social_links, microlocations=microlocations,\n call_for_speakers=call_for_speakers, sponsors=sponsors, event_types=DataGetter.get_event_types(),\n event_topics=DataGetter.get_event_topics())\n if request.method == \"POST\":\n event = DataManager.edit_event(request, event_id, event, session_types, tracks, social_links,\n microlocations, call_for_speakers, sponsors)\n if request.form.get('state', u'Draft') == u'Published' and string_empty(event.location_name):\n flash(\"Your event was saved. To publish your event please review the highlighted fields below.\",\n \"warning\")\n return redirect(url_for('.edit_view', event_id=event.id) + \"#step=location_name\")\n return redirect(url_for('.details_view', event_id=event_id))\n\n @expose('/<event_id>/delete/', methods=('GET',))\n def delete_view(self, event_id):\n if request.method == \"GET\":\n DataManager.delete_event(event_id)\n flash(\"Your event has been deleted.\", \"danger\")\n return redirect(url_for('.index_view'))\n\n @expose('/<int:event_id>/update/', methods=('POST',))\n def save_closing_date(self, event_id):\n event = DataGetter.get_event(event_id)\n event.closing_datetime = request.form['closing_datetime']\n save_to_db(event, 'Closing Datetime Updated')\n return self.render('/gentelella/admin/event/details/details.html', event=event)\n\n @expose('/<int:event_id>/publish/', methods=('GET',))\n def publish_event(self, event_id):\n event = DataGetter.get_event(event_id)\n if string_empty(event.location_name):\n flash(\"Your event was saved. To publish your event please review the highlighted fields below.\", \"warning\")\n return redirect(url_for('.edit_view', event_id=event.id) + \"#step=location_name\")\n event.state = 'Published'\n save_to_db(event, 'Event Published')\n flash(\"Your event has been published.\", \"success\")\n return redirect(url_for('.details_view', event_id=event_id))\n\n @expose('/<int:event_id>/unpublish/', methods=('GET',))\n def unpublish_event(self, event_id):\n event = DataGetter.get_event(event_id)\n event.state = 'Draft'\n save_to_db(event, 'Event Unpublished')\n flash(\"Your event has been unpublished.\", \"warning\")\n return redirect(url_for('.details_view', event_id=event_id))\n\n @expose('/<int:event_id>/copy/', methods=('GET',))\n def copy_event(self, event_id):\n event = DataGetter.get_event(event_id)\n event.name = \"Copy of \" + event.name\n return self.render('/gentelella/admin/event/new/new.html',\n event=event,\n is_copy=True,\n start_date=datetime.datetime.now() + datetime.timedelta(days=10),\n event_types=DataGetter.get_event_types(),\n event_topics=DataGetter.get_event_topics())\n", "path": "open_event/views/admin/models_views/events.py"}]}
| 2,203 | 528 |
gh_patches_debug_14056
|
rasdani/github-patches
|
git_diff
|
NVIDIA__NeMo-4786
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use of undeclared `logger`.
`logger` is used here but not defined
https://github.com/NVIDIA/NeMo/blob/6abfbbfda654f44313068b950edb0f70b01449b1/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py#L157
I believe this should be switched to `logging`, which is used throughout the rest of the module.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py`
Content:
```
1 # Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import datetime
16 import multiprocessing as mp
17 import os
18 import pickle
19 import time
20 from functools import partial
21
22 import numpy as np
23 import torch
24
25 from nemo.core import Dataset
26 from nemo.utils import logging
27
28 __all__ = ['TextMemMapDataset', 'CSVMemMapDataset', 'build_index_files']
29 __idx_version__ = '0.1' # index file version
30 __idx_suffix__ = 'idx' # index file suffix
31
32
33 class TextMemMapDataset(Dataset):
34 """
35 Allow per-line lazy access to multiple text files using numpy memmap.
36 """
37
38 # FIXME: header_lines=0 by default
39 def __init__(
40 self, dataset_paths, newline_int=10, header_lines=0, workers=None, tokenizer=None, sort_dataset_paths=True,
41 ):
42 super().__init__()
43 self.mdata_midx_list = []
44
45 # Make a single string into a list
46 if isinstance(dataset_paths, str):
47 dataset_paths = [dataset_paths]
48
49 if len(dataset_paths) < 1:
50 raise ValueError("files_list must contain at leat one file name")
51
52 self._newline_int = newline_int
53 # skip first N lines
54 self._header_lines = header_lines
55 self._files_list = dataset_paths
56 self._worker = workers
57 self.tokenizer = tokenizer
58 self._sort_dataset_paths = sort_dataset_paths
59
60 if sort_dataset_paths:
61 self._files_list = sorted(self._files_list)
62
63 logging.info(f"Building data files")
64 # load all files into memmap
65 is_ditributed = torch.distributed.is_available() and torch.distributed.is_initialized()
66
67 if not is_ditributed or (is_ditributed and torch.distributed.get_rank() == 0):
68 build_index_files(dataset_paths, newline_int, workers=self._worker)
69
70 if is_ditributed:
71 torch.distributed.barrier()
72
73 logging.info(f"Loading data files")
74 start_time = time.time()
75 mdata_midx_list = [self.load_file(fn) for fn in self._files_list]
76 logging.info(
77 f'Time loading {len(mdata_midx_list)} mem-mapped files: {datetime.timedelta(seconds=time.time() - start_time)}'
78 )
79
80 logging.info("Computing global indices")
81 midx_bins = np.cumsum([(len(midx) - header_lines) for _, midx in mdata_midx_list])
82
83 self.midx_bins = midx_bins
84 self.mdata_midx_list = mdata_midx_list
85
86 def __del__(self):
87 if self.mdata_midx_list:
88 for mdata, midx in self.mdata_midx_list:
89 mdata._mmap.close()
90
91 def __len__(self):
92 return self.midx_bins[-1]
93
94 def __getitem__(self, idx):
95 """
96 Return a string from binary memmap
97 """
98 if idx >= self.midx_bins[-1]:
99 raise IndexError(f"Index {idx} if out of dataset range with {len(self)} samples")
100
101 # Identify the file containing the record
102 file_id = np.digitize(idx, self.midx_bins, right=False)
103 base_idx = self.midx_bins[file_id - 1] if file_id > 0 else 0
104 file_idx = idx - base_idx + self._header_lines
105 mdata, midx = self.mdata_midx_list[file_id]
106 # load sample
107 if file_idx == 0:
108 i = 0
109 j = midx[0]
110 else:
111 i = midx[file_idx - 1] + 1 # ignore newline
112 j = midx[file_idx]
113
114 text = mdata[i:j].tobytes().decode("utf-8")
115
116 # parse raw text (e.g., tokenize)
117 data = self._build_data_from_text(text)
118
119 return data
120
121 def _build_data_from_text(self, text):
122 """Allows child-classes to modify the parsing of raw text, prior to tokenization"""
123 # tokenize text if tokenizer is given
124 if self.tokenizer is not None:
125 data = self.tokenizer.text_to_ids(text)
126 else:
127 data = text
128
129 return data
130
131 def load_file(self, fn):
132 """
133 Loads a text file as np.int8.
134
135 Returns:
136 mdata - memorymap of np.int8
137 midx - indices pointing to the end-of-line (or end of file) position
138 size - number of lines in file
139 """
140 logging.info(f"Loading {fn}")
141 idx_fn = f"{fn}.{__idx_suffix__}"
142
143 # create data map
144 mdata = np.memmap(fn, dtype=np.uint8, mode='r')
145
146 if os.path.exists(idx_fn):
147 idx_dict = pickle.load(open(idx_fn, 'rb'))
148 midx = idx_dict['midx']
149 # test for header
150 if len(midx) < self._header_lines:
151 raise RuntimeError(f"Missing header, expected {self._header_lines} header lines")
152
153 # test for mismatch in expected newline_int
154 if 'newline_int' in idx_dict:
155 newline_int = idx_dict['newline_int']
156 if self._newline_int != newline_int:
157 logger.warning(f"Mismatch in newline_int, expected = {self._newline_int} but loaded {newline_int}")
158
159 # test for version mismatch (useful to force recreation of index files)
160 idx_version = idx_dict.get('version', '0.0')
161 if __idx_version__ != idx_version:
162 raise RuntimeError(
163 f"Version mismatch: Please delete existing '.{__idx_suffix__}' files. Expected version = {__idx_version__}, but file version = {idx_version}. File path = {idx_fn}"
164 )
165 else:
166 raise ValueError(f'Memory Map for {fn} is not found')
167
168 return (mdata, midx)
169
170
171 class CSVMemMapDataset(TextMemMapDataset):
172 """
173 Allow per-line lazy access to multiple text files using numpy memmap.
174 """
175
176 def __init__(
177 self,
178 dataset_paths,
179 newline_int=10,
180 header_lines=1,
181 workers=None,
182 tokenizer=None,
183 sort_dataset_paths=True,
184 data_col=1,
185 data_sep=',',
186 ):
187 super().__init__(
188 dataset_paths=dataset_paths,
189 newline_int=newline_int,
190 header_lines=header_lines,
191 workers=workers,
192 tokenizer=tokenizer,
193 sort_dataset_paths=sort_dataset_paths,
194 )
195 self._data_col = data_col
196 self._data_sep = data_sep
197
198 def _build_data_from_text(self, text):
199 """Return a CSV field from text"""
200 # get CSV field
201 text = text.split(self._data_sep)[self._data_col]
202 # tokenize
203 return super()._build_data_from_text(text)
204
205
206 def _build_memmap_index_files(newline_int, fn):
207 """Helper function to build an index file"""
208 idx_fn = f"{fn}.{__idx_suffix__}"
209
210 # create data map
211 mdata = np.memmap(fn, dtype=np.uint8, mode='r')
212 if os.path.exists(idx_fn):
213 return False
214 else:
215 logging.info(f"Building idx file = {idx_fn}")
216 midx = np.where(mdata == newline_int)[0]
217 midx_dtype = midx.dtype
218 # add last item in case there is no new-line
219 if (len(midx) == 0) or (midx[-1] + 1 != len(mdata)):
220 midx = np.asarray(midx.tolist() + [len(midx) + 1], dtype=midx_dtype)
221
222 # remove empty lines from end of file
223 midx = midx.tolist()
224 while len(midx) > 1 and (midx[-1] - midx[-2]) < 2:
225 midx.pop(-1)
226 midx = np.asarray(midx, dtype=midx_dtype)
227
228 data = dict(midx=midx, newline_int=newline_int, version=__idx_version__)
229 pickle.dump(data, open(idx_fn, "wb"))
230 mdata._mmap.close()
231 del mdata
232
233 return True
234
235
236 def build_index_files(dataset_paths, newline_int, workers=None):
237 """Auxiliary method to build multiple index files"""
238 if len(dataset_paths) < 1:
239 raise ValueError("files_list must contain at leat one file name")
240
241 if workers is None:
242 workers = max(1, os.cpu_count() // 2)
243
244 logging.info(f"Processing {len(dataset_paths)} data files using {workers} workers")
245 # load all files into memmap
246 start_time = time.time()
247 with mp.Pool(workers) as p:
248 build_status = p.map(partial(_build_memmap_index_files, newline_int), dataset_paths)
249
250 logging.info(
251 f'Time building {sum(build_status)} / {len(build_status)} mem-mapped files: {datetime.timedelta(seconds=time.time() - start_time)}'
252 )
253
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py b/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py
--- a/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py
+++ b/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py
@@ -154,7 +154,9 @@
if 'newline_int' in idx_dict:
newline_int = idx_dict['newline_int']
if self._newline_int != newline_int:
- logger.warning(f"Mismatch in newline_int, expected = {self._newline_int} but loaded {newline_int}")
+ logging.warning(
+ f"Mismatch in newline_int, expected = {self._newline_int} but loaded {newline_int}"
+ )
# test for version mismatch (useful to force recreation of index files)
idx_version = idx_dict.get('version', '0.0')
|
{"golden_diff": "diff --git a/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py b/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py\n--- a/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py\n+++ b/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py\n@@ -154,7 +154,9 @@\n if 'newline_int' in idx_dict:\n newline_int = idx_dict['newline_int']\n if self._newline_int != newline_int:\n- logger.warning(f\"Mismatch in newline_int, expected = {self._newline_int} but loaded {newline_int}\")\n+ logging.warning(\n+ f\"Mismatch in newline_int, expected = {self._newline_int} but loaded {newline_int}\"\n+ )\n \n # test for version mismatch (useful to force recreation of index files)\n idx_version = idx_dict.get('version', '0.0')\n", "issue": "Use of undeclared `logger`.\n`logger` is used here but not defined\r\n\r\nhttps://github.com/NVIDIA/NeMo/blob/6abfbbfda654f44313068b950edb0f70b01449b1/nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py#L157\r\n\r\nI believe this should be switched to `logging`, which is used throughout the rest of the module.\n", "before_files": [{"content": "# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport datetime\nimport multiprocessing as mp\nimport os\nimport pickle\nimport time\nfrom functools import partial\n\nimport numpy as np\nimport torch\n\nfrom nemo.core import Dataset\nfrom nemo.utils import logging\n\n__all__ = ['TextMemMapDataset', 'CSVMemMapDataset', 'build_index_files']\n__idx_version__ = '0.1' # index file version\n__idx_suffix__ = 'idx' # index file suffix\n\n\nclass TextMemMapDataset(Dataset):\n \"\"\"\n Allow per-line lazy access to multiple text files using numpy memmap.\n \"\"\"\n\n # FIXME: header_lines=0 by default\n def __init__(\n self, dataset_paths, newline_int=10, header_lines=0, workers=None, tokenizer=None, sort_dataset_paths=True,\n ):\n super().__init__()\n self.mdata_midx_list = []\n\n # Make a single string into a list\n if isinstance(dataset_paths, str):\n dataset_paths = [dataset_paths]\n\n if len(dataset_paths) < 1:\n raise ValueError(\"files_list must contain at leat one file name\")\n\n self._newline_int = newline_int\n # skip first N lines\n self._header_lines = header_lines\n self._files_list = dataset_paths\n self._worker = workers\n self.tokenizer = tokenizer\n self._sort_dataset_paths = sort_dataset_paths\n\n if sort_dataset_paths:\n self._files_list = sorted(self._files_list)\n\n logging.info(f\"Building data files\")\n # load all files into memmap\n is_ditributed = torch.distributed.is_available() and torch.distributed.is_initialized()\n\n if not is_ditributed or (is_ditributed and torch.distributed.get_rank() == 0):\n build_index_files(dataset_paths, newline_int, workers=self._worker)\n\n if is_ditributed:\n torch.distributed.barrier()\n\n logging.info(f\"Loading data files\")\n start_time = time.time()\n mdata_midx_list = [self.load_file(fn) for fn in self._files_list]\n logging.info(\n f'Time loading {len(mdata_midx_list)} mem-mapped files: {datetime.timedelta(seconds=time.time() - start_time)}'\n )\n\n logging.info(\"Computing global indices\")\n midx_bins = np.cumsum([(len(midx) - header_lines) for _, midx in mdata_midx_list])\n\n self.midx_bins = midx_bins\n self.mdata_midx_list = mdata_midx_list\n\n def __del__(self):\n if self.mdata_midx_list:\n for mdata, midx in self.mdata_midx_list:\n mdata._mmap.close()\n\n def __len__(self):\n return self.midx_bins[-1]\n\n def __getitem__(self, idx):\n \"\"\"\n Return a string from binary memmap\n \"\"\"\n if idx >= self.midx_bins[-1]:\n raise IndexError(f\"Index {idx} if out of dataset range with {len(self)} samples\")\n\n # Identify the file containing the record\n file_id = np.digitize(idx, self.midx_bins, right=False)\n base_idx = self.midx_bins[file_id - 1] if file_id > 0 else 0\n file_idx = idx - base_idx + self._header_lines\n mdata, midx = self.mdata_midx_list[file_id]\n # load sample\n if file_idx == 0:\n i = 0\n j = midx[0]\n else:\n i = midx[file_idx - 1] + 1 # ignore newline\n j = midx[file_idx]\n\n text = mdata[i:j].tobytes().decode(\"utf-8\")\n\n # parse raw text (e.g., tokenize)\n data = self._build_data_from_text(text)\n\n return data\n\n def _build_data_from_text(self, text):\n \"\"\"Allows child-classes to modify the parsing of raw text, prior to tokenization\"\"\"\n # tokenize text if tokenizer is given\n if self.tokenizer is not None:\n data = self.tokenizer.text_to_ids(text)\n else:\n data = text\n\n return data\n\n def load_file(self, fn):\n \"\"\"\n Loads a text file as np.int8.\n\n Returns:\n mdata - memorymap of np.int8\n midx - indices pointing to the end-of-line (or end of file) position\n size - number of lines in file\n \"\"\"\n logging.info(f\"Loading {fn}\")\n idx_fn = f\"{fn}.{__idx_suffix__}\"\n\n # create data map\n mdata = np.memmap(fn, dtype=np.uint8, mode='r')\n\n if os.path.exists(idx_fn):\n idx_dict = pickle.load(open(idx_fn, 'rb'))\n midx = idx_dict['midx']\n # test for header\n if len(midx) < self._header_lines:\n raise RuntimeError(f\"Missing header, expected {self._header_lines} header lines\")\n\n # test for mismatch in expected newline_int\n if 'newline_int' in idx_dict:\n newline_int = idx_dict['newline_int']\n if self._newline_int != newline_int:\n logger.warning(f\"Mismatch in newline_int, expected = {self._newline_int} but loaded {newline_int}\")\n\n # test for version mismatch (useful to force recreation of index files)\n idx_version = idx_dict.get('version', '0.0')\n if __idx_version__ != idx_version:\n raise RuntimeError(\n f\"Version mismatch: Please delete existing '.{__idx_suffix__}' files. Expected version = {__idx_version__}, but file version = {idx_version}. File path = {idx_fn}\"\n )\n else:\n raise ValueError(f'Memory Map for {fn} is not found')\n\n return (mdata, midx)\n\n\nclass CSVMemMapDataset(TextMemMapDataset):\n \"\"\"\n Allow per-line lazy access to multiple text files using numpy memmap.\n \"\"\"\n\n def __init__(\n self,\n dataset_paths,\n newline_int=10,\n header_lines=1,\n workers=None,\n tokenizer=None,\n sort_dataset_paths=True,\n data_col=1,\n data_sep=',',\n ):\n super().__init__(\n dataset_paths=dataset_paths,\n newline_int=newline_int,\n header_lines=header_lines,\n workers=workers,\n tokenizer=tokenizer,\n sort_dataset_paths=sort_dataset_paths,\n )\n self._data_col = data_col\n self._data_sep = data_sep\n\n def _build_data_from_text(self, text):\n \"\"\"Return a CSV field from text\"\"\"\n # get CSV field\n text = text.split(self._data_sep)[self._data_col]\n # tokenize\n return super()._build_data_from_text(text)\n\n\ndef _build_memmap_index_files(newline_int, fn):\n \"\"\"Helper function to build an index file\"\"\"\n idx_fn = f\"{fn}.{__idx_suffix__}\"\n\n # create data map\n mdata = np.memmap(fn, dtype=np.uint8, mode='r')\n if os.path.exists(idx_fn):\n return False\n else:\n logging.info(f\"Building idx file = {idx_fn}\")\n midx = np.where(mdata == newline_int)[0]\n midx_dtype = midx.dtype\n # add last item in case there is no new-line\n if (len(midx) == 0) or (midx[-1] + 1 != len(mdata)):\n midx = np.asarray(midx.tolist() + [len(midx) + 1], dtype=midx_dtype)\n\n # remove empty lines from end of file\n midx = midx.tolist()\n while len(midx) > 1 and (midx[-1] - midx[-2]) < 2:\n midx.pop(-1)\n midx = np.asarray(midx, dtype=midx_dtype)\n\n data = dict(midx=midx, newline_int=newline_int, version=__idx_version__)\n pickle.dump(data, open(idx_fn, \"wb\"))\n mdata._mmap.close()\n del mdata\n\n return True\n\n\ndef build_index_files(dataset_paths, newline_int, workers=None):\n \"\"\"Auxiliary method to build multiple index files\"\"\"\n if len(dataset_paths) < 1:\n raise ValueError(\"files_list must contain at leat one file name\")\n\n if workers is None:\n workers = max(1, os.cpu_count() // 2)\n\n logging.info(f\"Processing {len(dataset_paths)} data files using {workers} workers\")\n # load all files into memmap\n start_time = time.time()\n with mp.Pool(workers) as p:\n build_status = p.map(partial(_build_memmap_index_files, newline_int), dataset_paths)\n\n logging.info(\n f'Time building {sum(build_status)} / {len(build_status)} mem-mapped files: {datetime.timedelta(seconds=time.time() - start_time)}'\n )\n", "path": "nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py"}], "after_files": [{"content": "# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport datetime\nimport multiprocessing as mp\nimport os\nimport pickle\nimport time\nfrom functools import partial\n\nimport numpy as np\nimport torch\n\nfrom nemo.core import Dataset\nfrom nemo.utils import logging\n\n__all__ = ['TextMemMapDataset', 'CSVMemMapDataset', 'build_index_files']\n__idx_version__ = '0.1' # index file version\n__idx_suffix__ = 'idx' # index file suffix\n\n\nclass TextMemMapDataset(Dataset):\n \"\"\"\n Allow per-line lazy access to multiple text files using numpy memmap.\n \"\"\"\n\n # FIXME: header_lines=0 by default\n def __init__(\n self, dataset_paths, newline_int=10, header_lines=0, workers=None, tokenizer=None, sort_dataset_paths=True,\n ):\n super().__init__()\n self.mdata_midx_list = []\n\n # Make a single string into a list\n if isinstance(dataset_paths, str):\n dataset_paths = [dataset_paths]\n\n if len(dataset_paths) < 1:\n raise ValueError(\"files_list must contain at leat one file name\")\n\n self._newline_int = newline_int\n # skip first N lines\n self._header_lines = header_lines\n self._files_list = dataset_paths\n self._worker = workers\n self.tokenizer = tokenizer\n self._sort_dataset_paths = sort_dataset_paths\n\n if sort_dataset_paths:\n self._files_list = sorted(self._files_list)\n\n logging.info(f\"Building data files\")\n # load all files into memmap\n is_ditributed = torch.distributed.is_available() and torch.distributed.is_initialized()\n\n if not is_ditributed or (is_ditributed and torch.distributed.get_rank() == 0):\n build_index_files(dataset_paths, newline_int, workers=self._worker)\n\n if is_ditributed:\n torch.distributed.barrier()\n\n logging.info(f\"Loading data files\")\n start_time = time.time()\n mdata_midx_list = [self.load_file(fn) for fn in self._files_list]\n logging.info(\n f'Time loading {len(mdata_midx_list)} mem-mapped files: {datetime.timedelta(seconds=time.time() - start_time)}'\n )\n\n logging.info(\"Computing global indices\")\n midx_bins = np.cumsum([(len(midx) - header_lines) for _, midx in mdata_midx_list])\n\n self.midx_bins = midx_bins\n self.mdata_midx_list = mdata_midx_list\n\n def __del__(self):\n if self.mdata_midx_list:\n for mdata, midx in self.mdata_midx_list:\n mdata._mmap.close()\n\n def __len__(self):\n return self.midx_bins[-1]\n\n def __getitem__(self, idx):\n \"\"\"\n Return a string from binary memmap\n \"\"\"\n if idx >= self.midx_bins[-1]:\n raise IndexError(f\"Index {idx} if out of dataset range with {len(self)} samples\")\n\n # Identify the file containing the record\n file_id = np.digitize(idx, self.midx_bins, right=False)\n base_idx = self.midx_bins[file_id - 1] if file_id > 0 else 0\n file_idx = idx - base_idx + self._header_lines\n mdata, midx = self.mdata_midx_list[file_id]\n # load sample\n if file_idx == 0:\n i = 0\n j = midx[0]\n else:\n i = midx[file_idx - 1] + 1 # ignore newline\n j = midx[file_idx]\n\n text = mdata[i:j].tobytes().decode(\"utf-8\")\n\n # parse raw text (e.g., tokenize)\n data = self._build_data_from_text(text)\n\n return data\n\n def _build_data_from_text(self, text):\n \"\"\"Allows child-classes to modify the parsing of raw text, prior to tokenization\"\"\"\n # tokenize text if tokenizer is given\n if self.tokenizer is not None:\n data = self.tokenizer.text_to_ids(text)\n else:\n data = text\n\n return data\n\n def load_file(self, fn):\n \"\"\"\n Loads a text file as np.int8.\n\n Returns:\n mdata - memorymap of np.int8\n midx - indices pointing to the end-of-line (or end of file) position\n size - number of lines in file\n \"\"\"\n logging.info(f\"Loading {fn}\")\n idx_fn = f\"{fn}.{__idx_suffix__}\"\n\n # create data map\n mdata = np.memmap(fn, dtype=np.uint8, mode='r')\n\n if os.path.exists(idx_fn):\n idx_dict = pickle.load(open(idx_fn, 'rb'))\n midx = idx_dict['midx']\n # test for header\n if len(midx) < self._header_lines:\n raise RuntimeError(f\"Missing header, expected {self._header_lines} header lines\")\n\n # test for mismatch in expected newline_int\n if 'newline_int' in idx_dict:\n newline_int = idx_dict['newline_int']\n if self._newline_int != newline_int:\n logging.warning(\n f\"Mismatch in newline_int, expected = {self._newline_int} but loaded {newline_int}\"\n )\n\n # test for version mismatch (useful to force recreation of index files)\n idx_version = idx_dict.get('version', '0.0')\n if __idx_version__ != idx_version:\n raise RuntimeError(\n f\"Version mismatch: Please delete existing '.{__idx_suffix__}' files. Expected version = {__idx_version__}, but file version = {idx_version}. File path = {idx_fn}\"\n )\n else:\n raise ValueError(f'Memory Map for {fn} is not found')\n\n return (mdata, midx)\n\n\nclass CSVMemMapDataset(TextMemMapDataset):\n \"\"\"\n Allow per-line lazy access to multiple text files using numpy memmap.\n \"\"\"\n\n def __init__(\n self,\n dataset_paths,\n newline_int=10,\n header_lines=1,\n workers=None,\n tokenizer=None,\n sort_dataset_paths=True,\n data_col=1,\n data_sep=',',\n ):\n super().__init__(\n dataset_paths=dataset_paths,\n newline_int=newline_int,\n header_lines=header_lines,\n workers=workers,\n tokenizer=tokenizer,\n sort_dataset_paths=sort_dataset_paths,\n )\n self._data_col = data_col\n self._data_sep = data_sep\n\n def _build_data_from_text(self, text):\n \"\"\"Return a CSV field from text\"\"\"\n # get CSV field\n text = text.split(self._data_sep)[self._data_col]\n # tokenize\n return super()._build_data_from_text(text)\n\n\ndef _build_memmap_index_files(newline_int, fn):\n \"\"\"Helper function to build an index file\"\"\"\n idx_fn = f\"{fn}.{__idx_suffix__}\"\n\n # create data map\n mdata = np.memmap(fn, dtype=np.uint8, mode='r')\n if os.path.exists(idx_fn):\n return False\n else:\n logging.info(f\"Building idx file = {idx_fn}\")\n midx = np.where(mdata == newline_int)[0]\n midx_dtype = midx.dtype\n # add last item in case there is no new-line\n if (len(midx) == 0) or (midx[-1] + 1 != len(mdata)):\n midx = np.asarray(midx.tolist() + [len(midx) + 1], dtype=midx_dtype)\n\n # remove empty lines from end of file\n midx = midx.tolist()\n while len(midx) > 1 and (midx[-1] - midx[-2]) < 2:\n midx.pop(-1)\n midx = np.asarray(midx, dtype=midx_dtype)\n\n data = dict(midx=midx, newline_int=newline_int, version=__idx_version__)\n pickle.dump(data, open(idx_fn, \"wb\"))\n mdata._mmap.close()\n del mdata\n\n return True\n\n\ndef build_index_files(dataset_paths, newline_int, workers=None):\n \"\"\"Auxiliary method to build multiple index files\"\"\"\n if len(dataset_paths) < 1:\n raise ValueError(\"files_list must contain at leat one file name\")\n\n if workers is None:\n workers = max(1, os.cpu_count() // 2)\n\n logging.info(f\"Processing {len(dataset_paths)} data files using {workers} workers\")\n # load all files into memmap\n start_time = time.time()\n with mp.Pool(workers) as p:\n build_status = p.map(partial(_build_memmap_index_files, newline_int), dataset_paths)\n\n logging.info(\n f'Time building {sum(build_status)} / {len(build_status)} mem-mapped files: {datetime.timedelta(seconds=time.time() - start_time)}'\n )\n", "path": "nemo/collections/nlp/data/language_modeling/text_memmap_dataset.py"}]}
| 3,135 | 204 |
gh_patches_debug_20618
|
rasdani/github-patches
|
git_diff
|
celery__celery-6134
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Django celery fixup doesn't respect Django settings for PostgreSQL connections
When using the Django-Celery fixup to run background tasks for a Django web service, the tasks in the background do not respect the settings in Django for PostgreSQL (possibly other) connections. Every task will always create a new connection no matter the Django settings. Although it is possible to bypass this with the environment variable CELERY_DB_REUSE_MAX, it is preferred for it to follow the settings given in Django.
## Checklist
- [ ] I have included the output of ``celery -A proj report`` in the issue.
(if you are not able to do this, then at least specify the Celery
version affected).
Celery 4.0.2 with potentially all versions of Django (tested on 1.10.3 and 1.11.2)
- [X] I have verified that the issue exists against the `master` branch of Celery.
This line causes this "issue":
https://github.com/celery/celery/blob/master/celery/fixups/django.py#L186
## Steps to reproduce
Note that these steps require some monitoring service to be used, we have New Relic.
Note also that we use Heroku for this app in question.
1) Have a web facing process with Django that connects to your PostgreSQL database for ORM purposes
2) Have a worker process that also connects to the PostgreSQL for ORM purposes
3) Have the DATABASES['default']['CONN_MAX_AGE'] setting set to anything that isn't 0 (easiest to see with `None` for persistent connections)
4) Make multiple requests to the web portion of Django to cause some ORM activity (easiest to see if it happens on every request)
5) Get multiple tasks to execute on the worker that will cause some ORM activity (easiest to see if it happens on every task)
6) Use your monitoring service (New Relic in our case) to view a breakdown of all of the requests and worker activity. In New Relic you can check this using the transaction tracing; select the endpoint/task that made the db queries and check the breakdown.
## Expected behavior
psycopg2:connect would occur rarely with an average calls per transaction <<< 1
## Actual behavior
psycopg2:connect occurs very rarely with an average calls per transaction of <<< 1 for the web processes.
psycopg2:connect occurs every time with an average calls per transaction of 1 for the worker processes.
## Potential Resolution
With my limited knowledge of Celery's inner workings, it feels like a fairly simple fix that I could make on a PR myself, but I wanted some input before I spend the time setting that all up.
This fix seems to work when monkey patched into the `DjangoWorkerFixup` class.
``` Python
def _close_database(self):
try:
# Use Django's built in method of closing old connections.
# This ensures that the database settings are respected.
self._db.close_old_connections()
except AttributeError:
# Legacy functionality if we can't use the old connections for whatever reason.
for conn in self._db.connections.all():
try:
conn.close()
except self.interface_errors:
pass
except self.DatabaseError as exc:
str_exc = str(exc)
if 'closed' not in str_exc and 'not connected' not in str_exc:
raise
celery.fixups.django.DjangoWorkerFixup._close_database = _close_database
```
Django celery fixup doesn't respect Django settings for PostgreSQL connections
When using the Django-Celery fixup to run background tasks for a Django web service, the tasks in the background do not respect the settings in Django for PostgreSQL (possibly other) connections. Every task will always create a new connection no matter the Django settings. Although it is possible to bypass this with the environment variable CELERY_DB_REUSE_MAX, it is preferred for it to follow the settings given in Django.
## Checklist
- [ ] I have included the output of ``celery -A proj report`` in the issue.
(if you are not able to do this, then at least specify the Celery
version affected).
Celery 4.0.2 with potentially all versions of Django (tested on 1.10.3 and 1.11.2)
- [X] I have verified that the issue exists against the `master` branch of Celery.
This line causes this "issue":
https://github.com/celery/celery/blob/master/celery/fixups/django.py#L186
## Steps to reproduce
Note that these steps require some monitoring service to be used, we have New Relic.
Note also that we use Heroku for this app in question.
1) Have a web facing process with Django that connects to your PostgreSQL database for ORM purposes
2) Have a worker process that also connects to the PostgreSQL for ORM purposes
3) Have the DATABASES['default']['CONN_MAX_AGE'] setting set to anything that isn't 0 (easiest to see with `None` for persistent connections)
4) Make multiple requests to the web portion of Django to cause some ORM activity (easiest to see if it happens on every request)
5) Get multiple tasks to execute on the worker that will cause some ORM activity (easiest to see if it happens on every task)
6) Use your monitoring service (New Relic in our case) to view a breakdown of all of the requests and worker activity. In New Relic you can check this using the transaction tracing; select the endpoint/task that made the db queries and check the breakdown.
## Expected behavior
psycopg2:connect would occur rarely with an average calls per transaction <<< 1
## Actual behavior
psycopg2:connect occurs very rarely with an average calls per transaction of <<< 1 for the web processes.
psycopg2:connect occurs every time with an average calls per transaction of 1 for the worker processes.
## Potential Resolution
With my limited knowledge of Celery's inner workings, it feels like a fairly simple fix that I could make on a PR myself, but I wanted some input before I spend the time setting that all up.
This fix seems to work when monkey patched into the `DjangoWorkerFixup` class.
``` Python
def _close_database(self):
try:
# Use Django's built in method of closing old connections.
# This ensures that the database settings are respected.
self._db.close_old_connections()
except AttributeError:
# Legacy functionality if we can't use the old connections for whatever reason.
for conn in self._db.connections.all():
try:
conn.close()
except self.interface_errors:
pass
except self.DatabaseError as exc:
str_exc = str(exc)
if 'closed' not in str_exc and 'not connected' not in str_exc:
raise
celery.fixups.django.DjangoWorkerFixup._close_database = _close_database
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `celery/fixups/django.py`
Content:
```
1 """Django-specific customization."""
2 from __future__ import absolute_import, unicode_literals
3
4 import os
5 import sys
6 import warnings
7 from datetime import datetime
8 from importlib import import_module
9
10 from kombu.utils.imports import symbol_by_name
11 from kombu.utils.objects import cached_property
12
13 from celery import _state, signals
14 from celery.exceptions import FixupWarning, ImproperlyConfigured
15
16 __all__ = ('DjangoFixup', 'fixup')
17
18 ERR_NOT_INSTALLED = """\
19 Environment variable DJANGO_SETTINGS_MODULE is defined
20 but Django isn't installed. Won't apply Django fix-ups!
21 """
22
23
24 def _maybe_close_fd(fh):
25 try:
26 os.close(fh.fileno())
27 except (AttributeError, OSError, TypeError):
28 # TypeError added for celery#962
29 pass
30
31
32 def _verify_django_version(django):
33 if django.VERSION < (1, 11):
34 raise ImproperlyConfigured('Celery 4.x requires Django 1.11 or later.')
35
36
37 def fixup(app, env='DJANGO_SETTINGS_MODULE'):
38 """Install Django fixup if settings module environment is set."""
39 SETTINGS_MODULE = os.environ.get(env)
40 if SETTINGS_MODULE and 'django' not in app.loader_cls.lower():
41 try:
42 import django # noqa
43 except ImportError:
44 warnings.warn(FixupWarning(ERR_NOT_INSTALLED))
45 else:
46 _verify_django_version(django)
47 return DjangoFixup(app).install()
48
49
50 class DjangoFixup(object):
51 """Fixup installed when using Django."""
52
53 def __init__(self, app):
54 self.app = app
55 if _state.default_app is None:
56 self.app.set_default()
57 self._worker_fixup = None
58
59 def install(self):
60 # Need to add project directory to path.
61 # The project directory has precedence over system modules,
62 # so we prepend it to the path.
63 sys.path.insert(0, os.getcwd())
64
65 self._settings = symbol_by_name('django.conf:settings')
66 self.app.loader.now = self.now
67
68 signals.import_modules.connect(self.on_import_modules)
69 signals.worker_init.connect(self.on_worker_init)
70 return self
71
72 @property
73 def worker_fixup(self):
74 if self._worker_fixup is None:
75 self._worker_fixup = DjangoWorkerFixup(self.app)
76 return self._worker_fixup
77
78 @worker_fixup.setter
79 def worker_fixup(self, value):
80 self._worker_fixup = value
81
82 def on_import_modules(self, **kwargs):
83 # call django.setup() before task modules are imported
84 self.worker_fixup.validate_models()
85
86 def on_worker_init(self, **kwargs):
87 self.worker_fixup.install()
88
89 def now(self, utc=False):
90 return datetime.utcnow() if utc else self._now()
91
92 def autodiscover_tasks(self):
93 from django.apps import apps
94 return [config.name for config in apps.get_app_configs()]
95
96 @cached_property
97 def _now(self):
98 return symbol_by_name('django.utils.timezone:now')
99
100
101 class DjangoWorkerFixup(object):
102 _db_recycles = 0
103
104 def __init__(self, app):
105 self.app = app
106 self.db_reuse_max = self.app.conf.get('CELERY_DB_REUSE_MAX', None)
107 self._db = import_module('django.db')
108 self._cache = import_module('django.core.cache')
109 self._settings = symbol_by_name('django.conf:settings')
110
111 self.interface_errors = (
112 symbol_by_name('django.db.utils.InterfaceError'),
113 )
114 self.DatabaseError = symbol_by_name('django.db:DatabaseError')
115
116 def django_setup(self):
117 import django
118 django.setup()
119
120 def validate_models(self):
121 from django.core.checks import run_checks
122 self.django_setup()
123 run_checks()
124
125 def install(self):
126 signals.beat_embedded_init.connect(self.close_database)
127 signals.worker_ready.connect(self.on_worker_ready)
128 signals.task_prerun.connect(self.on_task_prerun)
129 signals.task_postrun.connect(self.on_task_postrun)
130 signals.worker_process_init.connect(self.on_worker_process_init)
131 self.close_database()
132 self.close_cache()
133 return self
134
135 def on_worker_process_init(self, **kwargs):
136 # Child process must validate models again if on Windows,
137 # or if they were started using execv.
138 if os.environ.get('FORKED_BY_MULTIPROCESSING'):
139 self.validate_models()
140
141 # close connections:
142 # the parent process may have established these,
143 # so need to close them.
144
145 # calling db.close() on some DB connections will cause
146 # the inherited DB conn to also get broken in the parent
147 # process so we need to remove it without triggering any
148 # network IO that close() might cause.
149 for c in self._db.connections.all():
150 if c and c.connection:
151 self._maybe_close_db_fd(c.connection)
152
153 # use the _ version to avoid DB_REUSE preventing the conn.close() call
154 self._close_database()
155 self.close_cache()
156
157 def _maybe_close_db_fd(self, fd):
158 try:
159 _maybe_close_fd(fd)
160 except self.interface_errors:
161 pass
162
163 def on_task_prerun(self, sender, **kwargs):
164 """Called before every task."""
165 if not getattr(sender.request, 'is_eager', False):
166 self.close_database()
167
168 def on_task_postrun(self, sender, **kwargs):
169 # See https://groups.google.com/group/django-users/
170 # browse_thread/thread/78200863d0c07c6d/
171 if not getattr(sender.request, 'is_eager', False):
172 self.close_database()
173 self.close_cache()
174
175 def close_database(self, **kwargs):
176 if not self.db_reuse_max:
177 return self._close_database()
178 if self._db_recycles >= self.db_reuse_max * 2:
179 self._db_recycles = 0
180 self._close_database()
181 self._db_recycles += 1
182
183 def _close_database(self):
184 for conn in self._db.connections.all():
185 try:
186 conn.close()
187 except self.interface_errors:
188 pass
189 except self.DatabaseError as exc:
190 str_exc = str(exc)
191 if 'closed' not in str_exc and 'not connected' not in str_exc:
192 raise
193
194 def close_cache(self):
195 try:
196 self._cache.close_caches()
197 except (TypeError, AttributeError):
198 pass
199
200 def on_worker_ready(self, **kwargs):
201 if self._settings.DEBUG:
202 warnings.warn('''Using settings.DEBUG leads to a memory
203 leak, never use this setting in production environments!''')
204
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/celery/fixups/django.py b/celery/fixups/django.py
--- a/celery/fixups/django.py
+++ b/celery/fixups/django.py
@@ -151,7 +151,7 @@
self._maybe_close_db_fd(c.connection)
# use the _ version to avoid DB_REUSE preventing the conn.close() call
- self._close_database()
+ self._close_database(force=True)
self.close_cache()
def _maybe_close_db_fd(self, fd):
@@ -180,10 +180,13 @@
self._close_database()
self._db_recycles += 1
- def _close_database(self):
+ def _close_database(self, force=False):
for conn in self._db.connections.all():
try:
- conn.close()
+ if force:
+ conn.close()
+ else:
+ conn.close_if_unusable_or_obsolete()
except self.interface_errors:
pass
except self.DatabaseError as exc:
|
{"golden_diff": "diff --git a/celery/fixups/django.py b/celery/fixups/django.py\n--- a/celery/fixups/django.py\n+++ b/celery/fixups/django.py\n@@ -151,7 +151,7 @@\n self._maybe_close_db_fd(c.connection)\n \n # use the _ version to avoid DB_REUSE preventing the conn.close() call\n- self._close_database()\n+ self._close_database(force=True)\n self.close_cache()\n \n def _maybe_close_db_fd(self, fd):\n@@ -180,10 +180,13 @@\n self._close_database()\n self._db_recycles += 1\n \n- def _close_database(self):\n+ def _close_database(self, force=False):\n for conn in self._db.connections.all():\n try:\n- conn.close()\n+ if force:\n+ conn.close()\n+ else:\n+ conn.close_if_unusable_or_obsolete()\n except self.interface_errors:\n pass\n except self.DatabaseError as exc:\n", "issue": "Django celery fixup doesn't respect Django settings for PostgreSQL connections\nWhen using the Django-Celery fixup to run background tasks for a Django web service, the tasks in the background do not respect the settings in Django for PostgreSQL (possibly other) connections. Every task will always create a new connection no matter the Django settings. Although it is possible to bypass this with the environment variable CELERY_DB_REUSE_MAX, it is preferred for it to follow the settings given in Django.\r\n\r\n## Checklist\r\n\r\n- [ ] I have included the output of ``celery -A proj report`` in the issue.\r\n (if you are not able to do this, then at least specify the Celery\r\n version affected).\r\nCelery 4.0.2 with potentially all versions of Django (tested on 1.10.3 and 1.11.2)\r\n- [X] I have verified that the issue exists against the `master` branch of Celery.\r\n\r\nThis line causes this \"issue\":\r\nhttps://github.com/celery/celery/blob/master/celery/fixups/django.py#L186\r\n\r\n## Steps to reproduce\r\nNote that these steps require some monitoring service to be used, we have New Relic.\r\nNote also that we use Heroku for this app in question.\r\n1) Have a web facing process with Django that connects to your PostgreSQL database for ORM purposes\r\n2) Have a worker process that also connects to the PostgreSQL for ORM purposes\r\n3) Have the DATABASES['default']['CONN_MAX_AGE'] setting set to anything that isn't 0 (easiest to see with `None` for persistent connections)\r\n4) Make multiple requests to the web portion of Django to cause some ORM activity (easiest to see if it happens on every request)\r\n5) Get multiple tasks to execute on the worker that will cause some ORM activity (easiest to see if it happens on every task)\r\n6) Use your monitoring service (New Relic in our case) to view a breakdown of all of the requests and worker activity. In New Relic you can check this using the transaction tracing; select the endpoint/task that made the db queries and check the breakdown.\r\n\r\n## Expected behavior\r\npsycopg2:connect would occur rarely with an average calls per transaction <<< 1\r\n\r\n## Actual behavior\r\npsycopg2:connect occurs very rarely with an average calls per transaction of <<< 1 for the web processes.\r\npsycopg2:connect occurs every time with an average calls per transaction of 1 for the worker processes.\r\n\r\n## Potential Resolution\r\nWith my limited knowledge of Celery's inner workings, it feels like a fairly simple fix that I could make on a PR myself, but I wanted some input before I spend the time setting that all up.\r\nThis fix seems to work when monkey patched into the `DjangoWorkerFixup` class.\r\n``` Python\r\n\r\ndef _close_database(self):\r\n try:\r\n # Use Django's built in method of closing old connections.\r\n # This ensures that the database settings are respected.\r\n self._db.close_old_connections()\r\n except AttributeError:\r\n # Legacy functionality if we can't use the old connections for whatever reason.\r\n for conn in self._db.connections.all():\r\n try:\r\n conn.close()\r\n except self.interface_errors:\r\n pass\r\n except self.DatabaseError as exc:\r\n str_exc = str(exc)\r\n if 'closed' not in str_exc and 'not connected' not in str_exc:\r\n raise\r\n\r\ncelery.fixups.django.DjangoWorkerFixup._close_database = _close_database\r\n```\nDjango celery fixup doesn't respect Django settings for PostgreSQL connections\nWhen using the Django-Celery fixup to run background tasks for a Django web service, the tasks in the background do not respect the settings in Django for PostgreSQL (possibly other) connections. Every task will always create a new connection no matter the Django settings. Although it is possible to bypass this with the environment variable CELERY_DB_REUSE_MAX, it is preferred for it to follow the settings given in Django.\r\n\r\n## Checklist\r\n\r\n- [ ] I have included the output of ``celery -A proj report`` in the issue.\r\n (if you are not able to do this, then at least specify the Celery\r\n version affected).\r\nCelery 4.0.2 with potentially all versions of Django (tested on 1.10.3 and 1.11.2)\r\n- [X] I have verified that the issue exists against the `master` branch of Celery.\r\n\r\nThis line causes this \"issue\":\r\nhttps://github.com/celery/celery/blob/master/celery/fixups/django.py#L186\r\n\r\n## Steps to reproduce\r\nNote that these steps require some monitoring service to be used, we have New Relic.\r\nNote also that we use Heroku for this app in question.\r\n1) Have a web facing process with Django that connects to your PostgreSQL database for ORM purposes\r\n2) Have a worker process that also connects to the PostgreSQL for ORM purposes\r\n3) Have the DATABASES['default']['CONN_MAX_AGE'] setting set to anything that isn't 0 (easiest to see with `None` for persistent connections)\r\n4) Make multiple requests to the web portion of Django to cause some ORM activity (easiest to see if it happens on every request)\r\n5) Get multiple tasks to execute on the worker that will cause some ORM activity (easiest to see if it happens on every task)\r\n6) Use your monitoring service (New Relic in our case) to view a breakdown of all of the requests and worker activity. In New Relic you can check this using the transaction tracing; select the endpoint/task that made the db queries and check the breakdown.\r\n\r\n## Expected behavior\r\npsycopg2:connect would occur rarely with an average calls per transaction <<< 1\r\n\r\n## Actual behavior\r\npsycopg2:connect occurs very rarely with an average calls per transaction of <<< 1 for the web processes.\r\npsycopg2:connect occurs every time with an average calls per transaction of 1 for the worker processes.\r\n\r\n## Potential Resolution\r\nWith my limited knowledge of Celery's inner workings, it feels like a fairly simple fix that I could make on a PR myself, but I wanted some input before I spend the time setting that all up.\r\nThis fix seems to work when monkey patched into the `DjangoWorkerFixup` class.\r\n``` Python\r\n\r\ndef _close_database(self):\r\n try:\r\n # Use Django's built in method of closing old connections.\r\n # This ensures that the database settings are respected.\r\n self._db.close_old_connections()\r\n except AttributeError:\r\n # Legacy functionality if we can't use the old connections for whatever reason.\r\n for conn in self._db.connections.all():\r\n try:\r\n conn.close()\r\n except self.interface_errors:\r\n pass\r\n except self.DatabaseError as exc:\r\n str_exc = str(exc)\r\n if 'closed' not in str_exc and 'not connected' not in str_exc:\r\n raise\r\n\r\ncelery.fixups.django.DjangoWorkerFixup._close_database = _close_database\r\n```\n", "before_files": [{"content": "\"\"\"Django-specific customization.\"\"\"\nfrom __future__ import absolute_import, unicode_literals\n\nimport os\nimport sys\nimport warnings\nfrom datetime import datetime\nfrom importlib import import_module\n\nfrom kombu.utils.imports import symbol_by_name\nfrom kombu.utils.objects import cached_property\n\nfrom celery import _state, signals\nfrom celery.exceptions import FixupWarning, ImproperlyConfigured\n\n__all__ = ('DjangoFixup', 'fixup')\n\nERR_NOT_INSTALLED = \"\"\"\\\nEnvironment variable DJANGO_SETTINGS_MODULE is defined\nbut Django isn't installed. Won't apply Django fix-ups!\n\"\"\"\n\n\ndef _maybe_close_fd(fh):\n try:\n os.close(fh.fileno())\n except (AttributeError, OSError, TypeError):\n # TypeError added for celery#962\n pass\n\n\ndef _verify_django_version(django):\n if django.VERSION < (1, 11):\n raise ImproperlyConfigured('Celery 4.x requires Django 1.11 or later.')\n\n\ndef fixup(app, env='DJANGO_SETTINGS_MODULE'):\n \"\"\"Install Django fixup if settings module environment is set.\"\"\"\n SETTINGS_MODULE = os.environ.get(env)\n if SETTINGS_MODULE and 'django' not in app.loader_cls.lower():\n try:\n import django # noqa\n except ImportError:\n warnings.warn(FixupWarning(ERR_NOT_INSTALLED))\n else:\n _verify_django_version(django)\n return DjangoFixup(app).install()\n\n\nclass DjangoFixup(object):\n \"\"\"Fixup installed when using Django.\"\"\"\n\n def __init__(self, app):\n self.app = app\n if _state.default_app is None:\n self.app.set_default()\n self._worker_fixup = None\n\n def install(self):\n # Need to add project directory to path.\n # The project directory has precedence over system modules,\n # so we prepend it to the path.\n sys.path.insert(0, os.getcwd())\n\n self._settings = symbol_by_name('django.conf:settings')\n self.app.loader.now = self.now\n\n signals.import_modules.connect(self.on_import_modules)\n signals.worker_init.connect(self.on_worker_init)\n return self\n\n @property\n def worker_fixup(self):\n if self._worker_fixup is None:\n self._worker_fixup = DjangoWorkerFixup(self.app)\n return self._worker_fixup\n\n @worker_fixup.setter\n def worker_fixup(self, value):\n self._worker_fixup = value\n\n def on_import_modules(self, **kwargs):\n # call django.setup() before task modules are imported\n self.worker_fixup.validate_models()\n\n def on_worker_init(self, **kwargs):\n self.worker_fixup.install()\n\n def now(self, utc=False):\n return datetime.utcnow() if utc else self._now()\n\n def autodiscover_tasks(self):\n from django.apps import apps\n return [config.name for config in apps.get_app_configs()]\n\n @cached_property\n def _now(self):\n return symbol_by_name('django.utils.timezone:now')\n\n\nclass DjangoWorkerFixup(object):\n _db_recycles = 0\n\n def __init__(self, app):\n self.app = app\n self.db_reuse_max = self.app.conf.get('CELERY_DB_REUSE_MAX', None)\n self._db = import_module('django.db')\n self._cache = import_module('django.core.cache')\n self._settings = symbol_by_name('django.conf:settings')\n\n self.interface_errors = (\n symbol_by_name('django.db.utils.InterfaceError'),\n )\n self.DatabaseError = symbol_by_name('django.db:DatabaseError')\n\n def django_setup(self):\n import django\n django.setup()\n\n def validate_models(self):\n from django.core.checks import run_checks\n self.django_setup()\n run_checks()\n\n def install(self):\n signals.beat_embedded_init.connect(self.close_database)\n signals.worker_ready.connect(self.on_worker_ready)\n signals.task_prerun.connect(self.on_task_prerun)\n signals.task_postrun.connect(self.on_task_postrun)\n signals.worker_process_init.connect(self.on_worker_process_init)\n self.close_database()\n self.close_cache()\n return self\n\n def on_worker_process_init(self, **kwargs):\n # Child process must validate models again if on Windows,\n # or if they were started using execv.\n if os.environ.get('FORKED_BY_MULTIPROCESSING'):\n self.validate_models()\n\n # close connections:\n # the parent process may have established these,\n # so need to close them.\n\n # calling db.close() on some DB connections will cause\n # the inherited DB conn to also get broken in the parent\n # process so we need to remove it without triggering any\n # network IO that close() might cause.\n for c in self._db.connections.all():\n if c and c.connection:\n self._maybe_close_db_fd(c.connection)\n\n # use the _ version to avoid DB_REUSE preventing the conn.close() call\n self._close_database()\n self.close_cache()\n\n def _maybe_close_db_fd(self, fd):\n try:\n _maybe_close_fd(fd)\n except self.interface_errors:\n pass\n\n def on_task_prerun(self, sender, **kwargs):\n \"\"\"Called before every task.\"\"\"\n if not getattr(sender.request, 'is_eager', False):\n self.close_database()\n\n def on_task_postrun(self, sender, **kwargs):\n # See https://groups.google.com/group/django-users/\n # browse_thread/thread/78200863d0c07c6d/\n if not getattr(sender.request, 'is_eager', False):\n self.close_database()\n self.close_cache()\n\n def close_database(self, **kwargs):\n if not self.db_reuse_max:\n return self._close_database()\n if self._db_recycles >= self.db_reuse_max * 2:\n self._db_recycles = 0\n self._close_database()\n self._db_recycles += 1\n\n def _close_database(self):\n for conn in self._db.connections.all():\n try:\n conn.close()\n except self.interface_errors:\n pass\n except self.DatabaseError as exc:\n str_exc = str(exc)\n if 'closed' not in str_exc and 'not connected' not in str_exc:\n raise\n\n def close_cache(self):\n try:\n self._cache.close_caches()\n except (TypeError, AttributeError):\n pass\n\n def on_worker_ready(self, **kwargs):\n if self._settings.DEBUG:\n warnings.warn('''Using settings.DEBUG leads to a memory\n leak, never use this setting in production environments!''')\n", "path": "celery/fixups/django.py"}], "after_files": [{"content": "\"\"\"Django-specific customization.\"\"\"\nfrom __future__ import absolute_import, unicode_literals\n\nimport os\nimport sys\nimport warnings\nfrom datetime import datetime\nfrom importlib import import_module\n\nfrom kombu.utils.imports import symbol_by_name\nfrom kombu.utils.objects import cached_property\n\nfrom celery import _state, signals\nfrom celery.exceptions import FixupWarning, ImproperlyConfigured\n\n__all__ = ('DjangoFixup', 'fixup')\n\nERR_NOT_INSTALLED = \"\"\"\\\nEnvironment variable DJANGO_SETTINGS_MODULE is defined\nbut Django isn't installed. Won't apply Django fix-ups!\n\"\"\"\n\n\ndef _maybe_close_fd(fh):\n try:\n os.close(fh.fileno())\n except (AttributeError, OSError, TypeError):\n # TypeError added for celery#962\n pass\n\n\ndef _verify_django_version(django):\n if django.VERSION < (1, 11):\n raise ImproperlyConfigured('Celery 4.x requires Django 1.11 or later.')\n\n\ndef fixup(app, env='DJANGO_SETTINGS_MODULE'):\n \"\"\"Install Django fixup if settings module environment is set.\"\"\"\n SETTINGS_MODULE = os.environ.get(env)\n if SETTINGS_MODULE and 'django' not in app.loader_cls.lower():\n try:\n import django # noqa\n except ImportError:\n warnings.warn(FixupWarning(ERR_NOT_INSTALLED))\n else:\n _verify_django_version(django)\n return DjangoFixup(app).install()\n\n\nclass DjangoFixup(object):\n \"\"\"Fixup installed when using Django.\"\"\"\n\n def __init__(self, app):\n self.app = app\n if _state.default_app is None:\n self.app.set_default()\n self._worker_fixup = None\n\n def install(self):\n # Need to add project directory to path.\n # The project directory has precedence over system modules,\n # so we prepend it to the path.\n sys.path.insert(0, os.getcwd())\n\n self._settings = symbol_by_name('django.conf:settings')\n self.app.loader.now = self.now\n\n signals.import_modules.connect(self.on_import_modules)\n signals.worker_init.connect(self.on_worker_init)\n return self\n\n @property\n def worker_fixup(self):\n if self._worker_fixup is None:\n self._worker_fixup = DjangoWorkerFixup(self.app)\n return self._worker_fixup\n\n @worker_fixup.setter\n def worker_fixup(self, value):\n self._worker_fixup = value\n\n def on_import_modules(self, **kwargs):\n # call django.setup() before task modules are imported\n self.worker_fixup.validate_models()\n\n def on_worker_init(self, **kwargs):\n self.worker_fixup.install()\n\n def now(self, utc=False):\n return datetime.utcnow() if utc else self._now()\n\n def autodiscover_tasks(self):\n from django.apps import apps\n return [config.name for config in apps.get_app_configs()]\n\n @cached_property\n def _now(self):\n return symbol_by_name('django.utils.timezone:now')\n\n\nclass DjangoWorkerFixup(object):\n _db_recycles = 0\n\n def __init__(self, app):\n self.app = app\n self.db_reuse_max = self.app.conf.get('CELERY_DB_REUSE_MAX', None)\n self._db = import_module('django.db')\n self._cache = import_module('django.core.cache')\n self._settings = symbol_by_name('django.conf:settings')\n\n self.interface_errors = (\n symbol_by_name('django.db.utils.InterfaceError'),\n )\n self.DatabaseError = symbol_by_name('django.db:DatabaseError')\n\n def django_setup(self):\n import django\n django.setup()\n\n def validate_models(self):\n from django.core.checks import run_checks\n self.django_setup()\n run_checks()\n\n def install(self):\n signals.beat_embedded_init.connect(self.close_database)\n signals.worker_ready.connect(self.on_worker_ready)\n signals.task_prerun.connect(self.on_task_prerun)\n signals.task_postrun.connect(self.on_task_postrun)\n signals.worker_process_init.connect(self.on_worker_process_init)\n self.close_database()\n self.close_cache()\n return self\n\n def on_worker_process_init(self, **kwargs):\n # Child process must validate models again if on Windows,\n # or if they were started using execv.\n if os.environ.get('FORKED_BY_MULTIPROCESSING'):\n self.validate_models()\n\n # close connections:\n # the parent process may have established these,\n # so need to close them.\n\n # calling db.close() on some DB connections will cause\n # the inherited DB conn to also get broken in the parent\n # process so we need to remove it without triggering any\n # network IO that close() might cause.\n for c in self._db.connections.all():\n if c and c.connection:\n self._maybe_close_db_fd(c.connection)\n\n # use the _ version to avoid DB_REUSE preventing the conn.close() call\n self._close_database(force=True)\n self.close_cache()\n\n def _maybe_close_db_fd(self, fd):\n try:\n _maybe_close_fd(fd)\n except self.interface_errors:\n pass\n\n def on_task_prerun(self, sender, **kwargs):\n \"\"\"Called before every task.\"\"\"\n if not getattr(sender.request, 'is_eager', False):\n self.close_database()\n\n def on_task_postrun(self, sender, **kwargs):\n # See https://groups.google.com/group/django-users/\n # browse_thread/thread/78200863d0c07c6d/\n if not getattr(sender.request, 'is_eager', False):\n self.close_database()\n self.close_cache()\n\n def close_database(self, **kwargs):\n if not self.db_reuse_max:\n return self._close_database()\n if self._db_recycles >= self.db_reuse_max * 2:\n self._db_recycles = 0\n self._close_database()\n self._db_recycles += 1\n\n def _close_database(self, force=False):\n for conn in self._db.connections.all():\n try:\n if force:\n conn.close()\n else:\n conn.close_if_unusable_or_obsolete()\n except self.interface_errors:\n pass\n except self.DatabaseError as exc:\n str_exc = str(exc)\n if 'closed' not in str_exc and 'not connected' not in str_exc:\n raise\n\n def close_cache(self):\n try:\n self._cache.close_caches()\n except (TypeError, AttributeError):\n pass\n\n def on_worker_ready(self, **kwargs):\n if self._settings.DEBUG:\n warnings.warn('''Using settings.DEBUG leads to a memory\n leak, never use this setting in production environments!''')\n", "path": "celery/fixups/django.py"}]}
| 3,704 | 234 |
gh_patches_debug_59731
|
rasdani/github-patches
|
git_diff
|
Textualize__textual-772
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Grid cell margin issue
Adding margin: 1 to the cells within this grid causes unexpected output:
<img width="752" alt="image" src="https://user-images.githubusercontent.com/5740731/190180955-3b10bd1f-60ca-4fda-9473-bc7d9a325b9d.png">
## `grid_buttons.py`
```python
from textual.app import App, ComposeResult
from textual.widgets import Button
class GridButtons(App):
def compose(self) -> ComposeResult:
yield Button.success("A")
yield Button.success("B")
yield Button.success("C")
yield Button.success("D")
yield Button.success("E")
yield Button.success("F")
app = GridButtons(css_path="grid_buttons.css")
if __name__ == '__main__':
app.run()
```
## `grid_buttons.css`
```scss
Screen {
layout: grid;
grid-size: 3;
grid-rows: 1fr;
grid-columns: 1fr;
}
Button {
margin: 1;
}
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/textual/layouts/grid.py`
Content:
```
1 from __future__ import annotations
2
3 from fractions import Fraction
4 from typing import TYPE_CHECKING, Iterable
5
6 from .._layout import ArrangeResult, Layout, WidgetPlacement
7 from .._resolve import resolve
8 from ..css.scalar import Scalar
9 from ..geometry import Region, Size, Spacing
10
11 if TYPE_CHECKING:
12 from ..widget import Widget
13
14
15 class GridLayout(Layout):
16 """Used to layout Widgets in to a grid."""
17
18 name = "grid"
19
20 def arrange(
21 self, parent: Widget, children: list[Widget], size: Size
22 ) -> ArrangeResult:
23 styles = parent.styles
24 row_scalars = styles.grid_rows or [Scalar.parse("1fr")]
25 column_scalars = styles.grid_columns or [Scalar.parse("1fr")]
26 gutter_horizontal = styles.grid_gutter_horizontal
27 gutter_vertical = styles.grid_gutter_vertical
28 table_size_columns = max(1, styles.grid_size_columns)
29 table_size_rows = styles.grid_size_rows
30 viewport = parent.screen.size
31
32 def cell_coords(column_count: int) -> Iterable[tuple[int, int]]:
33 """Iterate over table coordinates ad infinitum.
34
35 Args:
36 column_count (int): Number of columns
37
38 """
39 row = 0
40 while True:
41 for column in range(column_count):
42 yield (column, row)
43 row += 1
44
45 def widget_coords(
46 column_start: int, row_start: int, columns: int, rows: int
47 ) -> set[tuple[int, int]]:
48 """Get coords occupied by a cell.
49
50 Args:
51 column_start (int): Start column.
52 row_start (int): Start_row.
53 columns (int): Number of columns.
54 rows (int): Number of rows.
55
56 Returns:
57 set[tuple[int, int]]: Set of coords.
58 """
59 return {
60 (column, row)
61 for column in range(column_start, column_start + columns)
62 for row in range(row_start, row_start + rows)
63 }
64
65 def repeat_scalars(scalars: Iterable[Scalar], count: int) -> list[Scalar]:
66 """Repeat an iterable of scalars as many times as required to return
67 a list of `count` values.
68
69 Args:
70 scalars (Iterable[T]): Iterable of values.
71 count (int): Number of values to return.
72
73 Returns:
74 list[T]: A list of values.
75 """
76 limited_values = list(scalars)[:]
77 while len(limited_values) < count:
78 limited_values.extend(scalars)
79 return limited_values[:count]
80
81 cell_map: dict[tuple[int, int], tuple[Widget, bool]] = {}
82 cell_size_map: dict[Widget, tuple[int, int, int, int]] = {}
83
84 column_count = table_size_columns
85 next_coord = iter(cell_coords(column_count)).__next__
86 cell_coord = (0, 0)
87 column = row = 0
88
89 for child in children:
90 child_styles = child.styles
91 column_span = child_styles.column_span or 1
92 row_span = child_styles.row_span or 1
93 # Find a slot where this cell fits
94 # A cell on a previous row may have a row span
95 while True:
96 column, row = cell_coord
97 coords = widget_coords(column, row, column_span, row_span)
98 if cell_map.keys().isdisjoint(coords):
99 for coord in coords:
100 cell_map[coord] = (child, coord == cell_coord)
101 cell_size_map[child] = (
102 column,
103 row,
104 column_span - 1,
105 row_span - 1,
106 )
107 break
108 else:
109 cell_coord = next_coord()
110 continue
111 cell_coord = next_coord()
112
113 # Resolve columns / rows
114 columns = resolve(
115 repeat_scalars(column_scalars, table_size_columns),
116 size.width,
117 gutter_vertical,
118 size,
119 viewport,
120 )
121 rows = resolve(
122 repeat_scalars(
123 row_scalars, table_size_rows if table_size_rows else row + 1
124 ),
125 size.height,
126 gutter_horizontal,
127 size,
128 viewport,
129 )
130
131 placements: list[WidgetPlacement] = []
132 add_placement = placements.append
133 fraction_unit = Fraction(1)
134 widgets: list[Widget] = []
135 add_widget = widgets.append
136 max_column = len(columns) - 1
137 max_row = len(rows) - 1
138 margin = Spacing()
139 for widget, (column, row, column_span, row_span) in cell_size_map.items():
140 x = columns[column][0]
141 if row > max_row:
142 break
143 y = rows[row][0]
144 x2, cell_width = columns[min(max_column, column + column_span)]
145 y2, cell_height = rows[min(max_row, row + row_span)]
146 cell_size = Size(cell_width + x2 - x, cell_height + y2 - y)
147 width, height, margin = widget._get_box_model(
148 cell_size,
149 viewport,
150 fraction_unit,
151 )
152 region = (
153 Region(x, y, int(width), int(height))
154 .shrink(margin)
155 .clip_size(cell_size)
156 )
157 add_placement(WidgetPlacement(region, margin, widget))
158 add_widget(widget)
159
160 return (placements, set(widgets))
161
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/textual/layouts/grid.py b/src/textual/layouts/grid.py
--- a/src/textual/layouts/grid.py
+++ b/src/textual/layouts/grid.py
@@ -150,7 +150,7 @@
fraction_unit,
)
region = (
- Region(x, y, int(width), int(height))
+ Region(x, y, int(width + margin.width), int(height + margin.height))
.shrink(margin)
.clip_size(cell_size)
)
|
{"golden_diff": "diff --git a/src/textual/layouts/grid.py b/src/textual/layouts/grid.py\n--- a/src/textual/layouts/grid.py\n+++ b/src/textual/layouts/grid.py\n@@ -150,7 +150,7 @@\n fraction_unit,\n )\n region = (\n- Region(x, y, int(width), int(height))\n+ Region(x, y, int(width + margin.width), int(height + margin.height))\n .shrink(margin)\n .clip_size(cell_size)\n )\n", "issue": "Grid cell margin issue\nAdding margin: 1 to the cells within this grid causes unexpected output:\r\n\r\n<img width=\"752\" alt=\"image\" src=\"https://user-images.githubusercontent.com/5740731/190180955-3b10bd1f-60ca-4fda-9473-bc7d9a325b9d.png\">\r\n\r\n## `grid_buttons.py`\r\n\r\n```python\r\nfrom textual.app import App, ComposeResult\r\nfrom textual.widgets import Button\r\n\r\n\r\nclass GridButtons(App):\r\n def compose(self) -> ComposeResult:\r\n yield Button.success(\"A\")\r\n yield Button.success(\"B\")\r\n yield Button.success(\"C\")\r\n yield Button.success(\"D\")\r\n yield Button.success(\"E\")\r\n yield Button.success(\"F\")\r\n\r\n\r\napp = GridButtons(css_path=\"grid_buttons.css\")\r\nif __name__ == '__main__':\r\n app.run()\r\n```\r\n\r\n## `grid_buttons.css`\r\n\r\n```scss\r\nScreen {\r\n layout: grid;\r\n grid-size: 3;\r\n grid-rows: 1fr;\r\n grid-columns: 1fr;\r\n}\r\n\r\nButton {\r\n margin: 1;\r\n}\r\n```\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom fractions import Fraction\nfrom typing import TYPE_CHECKING, Iterable\n\nfrom .._layout import ArrangeResult, Layout, WidgetPlacement\nfrom .._resolve import resolve\nfrom ..css.scalar import Scalar\nfrom ..geometry import Region, Size, Spacing\n\nif TYPE_CHECKING:\n from ..widget import Widget\n\n\nclass GridLayout(Layout):\n \"\"\"Used to layout Widgets in to a grid.\"\"\"\n\n name = \"grid\"\n\n def arrange(\n self, parent: Widget, children: list[Widget], size: Size\n ) -> ArrangeResult:\n styles = parent.styles\n row_scalars = styles.grid_rows or [Scalar.parse(\"1fr\")]\n column_scalars = styles.grid_columns or [Scalar.parse(\"1fr\")]\n gutter_horizontal = styles.grid_gutter_horizontal\n gutter_vertical = styles.grid_gutter_vertical\n table_size_columns = max(1, styles.grid_size_columns)\n table_size_rows = styles.grid_size_rows\n viewport = parent.screen.size\n\n def cell_coords(column_count: int) -> Iterable[tuple[int, int]]:\n \"\"\"Iterate over table coordinates ad infinitum.\n\n Args:\n column_count (int): Number of columns\n\n \"\"\"\n row = 0\n while True:\n for column in range(column_count):\n yield (column, row)\n row += 1\n\n def widget_coords(\n column_start: int, row_start: int, columns: int, rows: int\n ) -> set[tuple[int, int]]:\n \"\"\"Get coords occupied by a cell.\n\n Args:\n column_start (int): Start column.\n row_start (int): Start_row.\n columns (int): Number of columns.\n rows (int): Number of rows.\n\n Returns:\n set[tuple[int, int]]: Set of coords.\n \"\"\"\n return {\n (column, row)\n for column in range(column_start, column_start + columns)\n for row in range(row_start, row_start + rows)\n }\n\n def repeat_scalars(scalars: Iterable[Scalar], count: int) -> list[Scalar]:\n \"\"\"Repeat an iterable of scalars as many times as required to return\n a list of `count` values.\n\n Args:\n scalars (Iterable[T]): Iterable of values.\n count (int): Number of values to return.\n\n Returns:\n list[T]: A list of values.\n \"\"\"\n limited_values = list(scalars)[:]\n while len(limited_values) < count:\n limited_values.extend(scalars)\n return limited_values[:count]\n\n cell_map: dict[tuple[int, int], tuple[Widget, bool]] = {}\n cell_size_map: dict[Widget, tuple[int, int, int, int]] = {}\n\n column_count = table_size_columns\n next_coord = iter(cell_coords(column_count)).__next__\n cell_coord = (0, 0)\n column = row = 0\n\n for child in children:\n child_styles = child.styles\n column_span = child_styles.column_span or 1\n row_span = child_styles.row_span or 1\n # Find a slot where this cell fits\n # A cell on a previous row may have a row span\n while True:\n column, row = cell_coord\n coords = widget_coords(column, row, column_span, row_span)\n if cell_map.keys().isdisjoint(coords):\n for coord in coords:\n cell_map[coord] = (child, coord == cell_coord)\n cell_size_map[child] = (\n column,\n row,\n column_span - 1,\n row_span - 1,\n )\n break\n else:\n cell_coord = next_coord()\n continue\n cell_coord = next_coord()\n\n # Resolve columns / rows\n columns = resolve(\n repeat_scalars(column_scalars, table_size_columns),\n size.width,\n gutter_vertical,\n size,\n viewport,\n )\n rows = resolve(\n repeat_scalars(\n row_scalars, table_size_rows if table_size_rows else row + 1\n ),\n size.height,\n gutter_horizontal,\n size,\n viewport,\n )\n\n placements: list[WidgetPlacement] = []\n add_placement = placements.append\n fraction_unit = Fraction(1)\n widgets: list[Widget] = []\n add_widget = widgets.append\n max_column = len(columns) - 1\n max_row = len(rows) - 1\n margin = Spacing()\n for widget, (column, row, column_span, row_span) in cell_size_map.items():\n x = columns[column][0]\n if row > max_row:\n break\n y = rows[row][0]\n x2, cell_width = columns[min(max_column, column + column_span)]\n y2, cell_height = rows[min(max_row, row + row_span)]\n cell_size = Size(cell_width + x2 - x, cell_height + y2 - y)\n width, height, margin = widget._get_box_model(\n cell_size,\n viewport,\n fraction_unit,\n )\n region = (\n Region(x, y, int(width), int(height))\n .shrink(margin)\n .clip_size(cell_size)\n )\n add_placement(WidgetPlacement(region, margin, widget))\n add_widget(widget)\n\n return (placements, set(widgets))\n", "path": "src/textual/layouts/grid.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom fractions import Fraction\nfrom typing import TYPE_CHECKING, Iterable\n\nfrom .._layout import ArrangeResult, Layout, WidgetPlacement\nfrom .._resolve import resolve\nfrom ..css.scalar import Scalar\nfrom ..geometry import Region, Size, Spacing\n\nif TYPE_CHECKING:\n from ..widget import Widget\n\n\nclass GridLayout(Layout):\n \"\"\"Used to layout Widgets in to a grid.\"\"\"\n\n name = \"grid\"\n\n def arrange(\n self, parent: Widget, children: list[Widget], size: Size\n ) -> ArrangeResult:\n styles = parent.styles\n row_scalars = styles.grid_rows or [Scalar.parse(\"1fr\")]\n column_scalars = styles.grid_columns or [Scalar.parse(\"1fr\")]\n gutter_horizontal = styles.grid_gutter_horizontal\n gutter_vertical = styles.grid_gutter_vertical\n table_size_columns = max(1, styles.grid_size_columns)\n table_size_rows = styles.grid_size_rows\n viewport = parent.screen.size\n\n def cell_coords(column_count: int) -> Iterable[tuple[int, int]]:\n \"\"\"Iterate over table coordinates ad infinitum.\n\n Args:\n column_count (int): Number of columns\n\n \"\"\"\n row = 0\n while True:\n for column in range(column_count):\n yield (column, row)\n row += 1\n\n def widget_coords(\n column_start: int, row_start: int, columns: int, rows: int\n ) -> set[tuple[int, int]]:\n \"\"\"Get coords occupied by a cell.\n\n Args:\n column_start (int): Start column.\n row_start (int): Start_row.\n columns (int): Number of columns.\n rows (int): Number of rows.\n\n Returns:\n set[tuple[int, int]]: Set of coords.\n \"\"\"\n return {\n (column, row)\n for column in range(column_start, column_start + columns)\n for row in range(row_start, row_start + rows)\n }\n\n def repeat_scalars(scalars: Iterable[Scalar], count: int) -> list[Scalar]:\n \"\"\"Repeat an iterable of scalars as many times as required to return\n a list of `count` values.\n\n Args:\n scalars (Iterable[T]): Iterable of values.\n count (int): Number of values to return.\n\n Returns:\n list[T]: A list of values.\n \"\"\"\n limited_values = list(scalars)[:]\n while len(limited_values) < count:\n limited_values.extend(scalars)\n return limited_values[:count]\n\n cell_map: dict[tuple[int, int], tuple[Widget, bool]] = {}\n cell_size_map: dict[Widget, tuple[int, int, int, int]] = {}\n\n column_count = table_size_columns\n next_coord = iter(cell_coords(column_count)).__next__\n cell_coord = (0, 0)\n column = row = 0\n\n for child in children:\n child_styles = child.styles\n column_span = child_styles.column_span or 1\n row_span = child_styles.row_span or 1\n # Find a slot where this cell fits\n # A cell on a previous row may have a row span\n while True:\n column, row = cell_coord\n coords = widget_coords(column, row, column_span, row_span)\n if cell_map.keys().isdisjoint(coords):\n for coord in coords:\n cell_map[coord] = (child, coord == cell_coord)\n cell_size_map[child] = (\n column,\n row,\n column_span - 1,\n row_span - 1,\n )\n break\n else:\n cell_coord = next_coord()\n continue\n cell_coord = next_coord()\n\n # Resolve columns / rows\n columns = resolve(\n repeat_scalars(column_scalars, table_size_columns),\n size.width,\n gutter_vertical,\n size,\n viewport,\n )\n rows = resolve(\n repeat_scalars(\n row_scalars, table_size_rows if table_size_rows else row + 1\n ),\n size.height,\n gutter_horizontal,\n size,\n viewport,\n )\n\n placements: list[WidgetPlacement] = []\n add_placement = placements.append\n fraction_unit = Fraction(1)\n widgets: list[Widget] = []\n add_widget = widgets.append\n max_column = len(columns) - 1\n max_row = len(rows) - 1\n margin = Spacing()\n for widget, (column, row, column_span, row_span) in cell_size_map.items():\n x = columns[column][0]\n if row > max_row:\n break\n y = rows[row][0]\n x2, cell_width = columns[min(max_column, column + column_span)]\n y2, cell_height = rows[min(max_row, row + row_span)]\n cell_size = Size(cell_width + x2 - x, cell_height + y2 - y)\n width, height, margin = widget._get_box_model(\n cell_size,\n viewport,\n fraction_unit,\n )\n region = (\n Region(x, y, int(width + margin.width), int(height + margin.height))\n .shrink(margin)\n .clip_size(cell_size)\n )\n add_placement(WidgetPlacement(region, margin, widget))\n add_widget(widget)\n\n return (placements, set(widgets))\n", "path": "src/textual/layouts/grid.py"}]}
| 2,043 | 108 |
gh_patches_debug_15060
|
rasdani/github-patches
|
git_diff
|
Mailu__Mailu-850
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Sender access fails with void addresses
Some MTA send bounces and or notifications using an empty address `MAIL FROM: <>`; this causes an error in the route `/internal/postfix/sender/access`:
```
[2019-01-18 21:23:32,211] ERROR in app: Exception on /internal/postfix/sender/access/<> [GET]
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/idna/core.py", line 271, in alabel
ulabel(label)
File "/usr/lib/python3.6/site-packages/idna/core.py", line 307, in ulabel
check_label(label)
File "/usr/lib/python3.6/site-packages/idna/core.py", line 261, in check_label
raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
idna.core.InvalidCodepoint: Codepoint U+003C at position 1 of '<>' not allowed
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1127, in _execute_context
context = constructor(dialect, self, conn, *args)
File "/usr/lib/python3.6/site-packages/sqlalchemy/engine/default.py", line 669, in _init_compiled
param.append(processors[key](compiled_params[key]))
File "/usr/lib/python3.6/site-packages/sqlalchemy/sql/type_api.py", line 1182, in process
return process_param(value, dialect)
File "/app/mailu/models.py", line 30, in process_bind_param
return idna.encode(value).decode("ascii").lower()
File "/usr/lib/python3.6/site-packages/idna/core.py", line 361, in encode
s = alabel(label)
File "/usr/lib/python3.6/site-packages/idna/core.py", line 273, in alabel
raise IDNAError('The label {0} is not a valid A-label'.format(label))
idna.core.IDNAError: The label b'<>' is not a valid A-label
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
response = self.full_dispatch_request()
File "/usr/lib/python3.6/site-packages/flask/app.py", line 1815, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/usr/lib/python3.6/site-packages/flask/app.py", line 1718, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/usr/lib/python3.6/site-packages/flask/_compat.py", line 35, in reraise
raise value
File "/usr/lib/python3.6/site-packages/flask/app.py", line 1813, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/lib/python3.6/site-packages/flask/app.py", line 1799, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/app/mailu/internal/views/postfix.py", line 52, in postfix_sender_access
localpart, domain_name = models.Email.resolve_domain(sender)
File "/app/mailu/models.py", line 263, in resolve_domain
alternative = Alternative.query.get(domain_name)
File "/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py", line 924, in get
ident, loading.load_on_pk_identity)
File "/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py", line 1007, in _get_impl
return db_load_fn(self, primary_key_identity)
File "/usr/lib/python3.6/site-packages/sqlalchemy/orm/loading.py", line 250, in load_on_pk_identity
return q.one()
File "/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py", line 2954, in one
ret = self.one_or_none()
File "/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py", line 2924, in one_or_none
ret = list(self)
File "/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py", line 2995, in __iter__
return self._execute_and_instances(context)
File "/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py", line 3018, in _execute_and_instances
result = conn.execute(querycontext.statement, self._params)
File "/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 948, in execute
return meth(self, multiparams, params)
File "/usr/lib/python3.6/site-packages/sqlalchemy/sql/elements.py", line 269, in _execute_on_connection
return connection._execute_clauseelement(self, multiparams, params)
File "/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1060, in _execute_clauseelement
compiled_sql, distilled_params
File "/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1132, in _execute_context
None, None)
File "/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1413, in _handle_dbapi_exception
exc_info
File "/usr/lib/python3.6/site-packages/sqlalchemy/util/compat.py", line 265, in raise_from_cause
reraise(type(exception), exception, tb=exc_tb, cause=cause)
File "/usr/lib/python3.6/site-packages/sqlalchemy/util/compat.py", line 248, in reraise
raise value.with_traceback(tb)
File "/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1127, in _execute_context
context = constructor(dialect, self, conn, *args)
File "/usr/lib/python3.6/site-packages/sqlalchemy/engine/default.py", line 669, in _init_compiled
param.append(processors[key](compiled_params[key]))
File "/usr/lib/python3.6/site-packages/sqlalchemy/sql/type_api.py", line 1182, in process
return process_param(value, dialect)
File "/app/mailu/models.py", line 30, in process_bind_param
return idna.encode(value).decode("ascii").lower()
File "/usr/lib/python3.6/site-packages/idna/core.py", line 361, in encode
s = alabel(label)
File "/usr/lib/python3.6/site-packages/idna/core.py", line 273, in alabel
raise IDNAError('The label {0} is not a valid A-label'.format(label))
sqlalchemy.exc.StatementError: (idna.core.IDNAError) The label b'<>' is not a valid A-label ...
172.17.0.35 - - [18/Jan/2019:21:23:32 +0000] "GET /internal/postfix/sender/access/%3C%3E HTTP/1.1" 500 291 "-" "Python/3.6 aiohttp/3.5.4"
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/admin/mailu/internal/views/postfix.py`
Content:
```
1 from mailu import models
2 from mailu.internal import internal
3
4 import flask
5
6
7 @internal.route("/postfix/domain/<domain_name>")
8 def postfix_mailbox_domain(domain_name):
9 domain = models.Domain.query.get(domain_name) or \
10 models.Alternative.query.get(domain_name) or \
11 flask.abort(404)
12 return flask.jsonify(domain.name)
13
14
15 @internal.route("/postfix/mailbox/<path:email>")
16 def postfix_mailbox_map(email):
17 user = models.User.query.get(email) or flask.abort(404)
18 return flask.jsonify(user.email)
19
20
21 @internal.route("/postfix/alias/<path:alias>")
22 def postfix_alias_map(alias):
23 localpart, domain_name = models.Email.resolve_domain(alias)
24 if localpart is None:
25 return flask.jsonify(domain_name)
26 destination = models.Email.resolve_destination(localpart, domain_name)
27 return flask.jsonify(",".join(destination)) if destination else flask.abort(404)
28
29
30 @internal.route("/postfix/transport/<path:email>")
31 def postfix_transport(email):
32 if email == '*':
33 return flask.abort(404)
34 localpart, domain_name = models.Email.resolve_domain(email)
35 relay = models.Relay.query.get(domain_name) or flask.abort(404)
36 return flask.jsonify("smtp:[{}]".format(relay.smtp))
37
38
39 @internal.route("/postfix/sender/login/<path:sender>")
40 def postfix_sender_login(sender):
41 localpart, domain_name = models.Email.resolve_domain(sender)
42 if localpart is None:
43 return flask.abort(404)
44 destination = models.Email.resolve_destination(localpart, domain_name, True)
45 return flask.jsonify(",".join(destination)) if destination else flask.abort(404)
46
47
48 @internal.route("/postfix/sender/access/<path:sender>")
49 def postfix_sender_access(sender):
50 """ Simply reject any sender that pretends to be from a local domain
51 """
52 localpart, domain_name = models.Email.resolve_domain(sender)
53 return flask.jsonify("REJECT") if models.Domain.query.get(domain_name) else flask.abort(404)
54
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/core/admin/mailu/internal/views/postfix.py b/core/admin/mailu/internal/views/postfix.py
--- a/core/admin/mailu/internal/views/postfix.py
+++ b/core/admin/mailu/internal/views/postfix.py
@@ -49,5 +49,18 @@
def postfix_sender_access(sender):
""" Simply reject any sender that pretends to be from a local domain
"""
- localpart, domain_name = models.Email.resolve_domain(sender)
- return flask.jsonify("REJECT") if models.Domain.query.get(domain_name) else flask.abort(404)
+ if not is_void_address(sender):
+ localpart, domain_name = models.Email.resolve_domain(sender)
+ return flask.jsonify("REJECT") if models.Domain.query.get(domain_name) else flask.abort(404)
+ else:
+ return flask.abort(404)
+
+
+def is_void_address(email):
+ '''True if the email is void (null) email address.
+ '''
+ if email.startswith('<') and email.endswith('>'):
+ email = email[1:-1]
+ # Some MTAs use things like '<MAILER-DAEMON>' instead of '<>'; so let's
+ # consider void any such thing.
+ return '@' not in email
|
{"golden_diff": "diff --git a/core/admin/mailu/internal/views/postfix.py b/core/admin/mailu/internal/views/postfix.py\n--- a/core/admin/mailu/internal/views/postfix.py\n+++ b/core/admin/mailu/internal/views/postfix.py\n@@ -49,5 +49,18 @@\n def postfix_sender_access(sender):\n \"\"\" Simply reject any sender that pretends to be from a local domain\n \"\"\"\n- localpart, domain_name = models.Email.resolve_domain(sender)\n- return flask.jsonify(\"REJECT\") if models.Domain.query.get(domain_name) else flask.abort(404)\n+ if not is_void_address(sender):\n+ localpart, domain_name = models.Email.resolve_domain(sender)\n+ return flask.jsonify(\"REJECT\") if models.Domain.query.get(domain_name) else flask.abort(404)\n+ else:\n+ return flask.abort(404)\n+\n+\n+def is_void_address(email):\n+ '''True if the email is void (null) email address.\n+ '''\n+ if email.startswith('<') and email.endswith('>'):\n+ email = email[1:-1]\n+ # Some MTAs use things like '<MAILER-DAEMON>' instead of '<>'; so let's\n+ # consider void any such thing.\n+ return '@' not in email\n", "issue": "Sender access fails with void addresses\nSome MTA send bounces and or notifications using an empty address `MAIL FROM: <>`; this causes an error in the route `/internal/postfix/sender/access`:\r\n\r\n```\r\n[2019-01-18 21:23:32,211] ERROR in app: Exception on /internal/postfix/sender/access/<> [GET]\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.6/site-packages/idna/core.py\", line 271, in alabel\r\n ulabel(label)\r\n File \"/usr/lib/python3.6/site-packages/idna/core.py\", line 307, in ulabel\r\n check_label(label)\r\n File \"/usr/lib/python3.6/site-packages/idna/core.py\", line 261, in check_label\r\n raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))\r\nidna.core.InvalidCodepoint: Codepoint U+003C at position 1 of '<>' not allowed\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py\", line 1127, in _execute_context\r\n context = constructor(dialect, self, conn, *args)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/engine/default.py\", line 669, in _init_compiled\r\n param.append(processors[key](compiled_params[key]))\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/sql/type_api.py\", line 1182, in process\r\n return process_param(value, dialect)\r\n File \"/app/mailu/models.py\", line 30, in process_bind_param\r\n return idna.encode(value).decode(\"ascii\").lower()\r\n File \"/usr/lib/python3.6/site-packages/idna/core.py\", line 361, in encode\r\n s = alabel(label)\r\n File \"/usr/lib/python3.6/site-packages/idna/core.py\", line 273, in alabel\r\n raise IDNAError('The label {0} is not a valid A-label'.format(label))\r\nidna.core.IDNAError: The label b'<>' is not a valid A-label\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.6/site-packages/flask/app.py\", line 2292, in wsgi_app\r\n response = self.full_dispatch_request()\r\n File \"/usr/lib/python3.6/site-packages/flask/app.py\", line 1815, in full_dispatch_request\r\n rv = self.handle_user_exception(e)\r\n File \"/usr/lib/python3.6/site-packages/flask/app.py\", line 1718, in handle_user_exception\r\n reraise(exc_type, exc_value, tb)\r\n File \"/usr/lib/python3.6/site-packages/flask/_compat.py\", line 35, in reraise\r\n raise value\r\n File \"/usr/lib/python3.6/site-packages/flask/app.py\", line 1813, in full_dispatch_request\r\n rv = self.dispatch_request()\r\n File \"/usr/lib/python3.6/site-packages/flask/app.py\", line 1799, in dispatch_request\r\n return self.view_functions[rule.endpoint](**req.view_args)\r\n File \"/app/mailu/internal/views/postfix.py\", line 52, in postfix_sender_access\r\n localpart, domain_name = models.Email.resolve_domain(sender)\r\n File \"/app/mailu/models.py\", line 263, in resolve_domain\r\n alternative = Alternative.query.get(domain_name)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py\", line 924, in get\r\n ident, loading.load_on_pk_identity)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py\", line 1007, in _get_impl\r\n return db_load_fn(self, primary_key_identity)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/orm/loading.py\", line 250, in load_on_pk_identity\r\n return q.one()\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py\", line 2954, in one\r\n ret = self.one_or_none()\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py\", line 2924, in one_or_none\r\n ret = list(self)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py\", line 2995, in __iter__\r\n return self._execute_and_instances(context)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/orm/query.py\", line 3018, in _execute_and_instances\r\n result = conn.execute(querycontext.statement, self._params)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py\", line 948, in execute\r\n return meth(self, multiparams, params)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/sql/elements.py\", line 269, in _execute_on_connection\r\n return connection._execute_clauseelement(self, multiparams, params)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py\", line 1060, in _execute_clauseelement\r\n compiled_sql, distilled_params\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py\", line 1132, in _execute_context\r\n None, None)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py\", line 1413, in _handle_dbapi_exception\r\n exc_info\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/util/compat.py\", line 265, in raise_from_cause\r\n reraise(type(exception), exception, tb=exc_tb, cause=cause)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/util/compat.py\", line 248, in reraise\r\n raise value.with_traceback(tb)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/engine/base.py\", line 1127, in _execute_context\r\n context = constructor(dialect, self, conn, *args)\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/engine/default.py\", line 669, in _init_compiled\r\n param.append(processors[key](compiled_params[key]))\r\n File \"/usr/lib/python3.6/site-packages/sqlalchemy/sql/type_api.py\", line 1182, in process\r\n return process_param(value, dialect)\r\n File \"/app/mailu/models.py\", line 30, in process_bind_param\r\n return idna.encode(value).decode(\"ascii\").lower()\r\n File \"/usr/lib/python3.6/site-packages/idna/core.py\", line 361, in encode\r\n s = alabel(label)\r\n File \"/usr/lib/python3.6/site-packages/idna/core.py\", line 273, in alabel\r\n raise IDNAError('The label {0} is not a valid A-label'.format(label))\r\nsqlalchemy.exc.StatementError: (idna.core.IDNAError) The label b'<>' is not a valid A-label ...\r\n172.17.0.35 - - [18/Jan/2019:21:23:32 +0000] \"GET /internal/postfix/sender/access/%3C%3E HTTP/1.1\" 500 291 \"-\" \"Python/3.6 aiohttp/3.5.4\"\r\n```\n", "before_files": [{"content": "from mailu import models\nfrom mailu.internal import internal\n\nimport flask\n\n\[email protected](\"/postfix/domain/<domain_name>\")\ndef postfix_mailbox_domain(domain_name):\n domain = models.Domain.query.get(domain_name) or \\\n models.Alternative.query.get(domain_name) or \\\n flask.abort(404)\n return flask.jsonify(domain.name)\n\n\[email protected](\"/postfix/mailbox/<path:email>\")\ndef postfix_mailbox_map(email):\n user = models.User.query.get(email) or flask.abort(404)\n return flask.jsonify(user.email)\n\n\[email protected](\"/postfix/alias/<path:alias>\")\ndef postfix_alias_map(alias):\n localpart, domain_name = models.Email.resolve_domain(alias)\n if localpart is None:\n return flask.jsonify(domain_name)\n destination = models.Email.resolve_destination(localpart, domain_name)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/transport/<path:email>\")\ndef postfix_transport(email):\n if email == '*':\n return flask.abort(404)\n localpart, domain_name = models.Email.resolve_domain(email)\n relay = models.Relay.query.get(domain_name) or flask.abort(404)\n return flask.jsonify(\"smtp:[{}]\".format(relay.smtp))\n\n\[email protected](\"/postfix/sender/login/<path:sender>\")\ndef postfix_sender_login(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\n if localpart is None:\n return flask.abort(404)\n destination = models.Email.resolve_destination(localpart, domain_name, True)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/sender/access/<path:sender>\")\ndef postfix_sender_access(sender):\n \"\"\" Simply reject any sender that pretends to be from a local domain\n \"\"\"\n localpart, domain_name = models.Email.resolve_domain(sender)\n return flask.jsonify(\"REJECT\") if models.Domain.query.get(domain_name) else flask.abort(404)\n", "path": "core/admin/mailu/internal/views/postfix.py"}], "after_files": [{"content": "from mailu import models\nfrom mailu.internal import internal\n\nimport flask\n\n\[email protected](\"/postfix/domain/<domain_name>\")\ndef postfix_mailbox_domain(domain_name):\n domain = models.Domain.query.get(domain_name) or \\\n models.Alternative.query.get(domain_name) or \\\n flask.abort(404)\n return flask.jsonify(domain.name)\n\n\[email protected](\"/postfix/mailbox/<path:email>\")\ndef postfix_mailbox_map(email):\n user = models.User.query.get(email) or flask.abort(404)\n return flask.jsonify(user.email)\n\n\[email protected](\"/postfix/alias/<path:alias>\")\ndef postfix_alias_map(alias):\n localpart, domain_name = models.Email.resolve_domain(alias)\n if localpart is None:\n return flask.jsonify(domain_name)\n destination = models.Email.resolve_destination(localpart, domain_name)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/transport/<path:email>\")\ndef postfix_transport(email):\n if email == '*':\n return flask.abort(404)\n localpart, domain_name = models.Email.resolve_domain(email)\n relay = models.Relay.query.get(domain_name) or flask.abort(404)\n return flask.jsonify(\"smtp:[{}]\".format(relay.smtp))\n\n\[email protected](\"/postfix/sender/login/<path:sender>\")\ndef postfix_sender_login(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\n if localpart is None:\n return flask.abort(404)\n destination = models.Email.resolve_destination(localpart, domain_name, True)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/sender/access/<path:sender>\")\ndef postfix_sender_access(sender):\n \"\"\" Simply reject any sender that pretends to be from a local domain\n \"\"\"\n if not is_void_address(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\n return flask.jsonify(\"REJECT\") if models.Domain.query.get(domain_name) else flask.abort(404)\n else:\n return flask.abort(404)\n\n\ndef is_void_address(email):\n '''True if the email is void (null) email address.\n '''\n if email.startswith('<') and email.endswith('>'):\n email = email[1:-1]\n # Some MTAs use things like '<MAILER-DAEMON>' instead of '<>'; so let's\n # consider void any such thing.\n return '@' not in email\n", "path": "core/admin/mailu/internal/views/postfix.py"}]}
| 2,512 | 279 |
gh_patches_debug_48833
|
rasdani/github-patches
|
git_diff
|
LibraryOfCongress__concordia-535
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Set site time zone to US/Eastern
https://docs.djangoproject.com/en/2.1/ref/settings/#std:setting-TIME_ZONE
Use Django setting to change user-facing timestamps to use US/Eastern time zone.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `concordia/settings_template.py`
Content:
```
1 # TODO: use correct copyright header
2 import os
3
4 from django.contrib import messages
5
6 import raven
7
8 # Build paths inside the project like this: os.path.join(SITE_ROOT_DIR, ...)
9 CONCORDIA_APP_DIR = os.path.abspath(os.path.dirname(__file__))
10 SITE_ROOT_DIR = os.path.dirname(CONCORDIA_APP_DIR)
11
12 # SECURITY WARNING: keep the secret key used in production secret!
13 SECRET_KEY = "django-secret-key"
14
15 CONCORDIA_ENVIRONMENT = os.environ.get("CONCORDIA_ENVIRONMENT", "development")
16
17 # Optional SMTP authentication information for EMAIL_HOST.
18 EMAIL_HOST_USER = ""
19 EMAIL_HOST_PASSWORD = ""
20 EMAIL_USE_TLS = False
21 DEFAULT_FROM_EMAIL = "[email protected]"
22
23 ALLOWED_HOSTS = ["*"]
24
25 DEBUG = False
26 CSRF_COOKIE_SECURE = False
27
28 AUTH_PASSWORD_VALIDATORS = []
29 EMAIL_BACKEND = "django.core.mail.backends.filebased.EmailBackend"
30 # EMAIL_FILE_PATH = os.path.join(SITE_ROOT_DIR, 'emails')
31 EMAIL_HOST = "localhost"
32 EMAIL_PORT = 25
33 LANGUAGE_CODE = "en-us"
34 LOGIN_REDIRECT_URL = "/"
35 LOGOUT_REDIRECT_URL = "/"
36 ROOT_URLCONF = "concordia.urls"
37 STATIC_ROOT = "static-files"
38 STATIC_URL = "/static/"
39 STATICFILES_DIRS = [
40 os.path.join(CONCORDIA_APP_DIR, "static"),
41 os.path.join(SITE_ROOT_DIR, "static"),
42 ]
43 TEMPLATE_DEBUG = False
44 TIME_ZONE = "UTC"
45 USE_I18N = True
46 USE_L10N = True
47 USE_TZ = True
48 WSGI_APPLICATION = "concordia.wsgi.application"
49
50 ADMIN_SITE = {"site_header": "Concordia Admin", "site_title": "Concordia"}
51
52 DATABASES = {
53 "default": {
54 "ENGINE": "django.db.backends.postgresql",
55 "NAME": "concordia",
56 "USER": "concordia",
57 "PASSWORD": os.getenv("POSTGRESQL_PW"),
58 "HOST": os.getenv("POSTGRESQL_HOST", "localhost"),
59 "PORT": "5432",
60 "CONN_MAX_AGE": 15 * 60, # Keep database connections open for 15 minutes
61 }
62 }
63
64
65 INSTALLED_APPS = [
66 "django.contrib.admin",
67 "django.contrib.auth",
68 "django.contrib.contenttypes",
69 "django.contrib.humanize",
70 "django.contrib.sessions",
71 "django.contrib.messages",
72 "django.contrib.sites",
73 "django.contrib.staticfiles",
74 "raven.contrib.django.raven_compat",
75 "maintenance_mode",
76 "bootstrap4",
77 "bittersweet",
78 "concordia.apps.ConcordiaAppConfig",
79 "exporter",
80 "importer",
81 "captcha",
82 "django_prometheus_metrics",
83 "robots",
84 ]
85
86 if DEBUG:
87 INSTALLED_APPS += ["django_extensions"]
88 INSTALLED_APPS += ["kombu.transport"]
89
90
91 MIDDLEWARE = [
92 "django_prometheus_metrics.middleware.PrometheusBeforeMiddleware",
93 "django.middleware.security.SecurityMiddleware",
94 # WhiteNoise serves static files efficiently:
95 "whitenoise.middleware.WhiteNoiseMiddleware",
96 "django.contrib.sessions.middleware.SessionMiddleware",
97 "django.middleware.common.CommonMiddleware",
98 "django.middleware.csrf.CsrfViewMiddleware",
99 "django.contrib.auth.middleware.AuthenticationMiddleware",
100 "django.contrib.messages.middleware.MessageMiddleware",
101 "django.middleware.clickjacking.XFrameOptionsMiddleware",
102 "maintenance_mode.middleware.MaintenanceModeMiddleware",
103 ]
104
105 TEMPLATES = [
106 {
107 "BACKEND": "django.template.backends.django.DjangoTemplates",
108 "DIRS": [
109 os.path.join(SITE_ROOT_DIR, "templates"),
110 os.path.join(CONCORDIA_APP_DIR, "templates"),
111 ],
112 "OPTIONS": {
113 "context_processors": [
114 "django.template.context_processors.debug",
115 "django.template.context_processors.request",
116 "django.contrib.auth.context_processors.auth",
117 "django.contrib.messages.context_processors.messages",
118 "django.template.context_processors.media",
119 # Concordia
120 "concordia.context_processors.system_configuration",
121 "concordia.context_processors.site_navigation",
122 ],
123 "loaders": [
124 "django.template.loaders.filesystem.Loader",
125 "django.template.loaders.app_directories.Loader",
126 ],
127 },
128 }
129 ]
130
131 MEMCACHED_ADDRESS = os.getenv("MEMCACHED_ADDRESS", "")
132 MEMCACHED_PORT = os.getenv("MEMCACHED_PORT", "")
133
134 CACHES = {
135 "default": {
136 "BACKEND": "django.core.cache.backends.memcached.MemcachedCache",
137 "LOCATION": "{}:{}".format(MEMCACHED_ADDRESS, MEMCACHED_PORT),
138 }
139 }
140
141 HAYSTACK_CONNECTIONS = {
142 "default": {
143 "ENGINE": "haystack.backends.whoosh_backend.WhooshEngine",
144 "PATH": os.path.join(os.path.dirname(__file__), "whoosh_index"),
145 }
146 }
147
148 # Celery settings
149 CELERY_BROKER_URL = "pyamqp://guest@rabbit"
150 CELERY_RESULT_BACKEND = "rpc://"
151
152 CELERY_ACCEPT_CONTENT = ["json"]
153 CELERY_TASK_SERIALIZER = "json"
154 CELERY_IMPORTS = ("importer.tasks",)
155
156 CELERY_BROKER_HEARTBEAT = 0
157 CELERY_BROKER_TRANSPORT_OPTIONS = {
158 "confirm_publish": True,
159 "max_retries": 3,
160 "interval_start": 0,
161 "interval_step": 0.2,
162 "interval_max": 0.5,
163 }
164
165 LOGGING = {
166 "version": 1,
167 "disable_existing_loggers": False,
168 "formatters": {
169 "long": {
170 "format": "[{asctime} {levelname} {name}:{lineno}] {message}",
171 "datefmt": "%Y-%m-%dT%H:%M:%S",
172 "style": "{",
173 },
174 "short": {
175 "format": "[{levelname} {name}] {message}",
176 "datefmt": "%Y-%m-%dT%H:%M:%S",
177 "style": "{",
178 },
179 },
180 "handlers": {
181 "stream": {
182 "class": "logging.StreamHandler",
183 "level": "INFO",
184 "formatter": "long",
185 },
186 "null": {"level": "DEBUG", "class": "logging.NullHandler"},
187 "file": {
188 "class": "logging.handlers.TimedRotatingFileHandler",
189 "level": "DEBUG",
190 "formatter": "long",
191 "filename": "{}/logs/concordia.log".format(SITE_ROOT_DIR),
192 "when": "H",
193 "interval": 3,
194 "backupCount": 16,
195 },
196 "celery": {
197 "level": "DEBUG",
198 "class": "logging.handlers.RotatingFileHandler",
199 "filename": "{}/logs/celery.log".format(SITE_ROOT_DIR),
200 "formatter": "long",
201 "maxBytes": 1024 * 1024 * 100, # 100 mb
202 },
203 "sentry": {
204 "level": "WARNING",
205 "class": "raven.contrib.django.raven_compat.handlers.SentryHandler",
206 },
207 },
208 "loggers": {
209 "django": {"handlers": ["file", "stream"], "level": "DEBUG", "propagate": True},
210 "celery": {"handlers": ["celery", "stream"], "level": "DEBUG"},
211 "sentry.errors": {"level": "INFO", "handlers": ["stream"], "propagate": False},
212 },
213 }
214
215
216 ################################################################################
217 # Django-specific settings above
218 ################################################################################
219
220 ACCOUNT_ACTIVATION_DAYS = 7
221
222 MEDIA_URL = "/media/"
223 MEDIA_ROOT = os.path.join(SITE_ROOT_DIR, "media")
224
225 LOGIN_URL = "login"
226
227 PASSWORD_VALIDATOR = (
228 "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
229 )
230
231 AUTH_PASSWORD_VALIDATORS = [
232 {"NAME": PASSWORD_VALIDATOR},
233 {
234 "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
235 "OPTIONS": {"min_length": 8},
236 },
237 {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
238 {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
239 {"NAME": "concordia.validators.complexity"},
240 ]
241
242 AUTHENTICATION_BACKENDS = [
243 "concordia.email_username_backend.EmailOrUsernameModelBackend"
244 ]
245
246 CAPTCHA_CHALLENGE_FUNCT = "captcha.helpers.random_char_challenge"
247 #: Anonymous sessions require captcha validation every day by default:
248 ANONYMOUS_CAPTCHA_VALIDATION_INTERVAL = 86400
249
250 STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
251 WHITENOISE_ROOT = os.path.join(SITE_ROOT_DIR, "static")
252
253 PASSWORD_RESET_TIMEOUT_DAYS = 1
254 ACCOUNT_ACTIVATION_DAYS = 1
255 REGISTRATION_OPEN = True # set to false to temporarily disable registrations
256
257 MESSAGE_STORAGE = "django.contrib.messages.storage.session.SessionStorage"
258
259 MESSAGE_TAGS = {messages.ERROR: "danger"}
260
261 SENTRY_DSN = os.environ.get("SENTRY_DSN", "")
262 SENTRY_PUBLIC_DSN = os.environ.get("SENTRY_PUBLIC_DSN", "")
263
264 RAVEN_CONFIG = {
265 "dsn": SENTRY_DSN,
266 "environment": CONCORDIA_ENVIRONMENT,
267 "release": raven.fetch_git_sha(SITE_ROOT_DIR),
268 }
269
270 # When the MAINTENANCE_MODE setting is true, this template will be used to
271 # generate a 503 response:
272 MAINTENANCE_MODE_TEMPLATE = "maintenance-mode.html"
273
274 # Names of special django.auth Groups
275 COMMUNITY_MANAGER_GROUP_NAME = "Community Managers"
276 NEWSLETTER_GROUP_NAME = "Newsletter"
277
278 # Django sites framework setting
279 SITE_ID = 1
280 ROBOTS_USE_SITEMAP = False
281 ROBOTS_USE_HOST = False
282
283 # django-bootstrap4 customization:
284 BOOTSTRAP4 = {"required_css_class": "form-group-required"}
285
286 # Transcription-related settings
287
288 #: Number of seconds an asset reservation is valid for
289 TRANSCRIPTION_RESERVATION_SECONDS = 5 * 60
290
291 #: Web cache policy settings
292 DEFAULT_PAGE_TTL = 5 * 60
293
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/concordia/settings_template.py b/concordia/settings_template.py
--- a/concordia/settings_template.py
+++ b/concordia/settings_template.py
@@ -41,7 +41,7 @@
os.path.join(SITE_ROOT_DIR, "static"),
]
TEMPLATE_DEBUG = False
-TIME_ZONE = "UTC"
+TIME_ZONE = "America/New_York"
USE_I18N = True
USE_L10N = True
USE_TZ = True
|
{"golden_diff": "diff --git a/concordia/settings_template.py b/concordia/settings_template.py\n--- a/concordia/settings_template.py\n+++ b/concordia/settings_template.py\n@@ -41,7 +41,7 @@\n os.path.join(SITE_ROOT_DIR, \"static\"),\n ]\n TEMPLATE_DEBUG = False\n-TIME_ZONE = \"UTC\"\n+TIME_ZONE = \"America/New_York\"\n USE_I18N = True\n USE_L10N = True\n USE_TZ = True\n", "issue": "Set site time zone to US/Eastern\nhttps://docs.djangoproject.com/en/2.1/ref/settings/#std:setting-TIME_ZONE\r\n\r\nUse Django setting to change user-facing timestamps to use US/Eastern time zone.\n", "before_files": [{"content": "# TODO: use correct copyright header\nimport os\n\nfrom django.contrib import messages\n\nimport raven\n\n# Build paths inside the project like this: os.path.join(SITE_ROOT_DIR, ...)\nCONCORDIA_APP_DIR = os.path.abspath(os.path.dirname(__file__))\nSITE_ROOT_DIR = os.path.dirname(CONCORDIA_APP_DIR)\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = \"django-secret-key\"\n\nCONCORDIA_ENVIRONMENT = os.environ.get(\"CONCORDIA_ENVIRONMENT\", \"development\")\n\n# Optional SMTP authentication information for EMAIL_HOST.\nEMAIL_HOST_USER = \"\"\nEMAIL_HOST_PASSWORD = \"\"\nEMAIL_USE_TLS = False\nDEFAULT_FROM_EMAIL = \"[email protected]\"\n\nALLOWED_HOSTS = [\"*\"]\n\nDEBUG = False\nCSRF_COOKIE_SECURE = False\n\nAUTH_PASSWORD_VALIDATORS = []\nEMAIL_BACKEND = \"django.core.mail.backends.filebased.EmailBackend\"\n# EMAIL_FILE_PATH = os.path.join(SITE_ROOT_DIR, 'emails')\nEMAIL_HOST = \"localhost\"\nEMAIL_PORT = 25\nLANGUAGE_CODE = \"en-us\"\nLOGIN_REDIRECT_URL = \"/\"\nLOGOUT_REDIRECT_URL = \"/\"\nROOT_URLCONF = \"concordia.urls\"\nSTATIC_ROOT = \"static-files\"\nSTATIC_URL = \"/static/\"\nSTATICFILES_DIRS = [\n os.path.join(CONCORDIA_APP_DIR, \"static\"),\n os.path.join(SITE_ROOT_DIR, \"static\"),\n]\nTEMPLATE_DEBUG = False\nTIME_ZONE = \"UTC\"\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nWSGI_APPLICATION = \"concordia.wsgi.application\"\n\nADMIN_SITE = {\"site_header\": \"Concordia Admin\", \"site_title\": \"Concordia\"}\n\nDATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql\",\n \"NAME\": \"concordia\",\n \"USER\": \"concordia\",\n \"PASSWORD\": os.getenv(\"POSTGRESQL_PW\"),\n \"HOST\": os.getenv(\"POSTGRESQL_HOST\", \"localhost\"),\n \"PORT\": \"5432\",\n \"CONN_MAX_AGE\": 15 * 60, # Keep database connections open for 15 minutes\n }\n}\n\n\nINSTALLED_APPS = [\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.humanize\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.sites\",\n \"django.contrib.staticfiles\",\n \"raven.contrib.django.raven_compat\",\n \"maintenance_mode\",\n \"bootstrap4\",\n \"bittersweet\",\n \"concordia.apps.ConcordiaAppConfig\",\n \"exporter\",\n \"importer\",\n \"captcha\",\n \"django_prometheus_metrics\",\n \"robots\",\n]\n\nif DEBUG:\n INSTALLED_APPS += [\"django_extensions\"]\n INSTALLED_APPS += [\"kombu.transport\"]\n\n\nMIDDLEWARE = [\n \"django_prometheus_metrics.middleware.PrometheusBeforeMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n # WhiteNoise serves static files efficiently:\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"maintenance_mode.middleware.MaintenanceModeMiddleware\",\n]\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [\n os.path.join(SITE_ROOT_DIR, \"templates\"),\n os.path.join(CONCORDIA_APP_DIR, \"templates\"),\n ],\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"django.template.context_processors.media\",\n # Concordia\n \"concordia.context_processors.system_configuration\",\n \"concordia.context_processors.site_navigation\",\n ],\n \"loaders\": [\n \"django.template.loaders.filesystem.Loader\",\n \"django.template.loaders.app_directories.Loader\",\n ],\n },\n }\n]\n\nMEMCACHED_ADDRESS = os.getenv(\"MEMCACHED_ADDRESS\", \"\")\nMEMCACHED_PORT = os.getenv(\"MEMCACHED_PORT\", \"\")\n\nCACHES = {\n \"default\": {\n \"BACKEND\": \"django.core.cache.backends.memcached.MemcachedCache\",\n \"LOCATION\": \"{}:{}\".format(MEMCACHED_ADDRESS, MEMCACHED_PORT),\n }\n}\n\nHAYSTACK_CONNECTIONS = {\n \"default\": {\n \"ENGINE\": \"haystack.backends.whoosh_backend.WhooshEngine\",\n \"PATH\": os.path.join(os.path.dirname(__file__), \"whoosh_index\"),\n }\n}\n\n# Celery settings\nCELERY_BROKER_URL = \"pyamqp://guest@rabbit\"\nCELERY_RESULT_BACKEND = \"rpc://\"\n\nCELERY_ACCEPT_CONTENT = [\"json\"]\nCELERY_TASK_SERIALIZER = \"json\"\nCELERY_IMPORTS = (\"importer.tasks\",)\n\nCELERY_BROKER_HEARTBEAT = 0\nCELERY_BROKER_TRANSPORT_OPTIONS = {\n \"confirm_publish\": True,\n \"max_retries\": 3,\n \"interval_start\": 0,\n \"interval_step\": 0.2,\n \"interval_max\": 0.5,\n}\n\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"formatters\": {\n \"long\": {\n \"format\": \"[{asctime} {levelname} {name}:{lineno}] {message}\",\n \"datefmt\": \"%Y-%m-%dT%H:%M:%S\",\n \"style\": \"{\",\n },\n \"short\": {\n \"format\": \"[{levelname} {name}] {message}\",\n \"datefmt\": \"%Y-%m-%dT%H:%M:%S\",\n \"style\": \"{\",\n },\n },\n \"handlers\": {\n \"stream\": {\n \"class\": \"logging.StreamHandler\",\n \"level\": \"INFO\",\n \"formatter\": \"long\",\n },\n \"null\": {\"level\": \"DEBUG\", \"class\": \"logging.NullHandler\"},\n \"file\": {\n \"class\": \"logging.handlers.TimedRotatingFileHandler\",\n \"level\": \"DEBUG\",\n \"formatter\": \"long\",\n \"filename\": \"{}/logs/concordia.log\".format(SITE_ROOT_DIR),\n \"when\": \"H\",\n \"interval\": 3,\n \"backupCount\": 16,\n },\n \"celery\": {\n \"level\": \"DEBUG\",\n \"class\": \"logging.handlers.RotatingFileHandler\",\n \"filename\": \"{}/logs/celery.log\".format(SITE_ROOT_DIR),\n \"formatter\": \"long\",\n \"maxBytes\": 1024 * 1024 * 100, # 100 mb\n },\n \"sentry\": {\n \"level\": \"WARNING\",\n \"class\": \"raven.contrib.django.raven_compat.handlers.SentryHandler\",\n },\n },\n \"loggers\": {\n \"django\": {\"handlers\": [\"file\", \"stream\"], \"level\": \"DEBUG\", \"propagate\": True},\n \"celery\": {\"handlers\": [\"celery\", \"stream\"], \"level\": \"DEBUG\"},\n \"sentry.errors\": {\"level\": \"INFO\", \"handlers\": [\"stream\"], \"propagate\": False},\n },\n}\n\n\n################################################################################\n# Django-specific settings above\n################################################################################\n\nACCOUNT_ACTIVATION_DAYS = 7\n\nMEDIA_URL = \"/media/\"\nMEDIA_ROOT = os.path.join(SITE_ROOT_DIR, \"media\")\n\nLOGIN_URL = \"login\"\n\nPASSWORD_VALIDATOR = (\n \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\"\n)\n\nAUTH_PASSWORD_VALIDATORS = [\n {\"NAME\": PASSWORD_VALIDATOR},\n {\n \"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\",\n \"OPTIONS\": {\"min_length\": 8},\n },\n {\"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"},\n {\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"},\n {\"NAME\": \"concordia.validators.complexity\"},\n]\n\nAUTHENTICATION_BACKENDS = [\n \"concordia.email_username_backend.EmailOrUsernameModelBackend\"\n]\n\nCAPTCHA_CHALLENGE_FUNCT = \"captcha.helpers.random_char_challenge\"\n#: Anonymous sessions require captcha validation every day by default:\nANONYMOUS_CAPTCHA_VALIDATION_INTERVAL = 86400\n\nSTATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\nWHITENOISE_ROOT = os.path.join(SITE_ROOT_DIR, \"static\")\n\nPASSWORD_RESET_TIMEOUT_DAYS = 1\nACCOUNT_ACTIVATION_DAYS = 1\nREGISTRATION_OPEN = True # set to false to temporarily disable registrations\n\nMESSAGE_STORAGE = \"django.contrib.messages.storage.session.SessionStorage\"\n\nMESSAGE_TAGS = {messages.ERROR: \"danger\"}\n\nSENTRY_DSN = os.environ.get(\"SENTRY_DSN\", \"\")\nSENTRY_PUBLIC_DSN = os.environ.get(\"SENTRY_PUBLIC_DSN\", \"\")\n\nRAVEN_CONFIG = {\n \"dsn\": SENTRY_DSN,\n \"environment\": CONCORDIA_ENVIRONMENT,\n \"release\": raven.fetch_git_sha(SITE_ROOT_DIR),\n}\n\n# When the MAINTENANCE_MODE setting is true, this template will be used to\n# generate a 503 response:\nMAINTENANCE_MODE_TEMPLATE = \"maintenance-mode.html\"\n\n# Names of special django.auth Groups\nCOMMUNITY_MANAGER_GROUP_NAME = \"Community Managers\"\nNEWSLETTER_GROUP_NAME = \"Newsletter\"\n\n# Django sites framework setting\nSITE_ID = 1\nROBOTS_USE_SITEMAP = False\nROBOTS_USE_HOST = False\n\n# django-bootstrap4 customization:\nBOOTSTRAP4 = {\"required_css_class\": \"form-group-required\"}\n\n# Transcription-related settings\n\n#: Number of seconds an asset reservation is valid for\nTRANSCRIPTION_RESERVATION_SECONDS = 5 * 60\n\n#: Web cache policy settings\nDEFAULT_PAGE_TTL = 5 * 60\n", "path": "concordia/settings_template.py"}], "after_files": [{"content": "# TODO: use correct copyright header\nimport os\n\nfrom django.contrib import messages\n\nimport raven\n\n# Build paths inside the project like this: os.path.join(SITE_ROOT_DIR, ...)\nCONCORDIA_APP_DIR = os.path.abspath(os.path.dirname(__file__))\nSITE_ROOT_DIR = os.path.dirname(CONCORDIA_APP_DIR)\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = \"django-secret-key\"\n\nCONCORDIA_ENVIRONMENT = os.environ.get(\"CONCORDIA_ENVIRONMENT\", \"development\")\n\n# Optional SMTP authentication information for EMAIL_HOST.\nEMAIL_HOST_USER = \"\"\nEMAIL_HOST_PASSWORD = \"\"\nEMAIL_USE_TLS = False\nDEFAULT_FROM_EMAIL = \"[email protected]\"\n\nALLOWED_HOSTS = [\"*\"]\n\nDEBUG = False\nCSRF_COOKIE_SECURE = False\n\nAUTH_PASSWORD_VALIDATORS = []\nEMAIL_BACKEND = \"django.core.mail.backends.filebased.EmailBackend\"\n# EMAIL_FILE_PATH = os.path.join(SITE_ROOT_DIR, 'emails')\nEMAIL_HOST = \"localhost\"\nEMAIL_PORT = 25\nLANGUAGE_CODE = \"en-us\"\nLOGIN_REDIRECT_URL = \"/\"\nLOGOUT_REDIRECT_URL = \"/\"\nROOT_URLCONF = \"concordia.urls\"\nSTATIC_ROOT = \"static-files\"\nSTATIC_URL = \"/static/\"\nSTATICFILES_DIRS = [\n os.path.join(CONCORDIA_APP_DIR, \"static\"),\n os.path.join(SITE_ROOT_DIR, \"static\"),\n]\nTEMPLATE_DEBUG = False\nTIME_ZONE = \"America/New_York\"\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nWSGI_APPLICATION = \"concordia.wsgi.application\"\n\nADMIN_SITE = {\"site_header\": \"Concordia Admin\", \"site_title\": \"Concordia\"}\n\nDATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql\",\n \"NAME\": \"concordia\",\n \"USER\": \"concordia\",\n \"PASSWORD\": os.getenv(\"POSTGRESQL_PW\"),\n \"HOST\": os.getenv(\"POSTGRESQL_HOST\", \"localhost\"),\n \"PORT\": \"5432\",\n \"CONN_MAX_AGE\": 15 * 60, # Keep database connections open for 15 minutes\n }\n}\n\n\nINSTALLED_APPS = [\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.humanize\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.sites\",\n \"django.contrib.staticfiles\",\n \"raven.contrib.django.raven_compat\",\n \"maintenance_mode\",\n \"bootstrap4\",\n \"bittersweet\",\n \"concordia.apps.ConcordiaAppConfig\",\n \"exporter\",\n \"importer\",\n \"captcha\",\n \"django_prometheus_metrics\",\n \"robots\",\n]\n\nif DEBUG:\n INSTALLED_APPS += [\"django_extensions\"]\n INSTALLED_APPS += [\"kombu.transport\"]\n\n\nMIDDLEWARE = [\n \"django_prometheus_metrics.middleware.PrometheusBeforeMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n # WhiteNoise serves static files efficiently:\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"maintenance_mode.middleware.MaintenanceModeMiddleware\",\n]\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [\n os.path.join(SITE_ROOT_DIR, \"templates\"),\n os.path.join(CONCORDIA_APP_DIR, \"templates\"),\n ],\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"django.template.context_processors.media\",\n # Concordia\n \"concordia.context_processors.system_configuration\",\n \"concordia.context_processors.site_navigation\",\n ],\n \"loaders\": [\n \"django.template.loaders.filesystem.Loader\",\n \"django.template.loaders.app_directories.Loader\",\n ],\n },\n }\n]\n\nMEMCACHED_ADDRESS = os.getenv(\"MEMCACHED_ADDRESS\", \"\")\nMEMCACHED_PORT = os.getenv(\"MEMCACHED_PORT\", \"\")\n\nCACHES = {\n \"default\": {\n \"BACKEND\": \"django.core.cache.backends.memcached.MemcachedCache\",\n \"LOCATION\": \"{}:{}\".format(MEMCACHED_ADDRESS, MEMCACHED_PORT),\n }\n}\n\nHAYSTACK_CONNECTIONS = {\n \"default\": {\n \"ENGINE\": \"haystack.backends.whoosh_backend.WhooshEngine\",\n \"PATH\": os.path.join(os.path.dirname(__file__), \"whoosh_index\"),\n }\n}\n\n# Celery settings\nCELERY_BROKER_URL = \"pyamqp://guest@rabbit\"\nCELERY_RESULT_BACKEND = \"rpc://\"\n\nCELERY_ACCEPT_CONTENT = [\"json\"]\nCELERY_TASK_SERIALIZER = \"json\"\nCELERY_IMPORTS = (\"importer.tasks\",)\n\nCELERY_BROKER_HEARTBEAT = 0\nCELERY_BROKER_TRANSPORT_OPTIONS = {\n \"confirm_publish\": True,\n \"max_retries\": 3,\n \"interval_start\": 0,\n \"interval_step\": 0.2,\n \"interval_max\": 0.5,\n}\n\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"formatters\": {\n \"long\": {\n \"format\": \"[{asctime} {levelname} {name}:{lineno}] {message}\",\n \"datefmt\": \"%Y-%m-%dT%H:%M:%S\",\n \"style\": \"{\",\n },\n \"short\": {\n \"format\": \"[{levelname} {name}] {message}\",\n \"datefmt\": \"%Y-%m-%dT%H:%M:%S\",\n \"style\": \"{\",\n },\n },\n \"handlers\": {\n \"stream\": {\n \"class\": \"logging.StreamHandler\",\n \"level\": \"INFO\",\n \"formatter\": \"long\",\n },\n \"null\": {\"level\": \"DEBUG\", \"class\": \"logging.NullHandler\"},\n \"file\": {\n \"class\": \"logging.handlers.TimedRotatingFileHandler\",\n \"level\": \"DEBUG\",\n \"formatter\": \"long\",\n \"filename\": \"{}/logs/concordia.log\".format(SITE_ROOT_DIR),\n \"when\": \"H\",\n \"interval\": 3,\n \"backupCount\": 16,\n },\n \"celery\": {\n \"level\": \"DEBUG\",\n \"class\": \"logging.handlers.RotatingFileHandler\",\n \"filename\": \"{}/logs/celery.log\".format(SITE_ROOT_DIR),\n \"formatter\": \"long\",\n \"maxBytes\": 1024 * 1024 * 100, # 100 mb\n },\n \"sentry\": {\n \"level\": \"WARNING\",\n \"class\": \"raven.contrib.django.raven_compat.handlers.SentryHandler\",\n },\n },\n \"loggers\": {\n \"django\": {\"handlers\": [\"file\", \"stream\"], \"level\": \"DEBUG\", \"propagate\": True},\n \"celery\": {\"handlers\": [\"celery\", \"stream\"], \"level\": \"DEBUG\"},\n \"sentry.errors\": {\"level\": \"INFO\", \"handlers\": [\"stream\"], \"propagate\": False},\n },\n}\n\n\n################################################################################\n# Django-specific settings above\n################################################################################\n\nACCOUNT_ACTIVATION_DAYS = 7\n\nMEDIA_URL = \"/media/\"\nMEDIA_ROOT = os.path.join(SITE_ROOT_DIR, \"media\")\n\nLOGIN_URL = \"login\"\n\nPASSWORD_VALIDATOR = (\n \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\"\n)\n\nAUTH_PASSWORD_VALIDATORS = [\n {\"NAME\": PASSWORD_VALIDATOR},\n {\n \"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\",\n \"OPTIONS\": {\"min_length\": 8},\n },\n {\"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"},\n {\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"},\n {\"NAME\": \"concordia.validators.complexity\"},\n]\n\nAUTHENTICATION_BACKENDS = [\n \"concordia.email_username_backend.EmailOrUsernameModelBackend\"\n]\n\nCAPTCHA_CHALLENGE_FUNCT = \"captcha.helpers.random_char_challenge\"\n#: Anonymous sessions require captcha validation every day by default:\nANONYMOUS_CAPTCHA_VALIDATION_INTERVAL = 86400\n\nSTATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\nWHITENOISE_ROOT = os.path.join(SITE_ROOT_DIR, \"static\")\n\nPASSWORD_RESET_TIMEOUT_DAYS = 1\nACCOUNT_ACTIVATION_DAYS = 1\nREGISTRATION_OPEN = True # set to false to temporarily disable registrations\n\nMESSAGE_STORAGE = \"django.contrib.messages.storage.session.SessionStorage\"\n\nMESSAGE_TAGS = {messages.ERROR: \"danger\"}\n\nSENTRY_DSN = os.environ.get(\"SENTRY_DSN\", \"\")\nSENTRY_PUBLIC_DSN = os.environ.get(\"SENTRY_PUBLIC_DSN\", \"\")\n\nRAVEN_CONFIG = {\n \"dsn\": SENTRY_DSN,\n \"environment\": CONCORDIA_ENVIRONMENT,\n \"release\": raven.fetch_git_sha(SITE_ROOT_DIR),\n}\n\n# When the MAINTENANCE_MODE setting is true, this template will be used to\n# generate a 503 response:\nMAINTENANCE_MODE_TEMPLATE = \"maintenance-mode.html\"\n\n# Names of special django.auth Groups\nCOMMUNITY_MANAGER_GROUP_NAME = \"Community Managers\"\nNEWSLETTER_GROUP_NAME = \"Newsletter\"\n\n# Django sites framework setting\nSITE_ID = 1\nROBOTS_USE_SITEMAP = False\nROBOTS_USE_HOST = False\n\n# django-bootstrap4 customization:\nBOOTSTRAP4 = {\"required_css_class\": \"form-group-required\"}\n\n# Transcription-related settings\n\n#: Number of seconds an asset reservation is valid for\nTRANSCRIPTION_RESERVATION_SECONDS = 5 * 60\n\n#: Web cache policy settings\nDEFAULT_PAGE_TTL = 5 * 60\n", "path": "concordia/settings_template.py"}]}
| 3,248 | 105 |
gh_patches_debug_17050
|
rasdani/github-patches
|
git_diff
|
paperless-ngx__paperless-ngx-5516
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] App Config does not enforce permissions
### Description
Since Paperless 2.3 or 2.4 there's a new menu item named "Configuration" where it's possible to override the logo and a few OCR settings. I'm using a group named `humans` for everyday operations. The group has mere "view" permissions for type "admin":

And yet the users in that group can modify application configurations. I believe this is caused by the `/api/config/` endpoint not having authorization beyond `IsAuthenticated`:
https://github.com/paperless-ngx/paperless-ngx/blob/6d6650d5f6952f3129e9f4632cd149914b344767/src/paperless/views.py#L169
Removing the "view" permission bit, leaving the group without any for the "admin" type, only hides the UI elements. The underlying API is unaffected.
### Steps to reproduce
1. Authenticate with a user with no permissions for type "admin" other than "view" (the latter being optional if using the API directly).
1. Navigate to `/config` (Application Configuration).
1. Change values.
1. Save.
### Webserver logs
```bash
N/A
```
### Browser logs
_No response_
### Paperless-ngx version
2.4.0
### Host OS
Debian Linux
### Installation method
Docker - official image
### Browser
_No response_
### Configuration changes
_No response_
### Other
_No response_
### Please confirm the following
- [X] I believe this issue is a bug that affects all users of Paperless-ngx, not something specific to my installation.
- [X] I have already searched for relevant existing issues and discussions before opening this report.
- [X] I have updated the title field above with a concise description.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/paperless/views.py`
Content:
```
1 import os
2 from collections import OrderedDict
3
4 from django.contrib.auth.models import Group
5 from django.contrib.auth.models import User
6 from django.db.models.functions import Lower
7 from django.http import HttpResponse
8 from django.views.generic import View
9 from django_filters.rest_framework import DjangoFilterBackend
10 from rest_framework.authtoken.models import Token
11 from rest_framework.filters import OrderingFilter
12 from rest_framework.generics import GenericAPIView
13 from rest_framework.pagination import PageNumberPagination
14 from rest_framework.permissions import IsAuthenticated
15 from rest_framework.response import Response
16 from rest_framework.viewsets import ModelViewSet
17
18 from documents.permissions import PaperlessObjectPermissions
19 from paperless.filters import GroupFilterSet
20 from paperless.filters import UserFilterSet
21 from paperless.models import ApplicationConfiguration
22 from paperless.serialisers import ApplicationConfigurationSerializer
23 from paperless.serialisers import GroupSerializer
24 from paperless.serialisers import ProfileSerializer
25 from paperless.serialisers import UserSerializer
26
27
28 class StandardPagination(PageNumberPagination):
29 page_size = 25
30 page_size_query_param = "page_size"
31 max_page_size = 100000
32
33 def get_paginated_response(self, data):
34 return Response(
35 OrderedDict(
36 [
37 ("count", self.page.paginator.count),
38 ("next", self.get_next_link()),
39 ("previous", self.get_previous_link()),
40 ("all", self.get_all_result_ids()),
41 ("results", data),
42 ],
43 ),
44 )
45
46 def get_all_result_ids(self):
47 ids = []
48 if hasattr(self.page.paginator.object_list, "saved_results"):
49 results_page = self.page.paginator.object_list.saved_results[0]
50 if results_page is not None:
51 for i in range(len(results_page.results.docs())):
52 try:
53 fields = results_page.results.fields(i)
54 if "id" in fields:
55 ids.append(fields["id"])
56 except Exception:
57 pass
58 else:
59 ids = self.page.paginator.object_list.values_list("pk", flat=True)
60 return ids
61
62 def get_paginated_response_schema(self, schema):
63 response_schema = super().get_paginated_response_schema(schema)
64 response_schema["properties"]["all"] = {
65 "type": "array",
66 "example": "[1, 2, 3]",
67 }
68 return response_schema
69
70
71 class FaviconView(View):
72 def get(self, request, *args, **kwargs): # pragma: no cover
73 favicon = os.path.join(
74 os.path.dirname(__file__),
75 "static",
76 "paperless",
77 "img",
78 "favicon.ico",
79 )
80 with open(favicon, "rb") as f:
81 return HttpResponse(f, content_type="image/x-icon")
82
83
84 class UserViewSet(ModelViewSet):
85 model = User
86
87 queryset = User.objects.exclude(
88 username__in=["consumer", "AnonymousUser"],
89 ).order_by(Lower("username"))
90
91 serializer_class = UserSerializer
92 pagination_class = StandardPagination
93 permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
94 filter_backends = (DjangoFilterBackend, OrderingFilter)
95 filterset_class = UserFilterSet
96 ordering_fields = ("username",)
97
98
99 class GroupViewSet(ModelViewSet):
100 model = Group
101
102 queryset = Group.objects.order_by(Lower("name"))
103
104 serializer_class = GroupSerializer
105 pagination_class = StandardPagination
106 permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
107 filter_backends = (DjangoFilterBackend, OrderingFilter)
108 filterset_class = GroupFilterSet
109 ordering_fields = ("name",)
110
111
112 class ProfileView(GenericAPIView):
113 """
114 User profile view, only available when logged in
115 """
116
117 permission_classes = [IsAuthenticated]
118 serializer_class = ProfileSerializer
119
120 def get(self, request, *args, **kwargs):
121 user = self.request.user
122
123 serializer = self.get_serializer(data=request.data)
124 return Response(serializer.to_representation(user))
125
126 def patch(self, request, *args, **kwargs):
127 serializer = self.get_serializer(data=request.data)
128 serializer.is_valid(raise_exception=True)
129 user = self.request.user if hasattr(self.request, "user") else None
130
131 if len(serializer.validated_data.get("password").replace("*", "")) > 0:
132 user.set_password(serializer.validated_data.get("password"))
133 user.save()
134 serializer.validated_data.pop("password")
135
136 for key, value in serializer.validated_data.items():
137 setattr(user, key, value)
138 user.save()
139
140 return Response(serializer.to_representation(user))
141
142
143 class GenerateAuthTokenView(GenericAPIView):
144 """
145 Generates (or re-generates) an auth token, requires a logged in user
146 unlike the default DRF endpoint
147 """
148
149 permission_classes = [IsAuthenticated]
150
151 def post(self, request, *args, **kwargs):
152 user = self.request.user
153
154 existing_token = Token.objects.filter(user=user).first()
155 if existing_token is not None:
156 existing_token.delete()
157 token = Token.objects.create(user=user)
158 return Response(
159 token.key,
160 )
161
162
163 class ApplicationConfigurationViewSet(ModelViewSet):
164 model = ApplicationConfiguration
165
166 queryset = ApplicationConfiguration.objects
167
168 serializer_class = ApplicationConfigurationSerializer
169 permission_classes = (IsAuthenticated,)
170
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/paperless/views.py b/src/paperless/views.py
--- a/src/paperless/views.py
+++ b/src/paperless/views.py
@@ -11,6 +11,7 @@
from rest_framework.filters import OrderingFilter
from rest_framework.generics import GenericAPIView
from rest_framework.pagination import PageNumberPagination
+from rest_framework.permissions import DjangoObjectPermissions
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
@@ -166,4 +167,4 @@
queryset = ApplicationConfiguration.objects
serializer_class = ApplicationConfigurationSerializer
- permission_classes = (IsAuthenticated,)
+ permission_classes = (IsAuthenticated, DjangoObjectPermissions)
|
{"golden_diff": "diff --git a/src/paperless/views.py b/src/paperless/views.py\n--- a/src/paperless/views.py\n+++ b/src/paperless/views.py\n@@ -11,6 +11,7 @@\n from rest_framework.filters import OrderingFilter\n from rest_framework.generics import GenericAPIView\n from rest_framework.pagination import PageNumberPagination\n+from rest_framework.permissions import DjangoObjectPermissions\n from rest_framework.permissions import IsAuthenticated\n from rest_framework.response import Response\n from rest_framework.viewsets import ModelViewSet\n@@ -166,4 +167,4 @@\n queryset = ApplicationConfiguration.objects\n \n serializer_class = ApplicationConfigurationSerializer\n- permission_classes = (IsAuthenticated,)\n+ permission_classes = (IsAuthenticated, DjangoObjectPermissions)\n", "issue": "[BUG] App Config does not enforce permissions\n### Description\n\nSince Paperless 2.3 or 2.4 there's a new menu item named \"Configuration\" where it's possible to override the logo and a few OCR settings. I'm using a group named `humans` for everyday operations. The group has mere \"view\" permissions for type \"admin\":\r\n\r\n\r\n\r\nAnd yet the users in that group can modify application configurations. I believe this is caused by the `/api/config/` endpoint not having authorization beyond `IsAuthenticated`:\r\n\r\nhttps://github.com/paperless-ngx/paperless-ngx/blob/6d6650d5f6952f3129e9f4632cd149914b344767/src/paperless/views.py#L169\r\n\r\nRemoving the \"view\" permission bit, leaving the group without any for the \"admin\" type, only hides the UI elements. The underlying API is unaffected.\n\n### Steps to reproduce\n\n1. Authenticate with a user with no permissions for type \"admin\" other than \"view\" (the latter being optional if using the API directly).\r\n1. Navigate to `/config` (Application Configuration).\r\n1. Change values.\r\n1. Save.\n\n### Webserver logs\n\n```bash\nN/A\n```\n\n\n### Browser logs\n\n_No response_\n\n### Paperless-ngx version\n\n2.4.0\n\n### Host OS\n\nDebian Linux\n\n### Installation method\n\nDocker - official image\n\n### Browser\n\n_No response_\n\n### Configuration changes\n\n_No response_\n\n### Other\n\n_No response_\n\n### Please confirm the following\n\n- [X] I believe this issue is a bug that affects all users of Paperless-ngx, not something specific to my installation.\n- [X] I have already searched for relevant existing issues and discussions before opening this report.\n- [X] I have updated the title field above with a concise description.\n", "before_files": [{"content": "import os\nfrom collections import OrderedDict\n\nfrom django.contrib.auth.models import Group\nfrom django.contrib.auth.models import User\nfrom django.db.models.functions import Lower\nfrom django.http import HttpResponse\nfrom django.views.generic import View\nfrom django_filters.rest_framework import DjangoFilterBackend\nfrom rest_framework.authtoken.models import Token\nfrom rest_framework.filters import OrderingFilter\nfrom rest_framework.generics import GenericAPIView\nfrom rest_framework.pagination import PageNumberPagination\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.response import Response\nfrom rest_framework.viewsets import ModelViewSet\n\nfrom documents.permissions import PaperlessObjectPermissions\nfrom paperless.filters import GroupFilterSet\nfrom paperless.filters import UserFilterSet\nfrom paperless.models import ApplicationConfiguration\nfrom paperless.serialisers import ApplicationConfigurationSerializer\nfrom paperless.serialisers import GroupSerializer\nfrom paperless.serialisers import ProfileSerializer\nfrom paperless.serialisers import UserSerializer\n\n\nclass StandardPagination(PageNumberPagination):\n page_size = 25\n page_size_query_param = \"page_size\"\n max_page_size = 100000\n\n def get_paginated_response(self, data):\n return Response(\n OrderedDict(\n [\n (\"count\", self.page.paginator.count),\n (\"next\", self.get_next_link()),\n (\"previous\", self.get_previous_link()),\n (\"all\", self.get_all_result_ids()),\n (\"results\", data),\n ],\n ),\n )\n\n def get_all_result_ids(self):\n ids = []\n if hasattr(self.page.paginator.object_list, \"saved_results\"):\n results_page = self.page.paginator.object_list.saved_results[0]\n if results_page is not None:\n for i in range(len(results_page.results.docs())):\n try:\n fields = results_page.results.fields(i)\n if \"id\" in fields:\n ids.append(fields[\"id\"])\n except Exception:\n pass\n else:\n ids = self.page.paginator.object_list.values_list(\"pk\", flat=True)\n return ids\n\n def get_paginated_response_schema(self, schema):\n response_schema = super().get_paginated_response_schema(schema)\n response_schema[\"properties\"][\"all\"] = {\n \"type\": \"array\",\n \"example\": \"[1, 2, 3]\",\n }\n return response_schema\n\n\nclass FaviconView(View):\n def get(self, request, *args, **kwargs): # pragma: no cover\n favicon = os.path.join(\n os.path.dirname(__file__),\n \"static\",\n \"paperless\",\n \"img\",\n \"favicon.ico\",\n )\n with open(favicon, \"rb\") as f:\n return HttpResponse(f, content_type=\"image/x-icon\")\n\n\nclass UserViewSet(ModelViewSet):\n model = User\n\n queryset = User.objects.exclude(\n username__in=[\"consumer\", \"AnonymousUser\"],\n ).order_by(Lower(\"username\"))\n\n serializer_class = UserSerializer\n pagination_class = StandardPagination\n permission_classes = (IsAuthenticated, PaperlessObjectPermissions)\n filter_backends = (DjangoFilterBackend, OrderingFilter)\n filterset_class = UserFilterSet\n ordering_fields = (\"username\",)\n\n\nclass GroupViewSet(ModelViewSet):\n model = Group\n\n queryset = Group.objects.order_by(Lower(\"name\"))\n\n serializer_class = GroupSerializer\n pagination_class = StandardPagination\n permission_classes = (IsAuthenticated, PaperlessObjectPermissions)\n filter_backends = (DjangoFilterBackend, OrderingFilter)\n filterset_class = GroupFilterSet\n ordering_fields = (\"name\",)\n\n\nclass ProfileView(GenericAPIView):\n \"\"\"\n User profile view, only available when logged in\n \"\"\"\n\n permission_classes = [IsAuthenticated]\n serializer_class = ProfileSerializer\n\n def get(self, request, *args, **kwargs):\n user = self.request.user\n\n serializer = self.get_serializer(data=request.data)\n return Response(serializer.to_representation(user))\n\n def patch(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n user = self.request.user if hasattr(self.request, \"user\") else None\n\n if len(serializer.validated_data.get(\"password\").replace(\"*\", \"\")) > 0:\n user.set_password(serializer.validated_data.get(\"password\"))\n user.save()\n serializer.validated_data.pop(\"password\")\n\n for key, value in serializer.validated_data.items():\n setattr(user, key, value)\n user.save()\n\n return Response(serializer.to_representation(user))\n\n\nclass GenerateAuthTokenView(GenericAPIView):\n \"\"\"\n Generates (or re-generates) an auth token, requires a logged in user\n unlike the default DRF endpoint\n \"\"\"\n\n permission_classes = [IsAuthenticated]\n\n def post(self, request, *args, **kwargs):\n user = self.request.user\n\n existing_token = Token.objects.filter(user=user).first()\n if existing_token is not None:\n existing_token.delete()\n token = Token.objects.create(user=user)\n return Response(\n token.key,\n )\n\n\nclass ApplicationConfigurationViewSet(ModelViewSet):\n model = ApplicationConfiguration\n\n queryset = ApplicationConfiguration.objects\n\n serializer_class = ApplicationConfigurationSerializer\n permission_classes = (IsAuthenticated,)\n", "path": "src/paperless/views.py"}], "after_files": [{"content": "import os\nfrom collections import OrderedDict\n\nfrom django.contrib.auth.models import Group\nfrom django.contrib.auth.models import User\nfrom django.db.models.functions import Lower\nfrom django.http import HttpResponse\nfrom django.views.generic import View\nfrom django_filters.rest_framework import DjangoFilterBackend\nfrom rest_framework.authtoken.models import Token\nfrom rest_framework.filters import OrderingFilter\nfrom rest_framework.generics import GenericAPIView\nfrom rest_framework.pagination import PageNumberPagination\nfrom rest_framework.permissions import DjangoObjectPermissions\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.response import Response\nfrom rest_framework.viewsets import ModelViewSet\n\nfrom documents.permissions import PaperlessObjectPermissions\nfrom paperless.filters import GroupFilterSet\nfrom paperless.filters import UserFilterSet\nfrom paperless.models import ApplicationConfiguration\nfrom paperless.serialisers import ApplicationConfigurationSerializer\nfrom paperless.serialisers import GroupSerializer\nfrom paperless.serialisers import ProfileSerializer\nfrom paperless.serialisers import UserSerializer\n\n\nclass StandardPagination(PageNumberPagination):\n page_size = 25\n page_size_query_param = \"page_size\"\n max_page_size = 100000\n\n def get_paginated_response(self, data):\n return Response(\n OrderedDict(\n [\n (\"count\", self.page.paginator.count),\n (\"next\", self.get_next_link()),\n (\"previous\", self.get_previous_link()),\n (\"all\", self.get_all_result_ids()),\n (\"results\", data),\n ],\n ),\n )\n\n def get_all_result_ids(self):\n ids = []\n if hasattr(self.page.paginator.object_list, \"saved_results\"):\n results_page = self.page.paginator.object_list.saved_results[0]\n if results_page is not None:\n for i in range(len(results_page.results.docs())):\n try:\n fields = results_page.results.fields(i)\n if \"id\" in fields:\n ids.append(fields[\"id\"])\n except Exception:\n pass\n else:\n ids = self.page.paginator.object_list.values_list(\"pk\", flat=True)\n return ids\n\n def get_paginated_response_schema(self, schema):\n response_schema = super().get_paginated_response_schema(schema)\n response_schema[\"properties\"][\"all\"] = {\n \"type\": \"array\",\n \"example\": \"[1, 2, 3]\",\n }\n return response_schema\n\n\nclass FaviconView(View):\n def get(self, request, *args, **kwargs): # pragma: no cover\n favicon = os.path.join(\n os.path.dirname(__file__),\n \"static\",\n \"paperless\",\n \"img\",\n \"favicon.ico\",\n )\n with open(favicon, \"rb\") as f:\n return HttpResponse(f, content_type=\"image/x-icon\")\n\n\nclass UserViewSet(ModelViewSet):\n model = User\n\n queryset = User.objects.exclude(\n username__in=[\"consumer\", \"AnonymousUser\"],\n ).order_by(Lower(\"username\"))\n\n serializer_class = UserSerializer\n pagination_class = StandardPagination\n permission_classes = (IsAuthenticated, PaperlessObjectPermissions)\n filter_backends = (DjangoFilterBackend, OrderingFilter)\n filterset_class = UserFilterSet\n ordering_fields = (\"username\",)\n\n\nclass GroupViewSet(ModelViewSet):\n model = Group\n\n queryset = Group.objects.order_by(Lower(\"name\"))\n\n serializer_class = GroupSerializer\n pagination_class = StandardPagination\n permission_classes = (IsAuthenticated, PaperlessObjectPermissions)\n filter_backends = (DjangoFilterBackend, OrderingFilter)\n filterset_class = GroupFilterSet\n ordering_fields = (\"name\",)\n\n\nclass ProfileView(GenericAPIView):\n \"\"\"\n User profile view, only available when logged in\n \"\"\"\n\n permission_classes = [IsAuthenticated]\n serializer_class = ProfileSerializer\n\n def get(self, request, *args, **kwargs):\n user = self.request.user\n\n serializer = self.get_serializer(data=request.data)\n return Response(serializer.to_representation(user))\n\n def patch(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n user = self.request.user if hasattr(self.request, \"user\") else None\n\n if len(serializer.validated_data.get(\"password\").replace(\"*\", \"\")) > 0:\n user.set_password(serializer.validated_data.get(\"password\"))\n user.save()\n serializer.validated_data.pop(\"password\")\n\n for key, value in serializer.validated_data.items():\n setattr(user, key, value)\n user.save()\n\n return Response(serializer.to_representation(user))\n\n\nclass GenerateAuthTokenView(GenericAPIView):\n \"\"\"\n Generates (or re-generates) an auth token, requires a logged in user\n unlike the default DRF endpoint\n \"\"\"\n\n permission_classes = [IsAuthenticated]\n\n def post(self, request, *args, **kwargs):\n user = self.request.user\n\n existing_token = Token.objects.filter(user=user).first()\n if existing_token is not None:\n existing_token.delete()\n token = Token.objects.create(user=user)\n return Response(\n token.key,\n )\n\n\nclass ApplicationConfigurationViewSet(ModelViewSet):\n model = ApplicationConfiguration\n\n queryset = ApplicationConfiguration.objects\n\n serializer_class = ApplicationConfigurationSerializer\n permission_classes = (IsAuthenticated, DjangoObjectPermissions)\n", "path": "src/paperless/views.py"}]}
| 2,237 | 159 |
gh_patches_debug_28362
|
rasdani/github-patches
|
git_diff
|
encode__starlette-404
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Generic interface for storing per-request state.
From this: https://github.com/encode/starlette/issues/374#issuecomment-461684385
I think we should probably support a public interface for storing arbitary per-request state, within the ASGI scope.
That could either be:
1. `request['whatever'] = ...` - Expose the ASGI scope, and let the user handle it directly if they really need to.
2. `request.scope['whatever'] = ...` - Same as before, but let's move it into a different bit of public API.
3. `request.state.whatever = ...` - Store arbitrary state, and always keep it isolated from the rest of the namespace in the ASGI scope. (Implementation wise, store it all in a scope['state'] dictionary)
With (1) and (2) we might need to think a bit about mutability and it's implications. Eg. If the scope is modified, do we update the `request.url`, `request.headers` or whatever else it might affect? Probably we just put a disclaimer on "request.scope" gives you access to the underlying ASGI scope - make sure you've got a bit of an idea what you're doing.
Incidentally, I think I prefer (2) over the "request is dict-like" interface that we currently have.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `starlette/requests.py`
Content:
```
1 import asyncio
2 import http.cookies
3 import json
4 import typing
5 from collections.abc import Mapping
6
7 from starlette.datastructures import URL, Address, FormData, Headers, QueryParams
8 from starlette.formparsers import FormParser, MultiPartParser
9 from starlette.types import Message, Receive, Scope
10
11 try:
12 from multipart.multipart import parse_options_header
13 except ImportError: # pragma: nocover
14 parse_options_header = None # type: ignore
15
16
17 class ClientDisconnect(Exception):
18 pass
19
20
21 class HTTPConnection(Mapping):
22 """
23 A base class for incoming HTTP connections, that is used to provide
24 any functionality that is common to both `Request` and `WebSocket`.
25 """
26
27 def __init__(self, scope: Scope, receive: Receive = None) -> None:
28 assert scope["type"] in ("http", "websocket")
29 self._scope = scope
30
31 def __getitem__(self, key: str) -> str:
32 return self._scope[key]
33
34 def __iter__(self) -> typing.Iterator[str]:
35 return iter(self._scope)
36
37 def __len__(self) -> int:
38 return len(self._scope)
39
40 @property
41 def app(self) -> typing.Any:
42 return self._scope["app"]
43
44 @property
45 def url(self) -> URL:
46 if not hasattr(self, "_url"):
47 self._url = URL(scope=self._scope)
48 return self._url
49
50 @property
51 def headers(self) -> Headers:
52 if not hasattr(self, "_headers"):
53 self._headers = Headers(scope=self._scope)
54 return self._headers
55
56 @property
57 def query_params(self) -> QueryParams:
58 if not hasattr(self, "_query_params"):
59 self._query_params = QueryParams(self._scope["query_string"])
60 return self._query_params
61
62 @property
63 def path_params(self) -> dict:
64 return self._scope.get("path_params", {})
65
66 @property
67 def cookies(self) -> typing.Dict[str, str]:
68 if not hasattr(self, "_cookies"):
69 cookies = {}
70 cookie_header = self.headers.get("cookie")
71 if cookie_header:
72 cookie = http.cookies.SimpleCookie()
73 cookie.load(cookie_header)
74 for key, morsel in cookie.items():
75 cookies[key] = morsel.value
76 self._cookies = cookies
77 return self._cookies
78
79 @property
80 def client(self) -> Address:
81 host, port = self._scope.get("client") or (None, None)
82 return Address(host=host, port=port)
83
84 @property
85 def session(self) -> dict:
86 assert (
87 "session" in self._scope
88 ), "SessionMiddleware must be installed to access request.session"
89 return self._scope["session"]
90
91 @property
92 def database(self) -> typing.Any: # pragma: no cover
93 # NOTE: Pending deprecation. You probably want to look at the
94 # stand-alone `databases` package instead.
95 # https://github.com/encode/databases
96 assert (
97 "database" in self._scope
98 ), "DatabaseMiddleware must be installed to access request.database"
99 return self._scope["database"]
100
101 @property
102 def auth(self) -> typing.Any:
103 assert (
104 "auth" in self._scope
105 ), "AuthenticationMiddleware must be installed to access request.auth"
106 return self._scope["auth"]
107
108 @property
109 def user(self) -> typing.Any:
110 assert (
111 "user" in self._scope
112 ), "AuthenticationMiddleware must be installed to access request.user"
113 return self._scope["user"]
114
115 def url_for(self, name: str, **path_params: typing.Any) -> str:
116 router = self._scope["router"]
117 url_path = router.url_path_for(name, **path_params)
118 return url_path.make_absolute_url(base_url=self.url)
119
120
121 async def empty_receive() -> Message:
122 raise RuntimeError("Receive channel has not been made available")
123
124
125 class Request(HTTPConnection):
126 def __init__(self, scope: Scope, receive: Receive = empty_receive):
127 super().__init__(scope)
128 assert scope["type"] == "http"
129 self._receive = receive
130 self._stream_consumed = False
131 self._is_disconnected = False
132
133 @property
134 def method(self) -> str:
135 return self._scope["method"]
136
137 @property
138 def receive(self) -> Receive:
139 return self._receive
140
141 async def stream(self) -> typing.AsyncGenerator[bytes, None]:
142 if hasattr(self, "_body"):
143 yield self._body
144 yield b""
145 return
146
147 if self._stream_consumed:
148 raise RuntimeError("Stream consumed")
149
150 self._stream_consumed = True
151 while True:
152 message = await self._receive()
153 if message["type"] == "http.request":
154 body = message.get("body", b"")
155 if body:
156 yield body
157 if not message.get("more_body", False):
158 break
159 elif message["type"] == "http.disconnect":
160 self._is_disconnected = True
161 raise ClientDisconnect()
162 yield b""
163
164 async def body(self) -> bytes:
165 if not hasattr(self, "_body"):
166 body = b""
167 async for chunk in self.stream():
168 body += chunk
169 self._body = body
170 return self._body
171
172 async def json(self) -> typing.Any:
173 if not hasattr(self, "_json"):
174 body = await self.body()
175 self._json = json.loads(body)
176 return self._json
177
178 async def form(self) -> FormData:
179 if not hasattr(self, "_form"):
180 assert (
181 parse_options_header is not None
182 ), "The `python-multipart` library must be installed to use form parsing."
183 content_type_header = self.headers.get("Content-Type")
184 content_type, options = parse_options_header(content_type_header)
185 if content_type == b"multipart/form-data":
186 multipart_parser = MultiPartParser(self.headers, self.stream())
187 self._form = await multipart_parser.parse()
188 elif content_type == b"application/x-www-form-urlencoded":
189 form_parser = FormParser(self.headers, self.stream())
190 self._form = await form_parser.parse()
191 else:
192 self._form = FormData()
193 return self._form
194
195 async def close(self) -> None:
196 if hasattr(self, "_form"):
197 await self._form.close()
198
199 async def is_disconnected(self) -> bool:
200 if not self._is_disconnected:
201 try:
202 message = await asyncio.wait_for(self._receive(), timeout=0.0000001)
203 except asyncio.TimeoutError as exc:
204 message = {}
205
206 if message.get("type") == "http.disconnect":
207 self._is_disconnected = True
208
209 return self._is_disconnected
210
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/starlette/requests.py b/starlette/requests.py
--- a/starlette/requests.py
+++ b/starlette/requests.py
@@ -18,6 +18,10 @@
pass
+class State:
+ pass
+
+
class HTTPConnection(Mapping):
"""
A base class for incoming HTTP connections, that is used to provide
@@ -88,16 +92,6 @@
), "SessionMiddleware must be installed to access request.session"
return self._scope["session"]
- @property
- def database(self) -> typing.Any: # pragma: no cover
- # NOTE: Pending deprecation. You probably want to look at the
- # stand-alone `databases` package instead.
- # https://github.com/encode/databases
- assert (
- "database" in self._scope
- ), "DatabaseMiddleware must be installed to access request.database"
- return self._scope["database"]
-
@property
def auth(self) -> typing.Any:
assert (
@@ -112,6 +106,12 @@
), "AuthenticationMiddleware must be installed to access request.user"
return self._scope["user"]
+ @property
+ def state(self) -> State:
+ if "state" not in self._scope:
+ self._scope["state"] = State()
+ return self._scope["state"]
+
def url_for(self, name: str, **path_params: typing.Any) -> str:
router = self._scope["router"]
url_path = router.url_path_for(name, **path_params)
|
{"golden_diff": "diff --git a/starlette/requests.py b/starlette/requests.py\n--- a/starlette/requests.py\n+++ b/starlette/requests.py\n@@ -18,6 +18,10 @@\n pass\n \n \n+class State:\n+ pass\n+\n+\n class HTTPConnection(Mapping):\n \"\"\"\n A base class for incoming HTTP connections, that is used to provide\n@@ -88,16 +92,6 @@\n ), \"SessionMiddleware must be installed to access request.session\"\n return self._scope[\"session\"]\n \n- @property\n- def database(self) -> typing.Any: # pragma: no cover\n- # NOTE: Pending deprecation. You probably want to look at the\n- # stand-alone `databases` package instead.\n- # https://github.com/encode/databases\n- assert (\n- \"database\" in self._scope\n- ), \"DatabaseMiddleware must be installed to access request.database\"\n- return self._scope[\"database\"]\n-\n @property\n def auth(self) -> typing.Any:\n assert (\n@@ -112,6 +106,12 @@\n ), \"AuthenticationMiddleware must be installed to access request.user\"\n return self._scope[\"user\"]\n \n+ @property\n+ def state(self) -> State:\n+ if \"state\" not in self._scope:\n+ self._scope[\"state\"] = State()\n+ return self._scope[\"state\"]\n+\n def url_for(self, name: str, **path_params: typing.Any) -> str:\n router = self._scope[\"router\"]\n url_path = router.url_path_for(name, **path_params)\n", "issue": "Generic interface for storing per-request state.\nFrom this: https://github.com/encode/starlette/issues/374#issuecomment-461684385\r\n\r\nI think we should probably support a public interface for storing arbitary per-request state, within the ASGI scope.\r\n\r\nThat could either be:\r\n\r\n1. `request['whatever'] = ...` - Expose the ASGI scope, and let the user handle it directly if they really need to.\r\n2. `request.scope['whatever'] = ...` - Same as before, but let's move it into a different bit of public API.\r\n3. `request.state.whatever = ...` - Store arbitrary state, and always keep it isolated from the rest of the namespace in the ASGI scope. (Implementation wise, store it all in a scope['state'] dictionary)\r\n\r\nWith (1) and (2) we might need to think a bit about mutability and it's implications. Eg. If the scope is modified, do we update the `request.url`, `request.headers` or whatever else it might affect? Probably we just put a disclaimer on \"request.scope\" gives you access to the underlying ASGI scope - make sure you've got a bit of an idea what you're doing.\r\n\r\nIncidentally, I think I prefer (2) over the \"request is dict-like\" interface that we currently have.\n", "before_files": [{"content": "import asyncio\nimport http.cookies\nimport json\nimport typing\nfrom collections.abc import Mapping\n\nfrom starlette.datastructures import URL, Address, FormData, Headers, QueryParams\nfrom starlette.formparsers import FormParser, MultiPartParser\nfrom starlette.types import Message, Receive, Scope\n\ntry:\n from multipart.multipart import parse_options_header\nexcept ImportError: # pragma: nocover\n parse_options_header = None # type: ignore\n\n\nclass ClientDisconnect(Exception):\n pass\n\n\nclass HTTPConnection(Mapping):\n \"\"\"\n A base class for incoming HTTP connections, that is used to provide\n any functionality that is common to both `Request` and `WebSocket`.\n \"\"\"\n\n def __init__(self, scope: Scope, receive: Receive = None) -> None:\n assert scope[\"type\"] in (\"http\", \"websocket\")\n self._scope = scope\n\n def __getitem__(self, key: str) -> str:\n return self._scope[key]\n\n def __iter__(self) -> typing.Iterator[str]:\n return iter(self._scope)\n\n def __len__(self) -> int:\n return len(self._scope)\n\n @property\n def app(self) -> typing.Any:\n return self._scope[\"app\"]\n\n @property\n def url(self) -> URL:\n if not hasattr(self, \"_url\"):\n self._url = URL(scope=self._scope)\n return self._url\n\n @property\n def headers(self) -> Headers:\n if not hasattr(self, \"_headers\"):\n self._headers = Headers(scope=self._scope)\n return self._headers\n\n @property\n def query_params(self) -> QueryParams:\n if not hasattr(self, \"_query_params\"):\n self._query_params = QueryParams(self._scope[\"query_string\"])\n return self._query_params\n\n @property\n def path_params(self) -> dict:\n return self._scope.get(\"path_params\", {})\n\n @property\n def cookies(self) -> typing.Dict[str, str]:\n if not hasattr(self, \"_cookies\"):\n cookies = {}\n cookie_header = self.headers.get(\"cookie\")\n if cookie_header:\n cookie = http.cookies.SimpleCookie()\n cookie.load(cookie_header)\n for key, morsel in cookie.items():\n cookies[key] = morsel.value\n self._cookies = cookies\n return self._cookies\n\n @property\n def client(self) -> Address:\n host, port = self._scope.get(\"client\") or (None, None)\n return Address(host=host, port=port)\n\n @property\n def session(self) -> dict:\n assert (\n \"session\" in self._scope\n ), \"SessionMiddleware must be installed to access request.session\"\n return self._scope[\"session\"]\n\n @property\n def database(self) -> typing.Any: # pragma: no cover\n # NOTE: Pending deprecation. You probably want to look at the\n # stand-alone `databases` package instead.\n # https://github.com/encode/databases\n assert (\n \"database\" in self._scope\n ), \"DatabaseMiddleware must be installed to access request.database\"\n return self._scope[\"database\"]\n\n @property\n def auth(self) -> typing.Any:\n assert (\n \"auth\" in self._scope\n ), \"AuthenticationMiddleware must be installed to access request.auth\"\n return self._scope[\"auth\"]\n\n @property\n def user(self) -> typing.Any:\n assert (\n \"user\" in self._scope\n ), \"AuthenticationMiddleware must be installed to access request.user\"\n return self._scope[\"user\"]\n\n def url_for(self, name: str, **path_params: typing.Any) -> str:\n router = self._scope[\"router\"]\n url_path = router.url_path_for(name, **path_params)\n return url_path.make_absolute_url(base_url=self.url)\n\n\nasync def empty_receive() -> Message:\n raise RuntimeError(\"Receive channel has not been made available\")\n\n\nclass Request(HTTPConnection):\n def __init__(self, scope: Scope, receive: Receive = empty_receive):\n super().__init__(scope)\n assert scope[\"type\"] == \"http\"\n self._receive = receive\n self._stream_consumed = False\n self._is_disconnected = False\n\n @property\n def method(self) -> str:\n return self._scope[\"method\"]\n\n @property\n def receive(self) -> Receive:\n return self._receive\n\n async def stream(self) -> typing.AsyncGenerator[bytes, None]:\n if hasattr(self, \"_body\"):\n yield self._body\n yield b\"\"\n return\n\n if self._stream_consumed:\n raise RuntimeError(\"Stream consumed\")\n\n self._stream_consumed = True\n while True:\n message = await self._receive()\n if message[\"type\"] == \"http.request\":\n body = message.get(\"body\", b\"\")\n if body:\n yield body\n if not message.get(\"more_body\", False):\n break\n elif message[\"type\"] == \"http.disconnect\":\n self._is_disconnected = True\n raise ClientDisconnect()\n yield b\"\"\n\n async def body(self) -> bytes:\n if not hasattr(self, \"_body\"):\n body = b\"\"\n async for chunk in self.stream():\n body += chunk\n self._body = body\n return self._body\n\n async def json(self) -> typing.Any:\n if not hasattr(self, \"_json\"):\n body = await self.body()\n self._json = json.loads(body)\n return self._json\n\n async def form(self) -> FormData:\n if not hasattr(self, \"_form\"):\n assert (\n parse_options_header is not None\n ), \"The `python-multipart` library must be installed to use form parsing.\"\n content_type_header = self.headers.get(\"Content-Type\")\n content_type, options = parse_options_header(content_type_header)\n if content_type == b\"multipart/form-data\":\n multipart_parser = MultiPartParser(self.headers, self.stream())\n self._form = await multipart_parser.parse()\n elif content_type == b\"application/x-www-form-urlencoded\":\n form_parser = FormParser(self.headers, self.stream())\n self._form = await form_parser.parse()\n else:\n self._form = FormData()\n return self._form\n\n async def close(self) -> None:\n if hasattr(self, \"_form\"):\n await self._form.close()\n\n async def is_disconnected(self) -> bool:\n if not self._is_disconnected:\n try:\n message = await asyncio.wait_for(self._receive(), timeout=0.0000001)\n except asyncio.TimeoutError as exc:\n message = {}\n\n if message.get(\"type\") == \"http.disconnect\":\n self._is_disconnected = True\n\n return self._is_disconnected\n", "path": "starlette/requests.py"}], "after_files": [{"content": "import asyncio\nimport http.cookies\nimport json\nimport typing\nfrom collections.abc import Mapping\n\nfrom starlette.datastructures import URL, Address, FormData, Headers, QueryParams\nfrom starlette.formparsers import FormParser, MultiPartParser\nfrom starlette.types import Message, Receive, Scope\n\ntry:\n from multipart.multipart import parse_options_header\nexcept ImportError: # pragma: nocover\n parse_options_header = None # type: ignore\n\n\nclass ClientDisconnect(Exception):\n pass\n\n\nclass State:\n pass\n\n\nclass HTTPConnection(Mapping):\n \"\"\"\n A base class for incoming HTTP connections, that is used to provide\n any functionality that is common to both `Request` and `WebSocket`.\n \"\"\"\n\n def __init__(self, scope: Scope, receive: Receive = None) -> None:\n assert scope[\"type\"] in (\"http\", \"websocket\")\n self._scope = scope\n\n def __getitem__(self, key: str) -> str:\n return self._scope[key]\n\n def __iter__(self) -> typing.Iterator[str]:\n return iter(self._scope)\n\n def __len__(self) -> int:\n return len(self._scope)\n\n @property\n def app(self) -> typing.Any:\n return self._scope[\"app\"]\n\n @property\n def url(self) -> URL:\n if not hasattr(self, \"_url\"):\n self._url = URL(scope=self._scope)\n return self._url\n\n @property\n def headers(self) -> Headers:\n if not hasattr(self, \"_headers\"):\n self._headers = Headers(scope=self._scope)\n return self._headers\n\n @property\n def query_params(self) -> QueryParams:\n if not hasattr(self, \"_query_params\"):\n self._query_params = QueryParams(self._scope[\"query_string\"])\n return self._query_params\n\n @property\n def path_params(self) -> dict:\n return self._scope.get(\"path_params\", {})\n\n @property\n def cookies(self) -> typing.Dict[str, str]:\n if not hasattr(self, \"_cookies\"):\n cookies = {}\n cookie_header = self.headers.get(\"cookie\")\n if cookie_header:\n cookie = http.cookies.SimpleCookie()\n cookie.load(cookie_header)\n for key, morsel in cookie.items():\n cookies[key] = morsel.value\n self._cookies = cookies\n return self._cookies\n\n @property\n def client(self) -> Address:\n host, port = self._scope.get(\"client\") or (None, None)\n return Address(host=host, port=port)\n\n @property\n def session(self) -> dict:\n assert (\n \"session\" in self._scope\n ), \"SessionMiddleware must be installed to access request.session\"\n return self._scope[\"session\"]\n\n @property\n def auth(self) -> typing.Any:\n assert (\n \"auth\" in self._scope\n ), \"AuthenticationMiddleware must be installed to access request.auth\"\n return self._scope[\"auth\"]\n\n @property\n def user(self) -> typing.Any:\n assert (\n \"user\" in self._scope\n ), \"AuthenticationMiddleware must be installed to access request.user\"\n return self._scope[\"user\"]\n\n @property\n def state(self) -> State:\n if \"state\" not in self._scope:\n self._scope[\"state\"] = State()\n return self._scope[\"state\"]\n\n def url_for(self, name: str, **path_params: typing.Any) -> str:\n router = self._scope[\"router\"]\n url_path = router.url_path_for(name, **path_params)\n return url_path.make_absolute_url(base_url=self.url)\n\n\nasync def empty_receive() -> Message:\n raise RuntimeError(\"Receive channel has not been made available\")\n\n\nclass Request(HTTPConnection):\n def __init__(self, scope: Scope, receive: Receive = empty_receive):\n super().__init__(scope)\n assert scope[\"type\"] == \"http\"\n self._receive = receive\n self._stream_consumed = False\n self._is_disconnected = False\n\n @property\n def method(self) -> str:\n return self._scope[\"method\"]\n\n @property\n def receive(self) -> Receive:\n return self._receive\n\n async def stream(self) -> typing.AsyncGenerator[bytes, None]:\n if hasattr(self, \"_body\"):\n yield self._body\n yield b\"\"\n return\n\n if self._stream_consumed:\n raise RuntimeError(\"Stream consumed\")\n\n self._stream_consumed = True\n while True:\n message = await self._receive()\n if message[\"type\"] == \"http.request\":\n body = message.get(\"body\", b\"\")\n if body:\n yield body\n if not message.get(\"more_body\", False):\n break\n elif message[\"type\"] == \"http.disconnect\":\n self._is_disconnected = True\n raise ClientDisconnect()\n yield b\"\"\n\n async def body(self) -> bytes:\n if not hasattr(self, \"_body\"):\n body = b\"\"\n async for chunk in self.stream():\n body += chunk\n self._body = body\n return self._body\n\n async def json(self) -> typing.Any:\n if not hasattr(self, \"_json\"):\n body = await self.body()\n self._json = json.loads(body)\n return self._json\n\n async def form(self) -> FormData:\n if not hasattr(self, \"_form\"):\n assert (\n parse_options_header is not None\n ), \"The `python-multipart` library must be installed to use form parsing.\"\n content_type_header = self.headers.get(\"Content-Type\")\n content_type, options = parse_options_header(content_type_header)\n if content_type == b\"multipart/form-data\":\n multipart_parser = MultiPartParser(self.headers, self.stream())\n self._form = await multipart_parser.parse()\n elif content_type == b\"application/x-www-form-urlencoded\":\n form_parser = FormParser(self.headers, self.stream())\n self._form = await form_parser.parse()\n else:\n self._form = FormData()\n return self._form\n\n async def close(self) -> None:\n if hasattr(self, \"_form\"):\n await self._form.close()\n\n async def is_disconnected(self) -> bool:\n if not self._is_disconnected:\n try:\n message = await asyncio.wait_for(self._receive(), timeout=0.0000001)\n except asyncio.TimeoutError as exc:\n message = {}\n\n if message.get(\"type\") == \"http.disconnect\":\n self._is_disconnected = True\n\n return self._is_disconnected\n", "path": "starlette/requests.py"}]}
| 2,550 | 361 |
gh_patches_debug_698
|
rasdani/github-patches
|
git_diff
|
open-mmlab__mmdetection-6034
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Missing '**kwargs' parameters passing to imshow_bboxes() in show_result() of rpn.py
https://github.com/open-mmlab/mmdetection/blob/bde7b4b7eea9dd6ee91a486c6996b2d68662366d/mmdet/models/detectors/rpn.py#L155
'**kwargs' parameters haven't passed to mmcv.imshow_bboxes() in show_result() of mmdetection/mmdet/models/detectors/rpn.py
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mmdet/models/detectors/rpn.py`
Content:
```
1 # Copyright (c) OpenMMLab. All rights reserved.
2 import warnings
3
4 import mmcv
5 import torch
6 from mmcv.image import tensor2imgs
7
8 from mmdet.core import bbox_mapping
9 from ..builder import DETECTORS, build_backbone, build_head, build_neck
10 from .base import BaseDetector
11
12
13 @DETECTORS.register_module()
14 class RPN(BaseDetector):
15 """Implementation of Region Proposal Network."""
16
17 def __init__(self,
18 backbone,
19 neck,
20 rpn_head,
21 train_cfg,
22 test_cfg,
23 pretrained=None,
24 init_cfg=None):
25 super(RPN, self).__init__(init_cfg)
26 if pretrained:
27 warnings.warn('DeprecationWarning: pretrained is deprecated, '
28 'please use "init_cfg" instead')
29 backbone.pretrained = pretrained
30 self.backbone = build_backbone(backbone)
31 self.neck = build_neck(neck) if neck is not None else None
32 rpn_train_cfg = train_cfg.rpn if train_cfg is not None else None
33 rpn_head.update(train_cfg=rpn_train_cfg)
34 rpn_head.update(test_cfg=test_cfg.rpn)
35 self.rpn_head = build_head(rpn_head)
36 self.train_cfg = train_cfg
37 self.test_cfg = test_cfg
38
39 def extract_feat(self, img):
40 """Extract features.
41
42 Args:
43 img (torch.Tensor): Image tensor with shape (n, c, h ,w).
44
45 Returns:
46 list[torch.Tensor]: Multi-level features that may have
47 different resolutions.
48 """
49 x = self.backbone(img)
50 if self.with_neck:
51 x = self.neck(x)
52 return x
53
54 def forward_dummy(self, img):
55 """Dummy forward function."""
56 x = self.extract_feat(img)
57 rpn_outs = self.rpn_head(x)
58 return rpn_outs
59
60 def forward_train(self,
61 img,
62 img_metas,
63 gt_bboxes=None,
64 gt_bboxes_ignore=None):
65 """
66 Args:
67 img (Tensor): Input images of shape (N, C, H, W).
68 Typically these should be mean centered and std scaled.
69 img_metas (list[dict]): A List of image info dict where each dict
70 has: 'img_shape', 'scale_factor', 'flip', and may also contain
71 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
72 For details on the values of these keys see
73 :class:`mmdet.datasets.pipelines.Collect`.
74 gt_bboxes (list[Tensor]): Each item are the truth boxes for each
75 image in [tl_x, tl_y, br_x, br_y] format.
76 gt_bboxes_ignore (None | list[Tensor]): Specify which bounding
77 boxes can be ignored when computing the loss.
78
79 Returns:
80 dict[str, Tensor]: A dictionary of loss components.
81 """
82 if (isinstance(self.train_cfg.rpn, dict)
83 and self.train_cfg.rpn.get('debug', False)):
84 self.rpn_head.debug_imgs = tensor2imgs(img)
85
86 x = self.extract_feat(img)
87 losses = self.rpn_head.forward_train(x, img_metas, gt_bboxes, None,
88 gt_bboxes_ignore)
89 return losses
90
91 def simple_test(self, img, img_metas, rescale=False):
92 """Test function without test time augmentation.
93
94 Args:
95 imgs (list[torch.Tensor]): List of multiple images
96 img_metas (list[dict]): List of image information.
97 rescale (bool, optional): Whether to rescale the results.
98 Defaults to False.
99
100 Returns:
101 list[np.ndarray]: proposals
102 """
103 x = self.extract_feat(img)
104 # get origin input shape to onnx dynamic input shape
105 if torch.onnx.is_in_onnx_export():
106 img_shape = torch._shape_as_tensor(img)[2:]
107 img_metas[0]['img_shape_for_onnx'] = img_shape
108 proposal_list = self.rpn_head.simple_test_rpn(x, img_metas)
109 if rescale:
110 for proposals, meta in zip(proposal_list, img_metas):
111 proposals[:, :4] /= proposals.new_tensor(meta['scale_factor'])
112 if torch.onnx.is_in_onnx_export():
113 return proposal_list
114
115 return [proposal.cpu().numpy() for proposal in proposal_list]
116
117 def aug_test(self, imgs, img_metas, rescale=False):
118 """Test function with test time augmentation.
119
120 Args:
121 imgs (list[torch.Tensor]): List of multiple images
122 img_metas (list[dict]): List of image information.
123 rescale (bool, optional): Whether to rescale the results.
124 Defaults to False.
125
126 Returns:
127 list[np.ndarray]: proposals
128 """
129 proposal_list = self.rpn_head.aug_test_rpn(
130 self.extract_feats(imgs), img_metas)
131 if not rescale:
132 for proposals, img_meta in zip(proposal_list, img_metas[0]):
133 img_shape = img_meta['img_shape']
134 scale_factor = img_meta['scale_factor']
135 flip = img_meta['flip']
136 flip_direction = img_meta['flip_direction']
137 proposals[:, :4] = bbox_mapping(proposals[:, :4], img_shape,
138 scale_factor, flip,
139 flip_direction)
140 return [proposal.cpu().numpy() for proposal in proposal_list]
141
142 def show_result(self, data, result, top_k=20, **kwargs):
143 """Show RPN proposals on the image.
144
145 Args:
146 data (str or np.ndarray): Image filename or loaded image.
147 result (Tensor or tuple): The results to draw over `img`
148 bbox_result or (bbox_result, segm_result).
149 top_k (int): Plot the first k bboxes only
150 if set positive. Default: 20
151
152 Returns:
153 np.ndarray: The image with bboxes drawn on it.
154 """
155 mmcv.imshow_bboxes(data, result, top_k=top_k)
156
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/mmdet/models/detectors/rpn.py b/mmdet/models/detectors/rpn.py
--- a/mmdet/models/detectors/rpn.py
+++ b/mmdet/models/detectors/rpn.py
@@ -152,4 +152,4 @@
Returns:
np.ndarray: The image with bboxes drawn on it.
"""
- mmcv.imshow_bboxes(data, result, top_k=top_k)
+ mmcv.imshow_bboxes(data, result, top_k=top_k, **kwargs)
|
{"golden_diff": "diff --git a/mmdet/models/detectors/rpn.py b/mmdet/models/detectors/rpn.py\n--- a/mmdet/models/detectors/rpn.py\n+++ b/mmdet/models/detectors/rpn.py\n@@ -152,4 +152,4 @@\n Returns:\n np.ndarray: The image with bboxes drawn on it.\n \"\"\"\n- mmcv.imshow_bboxes(data, result, top_k=top_k)\n+ mmcv.imshow_bboxes(data, result, top_k=top_k, **kwargs)\n", "issue": "Missing '**kwargs' parameters passing to imshow_bboxes() in show_result() of rpn.py\nhttps://github.com/open-mmlab/mmdetection/blob/bde7b4b7eea9dd6ee91a486c6996b2d68662366d/mmdet/models/detectors/rpn.py#L155\r\n\r\n'**kwargs' parameters haven't passed to mmcv.imshow_bboxes() in show_result() of mmdetection/mmdet/models/detectors/rpn.py\r\n\n", "before_files": [{"content": "# Copyright (c) OpenMMLab. All rights reserved.\nimport warnings\n\nimport mmcv\nimport torch\nfrom mmcv.image import tensor2imgs\n\nfrom mmdet.core import bbox_mapping\nfrom ..builder import DETECTORS, build_backbone, build_head, build_neck\nfrom .base import BaseDetector\n\n\[email protected]_module()\nclass RPN(BaseDetector):\n \"\"\"Implementation of Region Proposal Network.\"\"\"\n\n def __init__(self,\n backbone,\n neck,\n rpn_head,\n train_cfg,\n test_cfg,\n pretrained=None,\n init_cfg=None):\n super(RPN, self).__init__(init_cfg)\n if pretrained:\n warnings.warn('DeprecationWarning: pretrained is deprecated, '\n 'please use \"init_cfg\" instead')\n backbone.pretrained = pretrained\n self.backbone = build_backbone(backbone)\n self.neck = build_neck(neck) if neck is not None else None\n rpn_train_cfg = train_cfg.rpn if train_cfg is not None else None\n rpn_head.update(train_cfg=rpn_train_cfg)\n rpn_head.update(test_cfg=test_cfg.rpn)\n self.rpn_head = build_head(rpn_head)\n self.train_cfg = train_cfg\n self.test_cfg = test_cfg\n\n def extract_feat(self, img):\n \"\"\"Extract features.\n\n Args:\n img (torch.Tensor): Image tensor with shape (n, c, h ,w).\n\n Returns:\n list[torch.Tensor]: Multi-level features that may have\n different resolutions.\n \"\"\"\n x = self.backbone(img)\n if self.with_neck:\n x = self.neck(x)\n return x\n\n def forward_dummy(self, img):\n \"\"\"Dummy forward function.\"\"\"\n x = self.extract_feat(img)\n rpn_outs = self.rpn_head(x)\n return rpn_outs\n\n def forward_train(self,\n img,\n img_metas,\n gt_bboxes=None,\n gt_bboxes_ignore=None):\n \"\"\"\n Args:\n img (Tensor): Input images of shape (N, C, H, W).\n Typically these should be mean centered and std scaled.\n img_metas (list[dict]): A List of image info dict where each dict\n has: 'img_shape', 'scale_factor', 'flip', and may also contain\n 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.\n For details on the values of these keys see\n :class:`mmdet.datasets.pipelines.Collect`.\n gt_bboxes (list[Tensor]): Each item are the truth boxes for each\n image in [tl_x, tl_y, br_x, br_y] format.\n gt_bboxes_ignore (None | list[Tensor]): Specify which bounding\n boxes can be ignored when computing the loss.\n\n Returns:\n dict[str, Tensor]: A dictionary of loss components.\n \"\"\"\n if (isinstance(self.train_cfg.rpn, dict)\n and self.train_cfg.rpn.get('debug', False)):\n self.rpn_head.debug_imgs = tensor2imgs(img)\n\n x = self.extract_feat(img)\n losses = self.rpn_head.forward_train(x, img_metas, gt_bboxes, None,\n gt_bboxes_ignore)\n return losses\n\n def simple_test(self, img, img_metas, rescale=False):\n \"\"\"Test function without test time augmentation.\n\n Args:\n imgs (list[torch.Tensor]): List of multiple images\n img_metas (list[dict]): List of image information.\n rescale (bool, optional): Whether to rescale the results.\n Defaults to False.\n\n Returns:\n list[np.ndarray]: proposals\n \"\"\"\n x = self.extract_feat(img)\n # get origin input shape to onnx dynamic input shape\n if torch.onnx.is_in_onnx_export():\n img_shape = torch._shape_as_tensor(img)[2:]\n img_metas[0]['img_shape_for_onnx'] = img_shape\n proposal_list = self.rpn_head.simple_test_rpn(x, img_metas)\n if rescale:\n for proposals, meta in zip(proposal_list, img_metas):\n proposals[:, :4] /= proposals.new_tensor(meta['scale_factor'])\n if torch.onnx.is_in_onnx_export():\n return proposal_list\n\n return [proposal.cpu().numpy() for proposal in proposal_list]\n\n def aug_test(self, imgs, img_metas, rescale=False):\n \"\"\"Test function with test time augmentation.\n\n Args:\n imgs (list[torch.Tensor]): List of multiple images\n img_metas (list[dict]): List of image information.\n rescale (bool, optional): Whether to rescale the results.\n Defaults to False.\n\n Returns:\n list[np.ndarray]: proposals\n \"\"\"\n proposal_list = self.rpn_head.aug_test_rpn(\n self.extract_feats(imgs), img_metas)\n if not rescale:\n for proposals, img_meta in zip(proposal_list, img_metas[0]):\n img_shape = img_meta['img_shape']\n scale_factor = img_meta['scale_factor']\n flip = img_meta['flip']\n flip_direction = img_meta['flip_direction']\n proposals[:, :4] = bbox_mapping(proposals[:, :4], img_shape,\n scale_factor, flip,\n flip_direction)\n return [proposal.cpu().numpy() for proposal in proposal_list]\n\n def show_result(self, data, result, top_k=20, **kwargs):\n \"\"\"Show RPN proposals on the image.\n\n Args:\n data (str or np.ndarray): Image filename or loaded image.\n result (Tensor or tuple): The results to draw over `img`\n bbox_result or (bbox_result, segm_result).\n top_k (int): Plot the first k bboxes only\n if set positive. Default: 20\n\n Returns:\n np.ndarray: The image with bboxes drawn on it.\n \"\"\"\n mmcv.imshow_bboxes(data, result, top_k=top_k)\n", "path": "mmdet/models/detectors/rpn.py"}], "after_files": [{"content": "# Copyright (c) OpenMMLab. All rights reserved.\nimport warnings\n\nimport mmcv\nimport torch\nfrom mmcv.image import tensor2imgs\n\nfrom mmdet.core import bbox_mapping\nfrom ..builder import DETECTORS, build_backbone, build_head, build_neck\nfrom .base import BaseDetector\n\n\[email protected]_module()\nclass RPN(BaseDetector):\n \"\"\"Implementation of Region Proposal Network.\"\"\"\n\n def __init__(self,\n backbone,\n neck,\n rpn_head,\n train_cfg,\n test_cfg,\n pretrained=None,\n init_cfg=None):\n super(RPN, self).__init__(init_cfg)\n if pretrained:\n warnings.warn('DeprecationWarning: pretrained is deprecated, '\n 'please use \"init_cfg\" instead')\n backbone.pretrained = pretrained\n self.backbone = build_backbone(backbone)\n self.neck = build_neck(neck) if neck is not None else None\n rpn_train_cfg = train_cfg.rpn if train_cfg is not None else None\n rpn_head.update(train_cfg=rpn_train_cfg)\n rpn_head.update(test_cfg=test_cfg.rpn)\n self.rpn_head = build_head(rpn_head)\n self.train_cfg = train_cfg\n self.test_cfg = test_cfg\n\n def extract_feat(self, img):\n \"\"\"Extract features.\n\n Args:\n img (torch.Tensor): Image tensor with shape (n, c, h ,w).\n\n Returns:\n list[torch.Tensor]: Multi-level features that may have\n different resolutions.\n \"\"\"\n x = self.backbone(img)\n if self.with_neck:\n x = self.neck(x)\n return x\n\n def forward_dummy(self, img):\n \"\"\"Dummy forward function.\"\"\"\n x = self.extract_feat(img)\n rpn_outs = self.rpn_head(x)\n return rpn_outs\n\n def forward_train(self,\n img,\n img_metas,\n gt_bboxes=None,\n gt_bboxes_ignore=None):\n \"\"\"\n Args:\n img (Tensor): Input images of shape (N, C, H, W).\n Typically these should be mean centered and std scaled.\n img_metas (list[dict]): A List of image info dict where each dict\n has: 'img_shape', 'scale_factor', 'flip', and may also contain\n 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.\n For details on the values of these keys see\n :class:`mmdet.datasets.pipelines.Collect`.\n gt_bboxes (list[Tensor]): Each item are the truth boxes for each\n image in [tl_x, tl_y, br_x, br_y] format.\n gt_bboxes_ignore (None | list[Tensor]): Specify which bounding\n boxes can be ignored when computing the loss.\n\n Returns:\n dict[str, Tensor]: A dictionary of loss components.\n \"\"\"\n if (isinstance(self.train_cfg.rpn, dict)\n and self.train_cfg.rpn.get('debug', False)):\n self.rpn_head.debug_imgs = tensor2imgs(img)\n\n x = self.extract_feat(img)\n losses = self.rpn_head.forward_train(x, img_metas, gt_bboxes, None,\n gt_bboxes_ignore)\n return losses\n\n def simple_test(self, img, img_metas, rescale=False):\n \"\"\"Test function without test time augmentation.\n\n Args:\n imgs (list[torch.Tensor]): List of multiple images\n img_metas (list[dict]): List of image information.\n rescale (bool, optional): Whether to rescale the results.\n Defaults to False.\n\n Returns:\n list[np.ndarray]: proposals\n \"\"\"\n x = self.extract_feat(img)\n # get origin input shape to onnx dynamic input shape\n if torch.onnx.is_in_onnx_export():\n img_shape = torch._shape_as_tensor(img)[2:]\n img_metas[0]['img_shape_for_onnx'] = img_shape\n proposal_list = self.rpn_head.simple_test_rpn(x, img_metas)\n if rescale:\n for proposals, meta in zip(proposal_list, img_metas):\n proposals[:, :4] /= proposals.new_tensor(meta['scale_factor'])\n if torch.onnx.is_in_onnx_export():\n return proposal_list\n\n return [proposal.cpu().numpy() for proposal in proposal_list]\n\n def aug_test(self, imgs, img_metas, rescale=False):\n \"\"\"Test function with test time augmentation.\n\n Args:\n imgs (list[torch.Tensor]): List of multiple images\n img_metas (list[dict]): List of image information.\n rescale (bool, optional): Whether to rescale the results.\n Defaults to False.\n\n Returns:\n list[np.ndarray]: proposals\n \"\"\"\n proposal_list = self.rpn_head.aug_test_rpn(\n self.extract_feats(imgs), img_metas)\n if not rescale:\n for proposals, img_meta in zip(proposal_list, img_metas[0]):\n img_shape = img_meta['img_shape']\n scale_factor = img_meta['scale_factor']\n flip = img_meta['flip']\n flip_direction = img_meta['flip_direction']\n proposals[:, :4] = bbox_mapping(proposals[:, :4], img_shape,\n scale_factor, flip,\n flip_direction)\n return [proposal.cpu().numpy() for proposal in proposal_list]\n\n def show_result(self, data, result, top_k=20, **kwargs):\n \"\"\"Show RPN proposals on the image.\n\n Args:\n data (str or np.ndarray): Image filename or loaded image.\n result (Tensor or tuple): The results to draw over `img`\n bbox_result or (bbox_result, segm_result).\n top_k (int): Plot the first k bboxes only\n if set positive. Default: 20\n\n Returns:\n np.ndarray: The image with bboxes drawn on it.\n \"\"\"\n mmcv.imshow_bboxes(data, result, top_k=top_k, **kwargs)\n", "path": "mmdet/models/detectors/rpn.py"}]}
| 2,039 | 119 |
gh_patches_debug_5207
|
rasdani/github-patches
|
git_diff
|
pytorch__ignite-3219
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add python 3.12 to CI
## 🚀 Feature
Add python 3.12 to CI: https://github.com/pytorch/ignite/blob/master/.github/workflows/unit-tests.yml
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/mnist/mnist.py`
Content:
```
1 from argparse import ArgumentParser
2
3 import torch
4 import torch.nn.functional as F
5 from torch import nn
6 from torch.optim import SGD
7 from torch.utils.data import DataLoader
8 from torchvision.datasets import MNIST
9 from torchvision.transforms import Compose, Normalize, ToTensor
10 from tqdm import tqdm
11
12 from ignite.engine import create_supervised_evaluator, create_supervised_trainer, Events
13 from ignite.metrics import Accuracy, Loss
14 from ignite.utils import setup_logger
15
16
17 class Net(nn.Module):
18 def __init__(self):
19 super(Net, self).__init__()
20 self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
21 self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
22 self.conv2_drop = nn.Dropout2d()
23 self.fc1 = nn.Linear(320, 50)
24 self.fc2 = nn.Linear(50, 10)
25
26 def forward(self, x):
27 x = F.relu(F.max_pool2d(self.conv1(x), 2))
28 x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
29 x = x.view(-1, 320)
30 x = F.relu(self.fc1(x))
31 x = F.dropout(x, training=self.training)
32 x = self.fc2(x)
33 return F.log_softmax(x, dim=-1)
34
35
36 def get_data_loaders(train_batch_size, val_batch_size):
37 data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,))])
38
39 train_loader = DataLoader(
40 MNIST(download=True, root=".", transform=data_transform, train=True), batch_size=train_batch_size, shuffle=True
41 )
42
43 val_loader = DataLoader(
44 MNIST(download=False, root=".", transform=data_transform, train=False), batch_size=val_batch_size, shuffle=False
45 )
46 return train_loader, val_loader
47
48
49 def run(train_batch_size, val_batch_size, epochs, lr, momentum, log_interval):
50 train_loader, val_loader = get_data_loaders(train_batch_size, val_batch_size)
51 model = Net()
52 device = "cpu"
53
54 if torch.cuda.is_available():
55 device = "cuda"
56
57 model.to(device) # Move model before creating optimizer
58 optimizer = SGD(model.parameters(), lr=lr, momentum=momentum)
59 criterion = nn.NLLLoss()
60 trainer = create_supervised_trainer(model, optimizer, criterion, device=device)
61 trainer.logger = setup_logger("trainer")
62
63 val_metrics = {"accuracy": Accuracy(), "nll": Loss(criterion)}
64 evaluator = create_supervised_evaluator(model, metrics=val_metrics, device=device)
65 evaluator.logger = setup_logger("evaluator")
66
67 pbar = tqdm(initial=0, leave=False, total=len(train_loader), desc=f"ITERATION - loss: {0:.2f}")
68
69 @trainer.on(Events.ITERATION_COMPLETED(every=log_interval))
70 def log_training_loss(engine):
71 pbar.desc = f"ITERATION - loss: {engine.state.output:.2f}"
72 pbar.update(log_interval)
73
74 @trainer.on(Events.EPOCH_COMPLETED)
75 def log_training_results(engine):
76 pbar.refresh()
77 evaluator.run(train_loader)
78 metrics = evaluator.state.metrics
79 avg_accuracy = metrics["accuracy"]
80 avg_nll = metrics["nll"]
81 tqdm.write(
82 f"Training Results - Epoch: {engine.state.epoch} Avg accuracy: {avg_accuracy:.2f} Avg loss: {avg_nll:.2f}"
83 )
84
85 @trainer.on(Events.EPOCH_COMPLETED)
86 def log_validation_results(engine):
87 evaluator.run(val_loader)
88 metrics = evaluator.state.metrics
89 avg_accuracy = metrics["accuracy"]
90 avg_nll = metrics["nll"]
91 tqdm.write(
92 f"Validation Results - Epoch: {engine.state.epoch} Avg accuracy: {avg_accuracy:.2f} Avg loss: {avg_nll:.2f}"
93 )
94
95 pbar.n = pbar.last_print_n = 0
96
97 @trainer.on(Events.EPOCH_COMPLETED | Events.COMPLETED)
98 def log_time(engine):
99 tqdm.write(f"{trainer.last_event_name.name} took { trainer.state.times[trainer.last_event_name.name]} seconds")
100
101 trainer.run(train_loader, max_epochs=epochs)
102 pbar.close()
103
104
105 if __name__ == "__main__":
106 parser = ArgumentParser()
107 parser.add_argument("--batch_size", type=int, default=64, help="input batch size for training (default: 64)")
108 parser.add_argument(
109 "--val_batch_size", type=int, default=1000, help="input batch size for validation (default: 1000)"
110 )
111 parser.add_argument("--epochs", type=int, default=10, help="number of epochs to train (default: 10)")
112 parser.add_argument("--lr", type=float, default=0.01, help="learning rate (default: 0.01)")
113 parser.add_argument("--momentum", type=float, default=0.5, help="SGD momentum (default: 0.5)")
114 parser.add_argument(
115 "--log_interval", type=int, default=10, help="how many batches to wait before logging training status"
116 )
117
118 args = parser.parse_args()
119
120 run(args.batch_size, args.val_batch_size, args.epochs, args.lr, args.momentum, args.log_interval)
121
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/examples/mnist/mnist.py b/examples/mnist/mnist.py
--- a/examples/mnist/mnist.py
+++ b/examples/mnist/mnist.py
@@ -96,7 +96,7 @@
@trainer.on(Events.EPOCH_COMPLETED | Events.COMPLETED)
def log_time(engine):
- tqdm.write(f"{trainer.last_event_name.name} took { trainer.state.times[trainer.last_event_name.name]} seconds")
+ tqdm.write(f"{trainer.last_event_name.name} took {trainer.state.times[trainer.last_event_name.name]} seconds")
trainer.run(train_loader, max_epochs=epochs)
pbar.close()
|
{"golden_diff": "diff --git a/examples/mnist/mnist.py b/examples/mnist/mnist.py\n--- a/examples/mnist/mnist.py\n+++ b/examples/mnist/mnist.py\n@@ -96,7 +96,7 @@\n \n @trainer.on(Events.EPOCH_COMPLETED | Events.COMPLETED)\n def log_time(engine):\n- tqdm.write(f\"{trainer.last_event_name.name} took { trainer.state.times[trainer.last_event_name.name]} seconds\")\n+ tqdm.write(f\"{trainer.last_event_name.name} took {trainer.state.times[trainer.last_event_name.name]} seconds\")\n \n trainer.run(train_loader, max_epochs=epochs)\n pbar.close()\n", "issue": "Add python 3.12 to CI\n## \ud83d\ude80 Feature\r\n\r\nAdd python 3.12 to CI: https://github.com/pytorch/ignite/blob/master/.github/workflows/unit-tests.yml\r\n\n", "before_files": [{"content": "from argparse import ArgumentParser\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\nfrom torch.optim import SGD\nfrom torch.utils.data import DataLoader\nfrom torchvision.datasets import MNIST\nfrom torchvision.transforms import Compose, Normalize, ToTensor\nfrom tqdm import tqdm\n\nfrom ignite.engine import create_supervised_evaluator, create_supervised_trainer, Events\nfrom ignite.metrics import Accuracy, Loss\nfrom ignite.utils import setup_logger\n\n\nclass Net(nn.Module):\n def __init__(self):\n super(Net, self).__init__()\n self.conv1 = nn.Conv2d(1, 10, kernel_size=5)\n self.conv2 = nn.Conv2d(10, 20, kernel_size=5)\n self.conv2_drop = nn.Dropout2d()\n self.fc1 = nn.Linear(320, 50)\n self.fc2 = nn.Linear(50, 10)\n\n def forward(self, x):\n x = F.relu(F.max_pool2d(self.conv1(x), 2))\n x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))\n x = x.view(-1, 320)\n x = F.relu(self.fc1(x))\n x = F.dropout(x, training=self.training)\n x = self.fc2(x)\n return F.log_softmax(x, dim=-1)\n\n\ndef get_data_loaders(train_batch_size, val_batch_size):\n data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,))])\n\n train_loader = DataLoader(\n MNIST(download=True, root=\".\", transform=data_transform, train=True), batch_size=train_batch_size, shuffle=True\n )\n\n val_loader = DataLoader(\n MNIST(download=False, root=\".\", transform=data_transform, train=False), batch_size=val_batch_size, shuffle=False\n )\n return train_loader, val_loader\n\n\ndef run(train_batch_size, val_batch_size, epochs, lr, momentum, log_interval):\n train_loader, val_loader = get_data_loaders(train_batch_size, val_batch_size)\n model = Net()\n device = \"cpu\"\n\n if torch.cuda.is_available():\n device = \"cuda\"\n\n model.to(device) # Move model before creating optimizer\n optimizer = SGD(model.parameters(), lr=lr, momentum=momentum)\n criterion = nn.NLLLoss()\n trainer = create_supervised_trainer(model, optimizer, criterion, device=device)\n trainer.logger = setup_logger(\"trainer\")\n\n val_metrics = {\"accuracy\": Accuracy(), \"nll\": Loss(criterion)}\n evaluator = create_supervised_evaluator(model, metrics=val_metrics, device=device)\n evaluator.logger = setup_logger(\"evaluator\")\n\n pbar = tqdm(initial=0, leave=False, total=len(train_loader), desc=f\"ITERATION - loss: {0:.2f}\")\n\n @trainer.on(Events.ITERATION_COMPLETED(every=log_interval))\n def log_training_loss(engine):\n pbar.desc = f\"ITERATION - loss: {engine.state.output:.2f}\"\n pbar.update(log_interval)\n\n @trainer.on(Events.EPOCH_COMPLETED)\n def log_training_results(engine):\n pbar.refresh()\n evaluator.run(train_loader)\n metrics = evaluator.state.metrics\n avg_accuracy = metrics[\"accuracy\"]\n avg_nll = metrics[\"nll\"]\n tqdm.write(\n f\"Training Results - Epoch: {engine.state.epoch} Avg accuracy: {avg_accuracy:.2f} Avg loss: {avg_nll:.2f}\"\n )\n\n @trainer.on(Events.EPOCH_COMPLETED)\n def log_validation_results(engine):\n evaluator.run(val_loader)\n metrics = evaluator.state.metrics\n avg_accuracy = metrics[\"accuracy\"]\n avg_nll = metrics[\"nll\"]\n tqdm.write(\n f\"Validation Results - Epoch: {engine.state.epoch} Avg accuracy: {avg_accuracy:.2f} Avg loss: {avg_nll:.2f}\"\n )\n\n pbar.n = pbar.last_print_n = 0\n\n @trainer.on(Events.EPOCH_COMPLETED | Events.COMPLETED)\n def log_time(engine):\n tqdm.write(f\"{trainer.last_event_name.name} took { trainer.state.times[trainer.last_event_name.name]} seconds\")\n\n trainer.run(train_loader, max_epochs=epochs)\n pbar.close()\n\n\nif __name__ == \"__main__\":\n parser = ArgumentParser()\n parser.add_argument(\"--batch_size\", type=int, default=64, help=\"input batch size for training (default: 64)\")\n parser.add_argument(\n \"--val_batch_size\", type=int, default=1000, help=\"input batch size for validation (default: 1000)\"\n )\n parser.add_argument(\"--epochs\", type=int, default=10, help=\"number of epochs to train (default: 10)\")\n parser.add_argument(\"--lr\", type=float, default=0.01, help=\"learning rate (default: 0.01)\")\n parser.add_argument(\"--momentum\", type=float, default=0.5, help=\"SGD momentum (default: 0.5)\")\n parser.add_argument(\n \"--log_interval\", type=int, default=10, help=\"how many batches to wait before logging training status\"\n )\n\n args = parser.parse_args()\n\n run(args.batch_size, args.val_batch_size, args.epochs, args.lr, args.momentum, args.log_interval)\n", "path": "examples/mnist/mnist.py"}], "after_files": [{"content": "from argparse import ArgumentParser\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\nfrom torch.optim import SGD\nfrom torch.utils.data import DataLoader\nfrom torchvision.datasets import MNIST\nfrom torchvision.transforms import Compose, Normalize, ToTensor\nfrom tqdm import tqdm\n\nfrom ignite.engine import create_supervised_evaluator, create_supervised_trainer, Events\nfrom ignite.metrics import Accuracy, Loss\nfrom ignite.utils import setup_logger\n\n\nclass Net(nn.Module):\n def __init__(self):\n super(Net, self).__init__()\n self.conv1 = nn.Conv2d(1, 10, kernel_size=5)\n self.conv2 = nn.Conv2d(10, 20, kernel_size=5)\n self.conv2_drop = nn.Dropout2d()\n self.fc1 = nn.Linear(320, 50)\n self.fc2 = nn.Linear(50, 10)\n\n def forward(self, x):\n x = F.relu(F.max_pool2d(self.conv1(x), 2))\n x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))\n x = x.view(-1, 320)\n x = F.relu(self.fc1(x))\n x = F.dropout(x, training=self.training)\n x = self.fc2(x)\n return F.log_softmax(x, dim=-1)\n\n\ndef get_data_loaders(train_batch_size, val_batch_size):\n data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,))])\n\n train_loader = DataLoader(\n MNIST(download=True, root=\".\", transform=data_transform, train=True), batch_size=train_batch_size, shuffle=True\n )\n\n val_loader = DataLoader(\n MNIST(download=False, root=\".\", transform=data_transform, train=False), batch_size=val_batch_size, shuffle=False\n )\n return train_loader, val_loader\n\n\ndef run(train_batch_size, val_batch_size, epochs, lr, momentum, log_interval):\n train_loader, val_loader = get_data_loaders(train_batch_size, val_batch_size)\n model = Net()\n device = \"cpu\"\n\n if torch.cuda.is_available():\n device = \"cuda\"\n\n model.to(device) # Move model before creating optimizer\n optimizer = SGD(model.parameters(), lr=lr, momentum=momentum)\n criterion = nn.NLLLoss()\n trainer = create_supervised_trainer(model, optimizer, criterion, device=device)\n trainer.logger = setup_logger(\"trainer\")\n\n val_metrics = {\"accuracy\": Accuracy(), \"nll\": Loss(criterion)}\n evaluator = create_supervised_evaluator(model, metrics=val_metrics, device=device)\n evaluator.logger = setup_logger(\"evaluator\")\n\n pbar = tqdm(initial=0, leave=False, total=len(train_loader), desc=f\"ITERATION - loss: {0:.2f}\")\n\n @trainer.on(Events.ITERATION_COMPLETED(every=log_interval))\n def log_training_loss(engine):\n pbar.desc = f\"ITERATION - loss: {engine.state.output:.2f}\"\n pbar.update(log_interval)\n\n @trainer.on(Events.EPOCH_COMPLETED)\n def log_training_results(engine):\n pbar.refresh()\n evaluator.run(train_loader)\n metrics = evaluator.state.metrics\n avg_accuracy = metrics[\"accuracy\"]\n avg_nll = metrics[\"nll\"]\n tqdm.write(\n f\"Training Results - Epoch: {engine.state.epoch} Avg accuracy: {avg_accuracy:.2f} Avg loss: {avg_nll:.2f}\"\n )\n\n @trainer.on(Events.EPOCH_COMPLETED)\n def log_validation_results(engine):\n evaluator.run(val_loader)\n metrics = evaluator.state.metrics\n avg_accuracy = metrics[\"accuracy\"]\n avg_nll = metrics[\"nll\"]\n tqdm.write(\n f\"Validation Results - Epoch: {engine.state.epoch} Avg accuracy: {avg_accuracy:.2f} Avg loss: {avg_nll:.2f}\"\n )\n\n pbar.n = pbar.last_print_n = 0\n\n @trainer.on(Events.EPOCH_COMPLETED | Events.COMPLETED)\n def log_time(engine):\n tqdm.write(f\"{trainer.last_event_name.name} took {trainer.state.times[trainer.last_event_name.name]} seconds\")\n\n trainer.run(train_loader, max_epochs=epochs)\n pbar.close()\n\n\nif __name__ == \"__main__\":\n parser = ArgumentParser()\n parser.add_argument(\"--batch_size\", type=int, default=64, help=\"input batch size for training (default: 64)\")\n parser.add_argument(\n \"--val_batch_size\", type=int, default=1000, help=\"input batch size for validation (default: 1000)\"\n )\n parser.add_argument(\"--epochs\", type=int, default=10, help=\"number of epochs to train (default: 10)\")\n parser.add_argument(\"--lr\", type=float, default=0.01, help=\"learning rate (default: 0.01)\")\n parser.add_argument(\"--momentum\", type=float, default=0.5, help=\"SGD momentum (default: 0.5)\")\n parser.add_argument(\n \"--log_interval\", type=int, default=10, help=\"how many batches to wait before logging training status\"\n )\n\n args = parser.parse_args()\n\n run(args.batch_size, args.val_batch_size, args.epochs, args.lr, args.momentum, args.log_interval)\n", "path": "examples/mnist/mnist.py"}]}
| 1,739 | 138 |
gh_patches_debug_3288
|
rasdani/github-patches
|
git_diff
|
mozilla__bugbug-1740
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AttributeError: 'IndexError' object has no attribute 'tb_frame' in Pulse Listener
```
Jul 29 09:37:20 bugbug app/web.1: Traceback (most recent call last):
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/bugbug_http/listener.py", line 69, in _on_message
Jul 29 09:37:20 bugbug app/web.1: user = body["payload"]["data"]["pushlog_pushes"][0]["user"]
Jul 29 09:37:20 bugbug app/web.1: IndexError: list index out of range
Jul 29 09:37:20 bugbug app/web.1: During handling of the above exception, another exception occurred:
Jul 29 09:37:20 bugbug app/web.1: Traceback (most recent call last):
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/bin/bugbug-http-pulse-listener", line 8, in <module>
Jul 29 09:37:20 bugbug app/web.1: sys.exit(main())
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/bugbug_http/listener.py", line 94, in main
Jul 29 09:37:20 bugbug app/web.1: consumer.run()
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/mixins.py", line 175, in run
Jul 29 09:37:20 bugbug app/web.1: for _ in self.consume(limit=None, **kwargs):
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/mixins.py", line 197, in consume
Jul 29 09:37:20 bugbug app/web.1: conn.drain_events(timeout=safety_interval)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/connection.py", line 324, in drain_events
Jul 29 09:37:20 bugbug app/web.1: return self.transport.drain_events(self.connection, **kwargs)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/transport/pyamqp.py", line 103, in drain_events
Jul 29 09:37:20 bugbug app/web.1: return connection.drain_events(**kwargs)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/connection.py", line 508, in drain_events
Jul 29 09:37:20 bugbug app/web.1: while not self.blocking_read(timeout):
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/connection.py", line 514, in blocking_read
Jul 29 09:37:20 bugbug app/web.1: return self.on_inbound_frame(frame)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/method_framing.py", line 79, in on_frame
Jul 29 09:37:20 bugbug app/web.1: callback(channel, msg.frame_method, msg.frame_args, msg)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/connection.py", line 520, in on_inbound_method
Jul 29 09:37:20 bugbug app/web.1: return self.channels[channel_id].dispatch_method(
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/abstract_channel.py", line 145, in dispatch_method
Jul 29 09:37:20 bugbug app/web.1: listener(*args)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/channel.py", line 1615, in _on_basic_deliver
Jul 29 09:37:20 bugbug app/web.1: fun(msg)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 624, in _receive_callback
Jul 29 09:37:20 bugbug app/web.1: return on_m(message) if on_m else self.receive(decoded, message)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 590, in receive
Jul 29 09:37:20 bugbug app/web.1: [callback(body, message) for callback in callbacks]
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 590, in <listcomp>
Jul 29 09:37:20 bugbug app/web.1: [callback(body, message) for callback in callbacks]
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/bugbug_http/listener.py", line 82, in _on_message
Jul 29 09:37:20 bugbug app/web.1: traceback.print_tb(e)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/traceback.py", line 53, in print_tb
Jul 29 09:37:20 bugbug app/web.1: print_list(extract_tb(tb, limit=limit), file=file)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/traceback.py", line 72, in extract_tb
Jul 29 09:37:20 bugbug app/web.1: return StackSummary.extract(walk_tb(tb), limit=limit)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/traceback.py", line 347, in extract
Jul 29 09:37:20 bugbug app/web.1: for f, lineno in frame_gen:
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/traceback.py", line 312, in walk_tb
Jul 29 09:37:20 bugbug app/web.1: yield tb.tb_frame, tb.tb_lineno
Jul 29 09:37:20 bugbug app/web.1: AttributeError: 'IndexError' object has no attribute 'tb_frame'
```
The pulse listener is killed and no longer listens for pushes.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `http_service/bugbug_http/listener.py`
Content:
```
1 #!/usr/bin/env python3
2 # -*- coding: utf-8 -*-
3 """
4 Generate your credentials in https://pulseguardian.mozilla.org
5
6 Call this script like:
7 export PULSE_USER=generated_username
8 export PULSE_PASSWORD=generated_username
9 # In case you want to hit the live server
10 export BUGBUG_HTTP_SERVER=https://bugbug.herokuapp.com
11 cd http_service && docker-compose run bugbug-http-service
12 """
13 import logging
14 import os
15 import traceback
16
17 import requests
18 from kombu import Connection, Exchange, Queue
19 from kombu.mixins import ConsumerMixin
20
21 from bugbug_http.sentry import setup_sentry
22
23 logging.basicConfig()
24 logger = logging.getLogger()
25 logger.setLevel(logging.INFO)
26
27 PORT = os.environ.get("PORT", 8000)
28 BUGBUG_HTTP_SERVER = os.environ.get("BUGBUG_HTTP_SERVER", f"http://localhost:{PORT}")
29 CONNECTION_URL = "amqp://{}:{}@pulse.mozilla.org:5671/?ssl=1"
30
31 if os.environ.get("SENTRY_DSN"):
32 setup_sentry(dsn=os.environ.get("SENTRY_DSN"))
33
34
35 class _GenericConsumer(ConsumerMixin):
36 def __init__(self, connection, queues, callback):
37 self.connection = connection
38 self.queues = queues
39 self.callback = callback
40
41 def get_consumers(self, Consumer, channel):
42 return [Consumer(queues=self.queues, callbacks=[self.callback])]
43
44
45 class HgPushesConsumer:
46 def __init__(self, user, password, callback):
47 self.connection = Connection(CONNECTION_URL.format(user, password))
48 self.queues = [
49 Queue(
50 name="queue/{}/pushes".format(user),
51 exchange=Exchange(
52 "exchange/hgpushes/v2", type="topic", no_declare=True,
53 ),
54 routing_key="#",
55 durable=True,
56 auto_delete=True,
57 )
58 ]
59 self.consumer = _GenericConsumer(self.connection, self.queues, callback)
60
61 def __enter__(self):
62 return self.consumer
63
64 def __exit__(self, type, value, traceback):
65 self.connection.close()
66
67
68 def _on_message(body, message):
69 try:
70 branch = body["payload"]["data"]["repo_url"].split("/")[-1]
71 rev = body["payload"]["data"]["heads"][0]
72
73 if branch in ["autoland", "try"]:
74 user = body["payload"]["data"]["pushlog_pushes"][0]["user"]
75 if user in ("reviewbot", "[email protected]"):
76 return
77
78 url = "{}/push/{}/{}/schedules".format(BUGBUG_HTTP_SERVER, branch, rev)
79 response = requests.get(url, headers={"X-Api-Key": "pulse_listener"})
80 if response.status_code == 202:
81 logger.info("Successfully requested {}/{}".format(branch, rev))
82 else:
83 logger.warning(
84 "We got status: {} for: {}".format(response.status_code, url)
85 )
86 except Exception as e:
87 traceback.print_tb(e)
88 finally:
89 message.ack()
90
91
92 def main():
93 # Generate user/password in https://pulseguardian.mozilla.org/
94 # Set PULSE_USER and PULSE_PASSWORD as env variables
95 user = os.environ.get("PULSE_USER")
96 password = os.environ.get("PULSE_PASSWORD")
97 if user and password:
98 with HgPushesConsumer(user, password, _on_message) as consumer:
99 consumer.run()
100 else:
101 logger.warning(
102 "The Pulse listener will be skipped unless you define PULSE_USER & PULSE_PASSWORD"
103 )
104
105
106 if __name__ == "__main__":
107 main()
108
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/http_service/bugbug_http/listener.py b/http_service/bugbug_http/listener.py
--- a/http_service/bugbug_http/listener.py
+++ b/http_service/bugbug_http/listener.py
@@ -83,8 +83,8 @@
logger.warning(
"We got status: {} for: {}".format(response.status_code, url)
)
- except Exception as e:
- traceback.print_tb(e)
+ except Exception:
+ traceback.print_exc()
finally:
message.ack()
|
{"golden_diff": "diff --git a/http_service/bugbug_http/listener.py b/http_service/bugbug_http/listener.py\n--- a/http_service/bugbug_http/listener.py\n+++ b/http_service/bugbug_http/listener.py\n@@ -83,8 +83,8 @@\n logger.warning(\n \"We got status: {} for: {}\".format(response.status_code, url)\n )\n- except Exception as e:\n- traceback.print_tb(e)\n+ except Exception:\n+ traceback.print_exc()\n finally:\n message.ack()\n", "issue": "AttributeError: 'IndexError' object has no attribute 'tb_frame' in Pulse Listener\n```\r\nJul 29 09:37:20 bugbug app/web.1: Traceback (most recent call last): \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/bugbug_http/listener.py\", line 69, in _on_message \r\nJul 29 09:37:20 bugbug app/web.1: user = body[\"payload\"][\"data\"][\"pushlog_pushes\"][0][\"user\"] \r\nJul 29 09:37:20 bugbug app/web.1: IndexError: list index out of range \r\nJul 29 09:37:20 bugbug app/web.1: During handling of the above exception, another exception occurred: \r\nJul 29 09:37:20 bugbug app/web.1: Traceback (most recent call last): \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/bin/bugbug-http-pulse-listener\", line 8, in <module> \r\nJul 29 09:37:20 bugbug app/web.1: sys.exit(main()) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/bugbug_http/listener.py\", line 94, in main \r\nJul 29 09:37:20 bugbug app/web.1: consumer.run() \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/kombu/mixins.py\", line 175, in run \r\nJul 29 09:37:20 bugbug app/web.1: for _ in self.consume(limit=None, **kwargs): \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/kombu/mixins.py\", line 197, in consume \r\nJul 29 09:37:20 bugbug app/web.1: conn.drain_events(timeout=safety_interval) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/kombu/connection.py\", line 324, in drain_events \r\nJul 29 09:37:20 bugbug app/web.1: return self.transport.drain_events(self.connection, **kwargs) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/kombu/transport/pyamqp.py\", line 103, in drain_events \r\nJul 29 09:37:20 bugbug app/web.1: return connection.drain_events(**kwargs) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/amqp/connection.py\", line 508, in drain_events \r\nJul 29 09:37:20 bugbug app/web.1: while not self.blocking_read(timeout): \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/amqp/connection.py\", line 514, in blocking_read \r\nJul 29 09:37:20 bugbug app/web.1: return self.on_inbound_frame(frame) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/amqp/method_framing.py\", line 79, in on_frame \r\nJul 29 09:37:20 bugbug app/web.1: callback(channel, msg.frame_method, msg.frame_args, msg) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/amqp/connection.py\", line 520, in on_inbound_method \r\nJul 29 09:37:20 bugbug app/web.1: return self.channels[channel_id].dispatch_method( \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/amqp/abstract_channel.py\", line 145, in dispatch_method \r\nJul 29 09:37:20 bugbug app/web.1: listener(*args) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/amqp/channel.py\", line 1615, in _on_basic_deliver \r\nJul 29 09:37:20 bugbug app/web.1: fun(msg) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/kombu/messaging.py\", line 624, in _receive_callback \r\nJul 29 09:37:20 bugbug app/web.1: return on_m(message) if on_m else self.receive(decoded, message) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/kombu/messaging.py\", line 590, in receive \r\nJul 29 09:37:20 bugbug app/web.1: [callback(body, message) for callback in callbacks] \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/kombu/messaging.py\", line 590, in <listcomp> \r\nJul 29 09:37:20 bugbug app/web.1: [callback(body, message) for callback in callbacks] \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/site-packages/bugbug_http/listener.py\", line 82, in _on_message \r\nJul 29 09:37:20 bugbug app/web.1: traceback.print_tb(e) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/traceback.py\", line 53, in print_tb \r\nJul 29 09:37:20 bugbug app/web.1: print_list(extract_tb(tb, limit=limit), file=file) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/traceback.py\", line 72, in extract_tb \r\nJul 29 09:37:20 bugbug app/web.1: return StackSummary.extract(walk_tb(tb), limit=limit) \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/traceback.py\", line 347, in extract \r\nJul 29 09:37:20 bugbug app/web.1: for f, lineno in frame_gen: \r\nJul 29 09:37:20 bugbug app/web.1: File \"/usr/local/lib/python3.8/traceback.py\", line 312, in walk_tb \r\nJul 29 09:37:20 bugbug app/web.1: yield tb.tb_frame, tb.tb_lineno \r\nJul 29 09:37:20 bugbug app/web.1: AttributeError: 'IndexError' object has no attribute 'tb_frame' \r\n```\r\n\r\nThe pulse listener is killed and no longer listens for pushes.\n", "before_files": [{"content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nGenerate your credentials in https://pulseguardian.mozilla.org\n\nCall this script like:\n export PULSE_USER=generated_username\n export PULSE_PASSWORD=generated_username\n # In case you want to hit the live server\n export BUGBUG_HTTP_SERVER=https://bugbug.herokuapp.com\n cd http_service && docker-compose run bugbug-http-service\n\"\"\"\nimport logging\nimport os\nimport traceback\n\nimport requests\nfrom kombu import Connection, Exchange, Queue\nfrom kombu.mixins import ConsumerMixin\n\nfrom bugbug_http.sentry import setup_sentry\n\nlogging.basicConfig()\nlogger = logging.getLogger()\nlogger.setLevel(logging.INFO)\n\nPORT = os.environ.get(\"PORT\", 8000)\nBUGBUG_HTTP_SERVER = os.environ.get(\"BUGBUG_HTTP_SERVER\", f\"http://localhost:{PORT}\")\nCONNECTION_URL = \"amqp://{}:{}@pulse.mozilla.org:5671/?ssl=1\"\n\nif os.environ.get(\"SENTRY_DSN\"):\n setup_sentry(dsn=os.environ.get(\"SENTRY_DSN\"))\n\n\nclass _GenericConsumer(ConsumerMixin):\n def __init__(self, connection, queues, callback):\n self.connection = connection\n self.queues = queues\n self.callback = callback\n\n def get_consumers(self, Consumer, channel):\n return [Consumer(queues=self.queues, callbacks=[self.callback])]\n\n\nclass HgPushesConsumer:\n def __init__(self, user, password, callback):\n self.connection = Connection(CONNECTION_URL.format(user, password))\n self.queues = [\n Queue(\n name=\"queue/{}/pushes\".format(user),\n exchange=Exchange(\n \"exchange/hgpushes/v2\", type=\"topic\", no_declare=True,\n ),\n routing_key=\"#\",\n durable=True,\n auto_delete=True,\n )\n ]\n self.consumer = _GenericConsumer(self.connection, self.queues, callback)\n\n def __enter__(self):\n return self.consumer\n\n def __exit__(self, type, value, traceback):\n self.connection.close()\n\n\ndef _on_message(body, message):\n try:\n branch = body[\"payload\"][\"data\"][\"repo_url\"].split(\"/\")[-1]\n rev = body[\"payload\"][\"data\"][\"heads\"][0]\n\n if branch in [\"autoland\", \"try\"]:\n user = body[\"payload\"][\"data\"][\"pushlog_pushes\"][0][\"user\"]\n if user in (\"reviewbot\", \"[email protected]\"):\n return\n\n url = \"{}/push/{}/{}/schedules\".format(BUGBUG_HTTP_SERVER, branch, rev)\n response = requests.get(url, headers={\"X-Api-Key\": \"pulse_listener\"})\n if response.status_code == 202:\n logger.info(\"Successfully requested {}/{}\".format(branch, rev))\n else:\n logger.warning(\n \"We got status: {} for: {}\".format(response.status_code, url)\n )\n except Exception as e:\n traceback.print_tb(e)\n finally:\n message.ack()\n\n\ndef main():\n # Generate user/password in https://pulseguardian.mozilla.org/\n # Set PULSE_USER and PULSE_PASSWORD as env variables\n user = os.environ.get(\"PULSE_USER\")\n password = os.environ.get(\"PULSE_PASSWORD\")\n if user and password:\n with HgPushesConsumer(user, password, _on_message) as consumer:\n consumer.run()\n else:\n logger.warning(\n \"The Pulse listener will be skipped unless you define PULSE_USER & PULSE_PASSWORD\"\n )\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "http_service/bugbug_http/listener.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nGenerate your credentials in https://pulseguardian.mozilla.org\n\nCall this script like:\n export PULSE_USER=generated_username\n export PULSE_PASSWORD=generated_username\n # In case you want to hit the live server\n export BUGBUG_HTTP_SERVER=https://bugbug.herokuapp.com\n cd http_service && docker-compose run bugbug-http-service\n\"\"\"\nimport logging\nimport os\nimport traceback\n\nimport requests\nfrom kombu import Connection, Exchange, Queue\nfrom kombu.mixins import ConsumerMixin\n\nfrom bugbug_http.sentry import setup_sentry\n\nlogging.basicConfig()\nlogger = logging.getLogger()\nlogger.setLevel(logging.INFO)\n\nPORT = os.environ.get(\"PORT\", 8000)\nBUGBUG_HTTP_SERVER = os.environ.get(\"BUGBUG_HTTP_SERVER\", f\"http://localhost:{PORT}\")\nCONNECTION_URL = \"amqp://{}:{}@pulse.mozilla.org:5671/?ssl=1\"\n\nif os.environ.get(\"SENTRY_DSN\"):\n setup_sentry(dsn=os.environ.get(\"SENTRY_DSN\"))\n\n\nclass _GenericConsumer(ConsumerMixin):\n def __init__(self, connection, queues, callback):\n self.connection = connection\n self.queues = queues\n self.callback = callback\n\n def get_consumers(self, Consumer, channel):\n return [Consumer(queues=self.queues, callbacks=[self.callback])]\n\n\nclass HgPushesConsumer:\n def __init__(self, user, password, callback):\n self.connection = Connection(CONNECTION_URL.format(user, password))\n self.queues = [\n Queue(\n name=\"queue/{}/pushes\".format(user),\n exchange=Exchange(\n \"exchange/hgpushes/v2\", type=\"topic\", no_declare=True,\n ),\n routing_key=\"#\",\n durable=True,\n auto_delete=True,\n )\n ]\n self.consumer = _GenericConsumer(self.connection, self.queues, callback)\n\n def __enter__(self):\n return self.consumer\n\n def __exit__(self, type, value, traceback):\n self.connection.close()\n\n\ndef _on_message(body, message):\n try:\n branch = body[\"payload\"][\"data\"][\"repo_url\"].split(\"/\")[-1]\n rev = body[\"payload\"][\"data\"][\"heads\"][0]\n\n if branch in [\"autoland\", \"try\"]:\n user = body[\"payload\"][\"data\"][\"pushlog_pushes\"][0][\"user\"]\n if user in (\"reviewbot\", \"[email protected]\"):\n return\n\n url = \"{}/push/{}/{}/schedules\".format(BUGBUG_HTTP_SERVER, branch, rev)\n response = requests.get(url, headers={\"X-Api-Key\": \"pulse_listener\"})\n if response.status_code == 202:\n logger.info(\"Successfully requested {}/{}\".format(branch, rev))\n else:\n logger.warning(\n \"We got status: {} for: {}\".format(response.status_code, url)\n )\n except Exception:\n traceback.print_exc()\n finally:\n message.ack()\n\n\ndef main():\n # Generate user/password in https://pulseguardian.mozilla.org/\n # Set PULSE_USER and PULSE_PASSWORD as env variables\n user = os.environ.get(\"PULSE_USER\")\n password = os.environ.get(\"PULSE_PASSWORD\")\n if user and password:\n with HgPushesConsumer(user, password, _on_message) as consumer:\n consumer.run()\n else:\n logger.warning(\n \"The Pulse listener will be skipped unless you define PULSE_USER & PULSE_PASSWORD\"\n )\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "http_service/bugbug_http/listener.py"}]}
| 3,111 | 116 |
gh_patches_debug_61331
|
rasdani/github-patches
|
git_diff
|
nerfstudio-project__nerfstudio-913
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
tanh instead of tan bug
Hi,
please change tanh (hyperbolic tan) to tan
https://github.com/nerfstudio-project/nerfstudio/blob/1a24f3e58c544bc0211563e770d425426284256c/nerfstudio/data/dataparsers/instant_ngp_dataparser.py#L133
thanks
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nerfstudio/data/dataparsers/instant_ngp_dataparser.py`
Content:
```
1 # Copyright 2022 The Nerfstudio Team. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Data parser for instant ngp data"""
16
17 from __future__ import annotations
18
19 from dataclasses import dataclass, field
20 from pathlib import Path
21 from typing import Dict, Tuple, Type
22
23 import numpy as np
24 import torch
25 from rich.console import Console
26
27 from nerfstudio.cameras import camera_utils
28 from nerfstudio.cameras.cameras import Cameras, CameraType
29 from nerfstudio.data.dataparsers.base_dataparser import (
30 DataParser,
31 DataParserConfig,
32 DataparserOutputs,
33 )
34 from nerfstudio.data.scene_box import SceneBox
35 from nerfstudio.utils.io import load_from_json
36
37 CONSOLE = Console(width=120)
38
39
40 @dataclass
41 class InstantNGPDataParserConfig(DataParserConfig):
42 """Instant-NGP dataset parser config"""
43
44 _target: Type = field(default_factory=lambda: InstantNGP)
45 """target class to instantiate"""
46 data: Path = Path("data/ours/posterv2")
47 """Directory specifying location of data."""
48 scale_factor: float = 1.0
49 """How much to scale the camera origins by."""
50 scene_scale: float = 0.33
51 """How much to scale the scene."""
52
53
54 @dataclass
55 class InstantNGP(DataParser):
56 """Instant NGP Dataset"""
57
58 config: InstantNGPDataParserConfig
59
60 def _generate_dataparser_outputs(self, split="train"):
61
62 meta = load_from_json(self.config.data / "transforms.json")
63 image_filenames = []
64 poses = []
65 num_skipped_image_filenames = 0
66 for frame in meta["frames"]:
67 fname = self.config.data / Path(frame["file_path"])
68 if not fname:
69 num_skipped_image_filenames += 1
70 else:
71 image_filenames.append(fname)
72 poses.append(np.array(frame["transform_matrix"]))
73 if num_skipped_image_filenames >= 0:
74 CONSOLE.print(f"Skipping {num_skipped_image_filenames} files in dataset split {split}.")
75 assert (
76 len(image_filenames) != 0
77 ), """
78 No image files found.
79 You should check the file_paths in the transforms.json file to make sure they are correct.
80 """
81 poses = np.array(poses).astype(np.float32)
82 poses[:, :3, 3] *= self.config.scene_scale
83
84 camera_to_world = torch.from_numpy(poses[:, :3]) # camera to world transform
85
86 distortion_params = camera_utils.get_distortion_params(
87 k1=float(meta["k1"]), k2=float(meta["k2"]), p1=float(meta["p1"]), p2=float(meta["p2"])
88 )
89
90 # in x,y,z order
91 # assumes that the scene is centered at the origin
92 aabb_scale = meta["aabb_scale"]
93 scene_box = SceneBox(
94 aabb=torch.tensor(
95 [[-aabb_scale, -aabb_scale, -aabb_scale], [aabb_scale, aabb_scale, aabb_scale]], dtype=torch.float32
96 )
97 )
98
99 fl_x, fl_y = InstantNGP.get_focal_lengths(meta)
100
101 cameras = Cameras(
102 fx=float(fl_x),
103 fy=float(fl_y),
104 cx=float(meta["cx"]),
105 cy=float(meta["cy"]),
106 distortion_params=distortion_params,
107 height=int(meta["h"]),
108 width=int(meta["w"]),
109 camera_to_worlds=camera_to_world,
110 camera_type=CameraType.PERSPECTIVE,
111 )
112
113 # TODO(ethan): add alpha background color
114 dataparser_outputs = DataparserOutputs(
115 image_filenames=image_filenames,
116 cameras=cameras,
117 scene_box=scene_box,
118 )
119
120 return dataparser_outputs
121
122 @classmethod
123 def get_focal_lengths(cls, meta: Dict) -> Tuple[float, float]:
124 """Reads or computes the focal length from transforms dict.
125 Args:
126 meta: metadata from transforms.json file.
127 Returns:
128 Focal lengths in the x and y directions. Error is raised if these cannot be calculated.
129 """
130 fl_x, fl_y = 0, 0
131
132 def fov_to_focal_length(rad, res):
133 return 0.5 * res / np.tanh(0.5 * rad)
134
135 if "fl_x" in meta:
136 fl_x = meta["fl_x"]
137 elif "x_fov" in meta:
138 fl_x = fov_to_focal_length(np.deg2rad(meta["x_fov"]), meta["w"])
139 elif "camera_angle_x" in meta:
140 fl_x = fov_to_focal_length(meta["camera_angle_x"], meta["w"])
141
142 if "fl_y" in meta:
143 fl_y = meta["fl_y"]
144 elif "y_fov" in meta:
145 fl_y = fov_to_focal_length(np.deg2rad(meta["y_fov"]), meta["h"])
146 elif "camera_angle_y" in meta:
147 fl_y = fov_to_focal_length(meta["camera_angle_y"], meta["h"])
148
149 if fl_x == 0 or fl_y == 0:
150 raise AttributeError("Focal length cannot be calculated from transforms.json (missing fields).")
151
152 return (fl_x, fl_y)
153
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/nerfstudio/data/dataparsers/instant_ngp_dataparser.py b/nerfstudio/data/dataparsers/instant_ngp_dataparser.py
--- a/nerfstudio/data/dataparsers/instant_ngp_dataparser.py
+++ b/nerfstudio/data/dataparsers/instant_ngp_dataparser.py
@@ -130,7 +130,7 @@
fl_x, fl_y = 0, 0
def fov_to_focal_length(rad, res):
- return 0.5 * res / np.tanh(0.5 * rad)
+ return 0.5 * res / np.tan(0.5 * rad)
if "fl_x" in meta:
fl_x = meta["fl_x"]
|
{"golden_diff": "diff --git a/nerfstudio/data/dataparsers/instant_ngp_dataparser.py b/nerfstudio/data/dataparsers/instant_ngp_dataparser.py\n--- a/nerfstudio/data/dataparsers/instant_ngp_dataparser.py\n+++ b/nerfstudio/data/dataparsers/instant_ngp_dataparser.py\n@@ -130,7 +130,7 @@\n fl_x, fl_y = 0, 0\n \n def fov_to_focal_length(rad, res):\n- return 0.5 * res / np.tanh(0.5 * rad)\n+ return 0.5 * res / np.tan(0.5 * rad)\n \n if \"fl_x\" in meta:\n fl_x = meta[\"fl_x\"]\n", "issue": "tanh instead of tan bug\nHi,\r\nplease change tanh (hyperbolic tan) to tan \r\n\r\nhttps://github.com/nerfstudio-project/nerfstudio/blob/1a24f3e58c544bc0211563e770d425426284256c/nerfstudio/data/dataparsers/instant_ngp_dataparser.py#L133\r\n\r\nthanks\n", "before_files": [{"content": "# Copyright 2022 The Nerfstudio Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Data parser for instant ngp data\"\"\"\n\nfrom __future__ import annotations\n\nfrom dataclasses import dataclass, field\nfrom pathlib import Path\nfrom typing import Dict, Tuple, Type\n\nimport numpy as np\nimport torch\nfrom rich.console import Console\n\nfrom nerfstudio.cameras import camera_utils\nfrom nerfstudio.cameras.cameras import Cameras, CameraType\nfrom nerfstudio.data.dataparsers.base_dataparser import (\n DataParser,\n DataParserConfig,\n DataparserOutputs,\n)\nfrom nerfstudio.data.scene_box import SceneBox\nfrom nerfstudio.utils.io import load_from_json\n\nCONSOLE = Console(width=120)\n\n\n@dataclass\nclass InstantNGPDataParserConfig(DataParserConfig):\n \"\"\"Instant-NGP dataset parser config\"\"\"\n\n _target: Type = field(default_factory=lambda: InstantNGP)\n \"\"\"target class to instantiate\"\"\"\n data: Path = Path(\"data/ours/posterv2\")\n \"\"\"Directory specifying location of data.\"\"\"\n scale_factor: float = 1.0\n \"\"\"How much to scale the camera origins by.\"\"\"\n scene_scale: float = 0.33\n \"\"\"How much to scale the scene.\"\"\"\n\n\n@dataclass\nclass InstantNGP(DataParser):\n \"\"\"Instant NGP Dataset\"\"\"\n\n config: InstantNGPDataParserConfig\n\n def _generate_dataparser_outputs(self, split=\"train\"):\n\n meta = load_from_json(self.config.data / \"transforms.json\")\n image_filenames = []\n poses = []\n num_skipped_image_filenames = 0\n for frame in meta[\"frames\"]:\n fname = self.config.data / Path(frame[\"file_path\"])\n if not fname:\n num_skipped_image_filenames += 1\n else:\n image_filenames.append(fname)\n poses.append(np.array(frame[\"transform_matrix\"]))\n if num_skipped_image_filenames >= 0:\n CONSOLE.print(f\"Skipping {num_skipped_image_filenames} files in dataset split {split}.\")\n assert (\n len(image_filenames) != 0\n ), \"\"\"\n No image files found. \n You should check the file_paths in the transforms.json file to make sure they are correct.\n \"\"\"\n poses = np.array(poses).astype(np.float32)\n poses[:, :3, 3] *= self.config.scene_scale\n\n camera_to_world = torch.from_numpy(poses[:, :3]) # camera to world transform\n\n distortion_params = camera_utils.get_distortion_params(\n k1=float(meta[\"k1\"]), k2=float(meta[\"k2\"]), p1=float(meta[\"p1\"]), p2=float(meta[\"p2\"])\n )\n\n # in x,y,z order\n # assumes that the scene is centered at the origin\n aabb_scale = meta[\"aabb_scale\"]\n scene_box = SceneBox(\n aabb=torch.tensor(\n [[-aabb_scale, -aabb_scale, -aabb_scale], [aabb_scale, aabb_scale, aabb_scale]], dtype=torch.float32\n )\n )\n\n fl_x, fl_y = InstantNGP.get_focal_lengths(meta)\n\n cameras = Cameras(\n fx=float(fl_x),\n fy=float(fl_y),\n cx=float(meta[\"cx\"]),\n cy=float(meta[\"cy\"]),\n distortion_params=distortion_params,\n height=int(meta[\"h\"]),\n width=int(meta[\"w\"]),\n camera_to_worlds=camera_to_world,\n camera_type=CameraType.PERSPECTIVE,\n )\n\n # TODO(ethan): add alpha background color\n dataparser_outputs = DataparserOutputs(\n image_filenames=image_filenames,\n cameras=cameras,\n scene_box=scene_box,\n )\n\n return dataparser_outputs\n\n @classmethod\n def get_focal_lengths(cls, meta: Dict) -> Tuple[float, float]:\n \"\"\"Reads or computes the focal length from transforms dict.\n Args:\n meta: metadata from transforms.json file.\n Returns:\n Focal lengths in the x and y directions. Error is raised if these cannot be calculated.\n \"\"\"\n fl_x, fl_y = 0, 0\n\n def fov_to_focal_length(rad, res):\n return 0.5 * res / np.tanh(0.5 * rad)\n\n if \"fl_x\" in meta:\n fl_x = meta[\"fl_x\"]\n elif \"x_fov\" in meta:\n fl_x = fov_to_focal_length(np.deg2rad(meta[\"x_fov\"]), meta[\"w\"])\n elif \"camera_angle_x\" in meta:\n fl_x = fov_to_focal_length(meta[\"camera_angle_x\"], meta[\"w\"])\n\n if \"fl_y\" in meta:\n fl_y = meta[\"fl_y\"]\n elif \"y_fov\" in meta:\n fl_y = fov_to_focal_length(np.deg2rad(meta[\"y_fov\"]), meta[\"h\"])\n elif \"camera_angle_y\" in meta:\n fl_y = fov_to_focal_length(meta[\"camera_angle_y\"], meta[\"h\"])\n\n if fl_x == 0 or fl_y == 0:\n raise AttributeError(\"Focal length cannot be calculated from transforms.json (missing fields).\")\n\n return (fl_x, fl_y)\n", "path": "nerfstudio/data/dataparsers/instant_ngp_dataparser.py"}], "after_files": [{"content": "# Copyright 2022 The Nerfstudio Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Data parser for instant ngp data\"\"\"\n\nfrom __future__ import annotations\n\nfrom dataclasses import dataclass, field\nfrom pathlib import Path\nfrom typing import Dict, Tuple, Type\n\nimport numpy as np\nimport torch\nfrom rich.console import Console\n\nfrom nerfstudio.cameras import camera_utils\nfrom nerfstudio.cameras.cameras import Cameras, CameraType\nfrom nerfstudio.data.dataparsers.base_dataparser import (\n DataParser,\n DataParserConfig,\n DataparserOutputs,\n)\nfrom nerfstudio.data.scene_box import SceneBox\nfrom nerfstudio.utils.io import load_from_json\n\nCONSOLE = Console(width=120)\n\n\n@dataclass\nclass InstantNGPDataParserConfig(DataParserConfig):\n \"\"\"Instant-NGP dataset parser config\"\"\"\n\n _target: Type = field(default_factory=lambda: InstantNGP)\n \"\"\"target class to instantiate\"\"\"\n data: Path = Path(\"data/ours/posterv2\")\n \"\"\"Directory specifying location of data.\"\"\"\n scale_factor: float = 1.0\n \"\"\"How much to scale the camera origins by.\"\"\"\n scene_scale: float = 0.33\n \"\"\"How much to scale the scene.\"\"\"\n\n\n@dataclass\nclass InstantNGP(DataParser):\n \"\"\"Instant NGP Dataset\"\"\"\n\n config: InstantNGPDataParserConfig\n\n def _generate_dataparser_outputs(self, split=\"train\"):\n\n meta = load_from_json(self.config.data / \"transforms.json\")\n image_filenames = []\n poses = []\n num_skipped_image_filenames = 0\n for frame in meta[\"frames\"]:\n fname = self.config.data / Path(frame[\"file_path\"])\n if not fname:\n num_skipped_image_filenames += 1\n else:\n image_filenames.append(fname)\n poses.append(np.array(frame[\"transform_matrix\"]))\n if num_skipped_image_filenames >= 0:\n CONSOLE.print(f\"Skipping {num_skipped_image_filenames} files in dataset split {split}.\")\n assert (\n len(image_filenames) != 0\n ), \"\"\"\n No image files found. \n You should check the file_paths in the transforms.json file to make sure they are correct.\n \"\"\"\n poses = np.array(poses).astype(np.float32)\n poses[:, :3, 3] *= self.config.scene_scale\n\n camera_to_world = torch.from_numpy(poses[:, :3]) # camera to world transform\n\n distortion_params = camera_utils.get_distortion_params(\n k1=float(meta[\"k1\"]), k2=float(meta[\"k2\"]), p1=float(meta[\"p1\"]), p2=float(meta[\"p2\"])\n )\n\n # in x,y,z order\n # assumes that the scene is centered at the origin\n aabb_scale = meta[\"aabb_scale\"]\n scene_box = SceneBox(\n aabb=torch.tensor(\n [[-aabb_scale, -aabb_scale, -aabb_scale], [aabb_scale, aabb_scale, aabb_scale]], dtype=torch.float32\n )\n )\n\n fl_x, fl_y = InstantNGP.get_focal_lengths(meta)\n\n cameras = Cameras(\n fx=float(fl_x),\n fy=float(fl_y),\n cx=float(meta[\"cx\"]),\n cy=float(meta[\"cy\"]),\n distortion_params=distortion_params,\n height=int(meta[\"h\"]),\n width=int(meta[\"w\"]),\n camera_to_worlds=camera_to_world,\n camera_type=CameraType.PERSPECTIVE,\n )\n\n # TODO(ethan): add alpha background color\n dataparser_outputs = DataparserOutputs(\n image_filenames=image_filenames,\n cameras=cameras,\n scene_box=scene_box,\n )\n\n return dataparser_outputs\n\n @classmethod\n def get_focal_lengths(cls, meta: Dict) -> Tuple[float, float]:\n \"\"\"Reads or computes the focal length from transforms dict.\n Args:\n meta: metadata from transforms.json file.\n Returns:\n Focal lengths in the x and y directions. Error is raised if these cannot be calculated.\n \"\"\"\n fl_x, fl_y = 0, 0\n\n def fov_to_focal_length(rad, res):\n return 0.5 * res / np.tan(0.5 * rad)\n\n if \"fl_x\" in meta:\n fl_x = meta[\"fl_x\"]\n elif \"x_fov\" in meta:\n fl_x = fov_to_focal_length(np.deg2rad(meta[\"x_fov\"]), meta[\"w\"])\n elif \"camera_angle_x\" in meta:\n fl_x = fov_to_focal_length(meta[\"camera_angle_x\"], meta[\"w\"])\n\n if \"fl_y\" in meta:\n fl_y = meta[\"fl_y\"]\n elif \"y_fov\" in meta:\n fl_y = fov_to_focal_length(np.deg2rad(meta[\"y_fov\"]), meta[\"h\"])\n elif \"camera_angle_y\" in meta:\n fl_y = fov_to_focal_length(meta[\"camera_angle_y\"], meta[\"h\"])\n\n if fl_x == 0 or fl_y == 0:\n raise AttributeError(\"Focal length cannot be calculated from transforms.json (missing fields).\")\n\n return (fl_x, fl_y)\n", "path": "nerfstudio/data/dataparsers/instant_ngp_dataparser.py"}]}
| 1,978 | 173 |
gh_patches_debug_23161
|
rasdani/github-patches
|
git_diff
|
getsentry__sentry-python-2359
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
sentry-sdk stopped working with falcon 3+ asgi app
### How do you use Sentry?
Sentry Saas (sentry.io)
### Version
1.16.0
### Steps to Reproduce
I created a test repo where the bug can be reproduced -> https://github.com/detoyz/sentry_falcon_asgi_bug . There are also details on how to setup project & reproduce the error.
However a small summary here:
1. Install sentry-sdk 1.16.0
2. Install falcon
3. Init `falcon.asgi.App()` and try to use it via [falcon-inspect-app](https://falcon.readthedocs.io/en/stable/api/inspect.html)
4. **See error from sentry's falcon patching middleware**
5. Downgrade sentry sdk to 1.15.0
6. Your `falcon.asgi.App()` and `falcon-inspect-app` are working as expected
### Expected Result
By using sentry-sdk anywhere in the code it should not break during runtime because of any `falcon.asgi.App()` isntances
### Actual Result
Runtime errors while using `falcon.asgi.App()` isntances with sentry-sdk client
```
TypeError: _patch_prepare_middleware.<locals>.sentry_patched_prepare_middleware() takes from 0 to 2 positional arguments but 3 were given
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sentry_sdk/integrations/falcon.py`
Content:
```
1 from __future__ import absolute_import
2
3 from sentry_sdk.hub import Hub
4 from sentry_sdk.integrations import Integration, DidNotEnable
5 from sentry_sdk.integrations._wsgi_common import RequestExtractor
6 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
7 from sentry_sdk.tracing import SOURCE_FOR_STYLE
8 from sentry_sdk.utils import (
9 capture_internal_exceptions,
10 event_from_exception,
11 parse_version,
12 )
13
14 from sentry_sdk._types import TYPE_CHECKING
15
16 if TYPE_CHECKING:
17 from typing import Any
18 from typing import Dict
19 from typing import Optional
20
21 from sentry_sdk._types import EventProcessor
22
23 # In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers`
24 # and `falcon.API` to `falcon.App`
25
26 try:
27 import falcon # type: ignore
28
29 from falcon import __version__ as FALCON_VERSION
30 except ImportError:
31 raise DidNotEnable("Falcon not installed")
32
33 try:
34 import falcon.app_helpers # type: ignore
35
36 falcon_helpers = falcon.app_helpers
37 falcon_app_class = falcon.App
38 FALCON3 = True
39 except ImportError:
40 import falcon.api_helpers # type: ignore
41
42 falcon_helpers = falcon.api_helpers
43 falcon_app_class = falcon.API
44 FALCON3 = False
45
46
47 class FalconRequestExtractor(RequestExtractor):
48 def env(self):
49 # type: () -> Dict[str, Any]
50 return self.request.env
51
52 def cookies(self):
53 # type: () -> Dict[str, Any]
54 return self.request.cookies
55
56 def form(self):
57 # type: () -> None
58 return None # No such concept in Falcon
59
60 def files(self):
61 # type: () -> None
62 return None # No such concept in Falcon
63
64 def raw_data(self):
65 # type: () -> Optional[str]
66
67 # As request data can only be read once we won't make this available
68 # to Sentry. Just send back a dummy string in case there was a
69 # content length.
70 # TODO(jmagnusson): Figure out if there's a way to support this
71 content_length = self.content_length()
72 if content_length > 0:
73 return "[REQUEST_CONTAINING_RAW_DATA]"
74 else:
75 return None
76
77 if FALCON3:
78
79 def json(self):
80 # type: () -> Optional[Dict[str, Any]]
81 try:
82 return self.request.media
83 except falcon.errors.HTTPBadRequest:
84 return None
85
86 else:
87
88 def json(self):
89 # type: () -> Optional[Dict[str, Any]]
90 try:
91 return self.request.media
92 except falcon.errors.HTTPBadRequest:
93 # NOTE(jmagnusson): We return `falcon.Request._media` here because
94 # falcon 1.4 doesn't do proper type checking in
95 # `falcon.Request.media`. This has been fixed in 2.0.
96 # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953
97 return self.request._media
98
99
100 class SentryFalconMiddleware(object):
101 """Captures exceptions in Falcon requests and send to Sentry"""
102
103 def process_request(self, req, resp, *args, **kwargs):
104 # type: (Any, Any, *Any, **Any) -> None
105 hub = Hub.current
106 integration = hub.get_integration(FalconIntegration)
107 if integration is None:
108 return
109
110 with hub.configure_scope() as scope:
111 scope._name = "falcon"
112 scope.add_event_processor(_make_request_event_processor(req, integration))
113
114
115 TRANSACTION_STYLE_VALUES = ("uri_template", "path")
116
117
118 class FalconIntegration(Integration):
119 identifier = "falcon"
120
121 transaction_style = ""
122
123 def __init__(self, transaction_style="uri_template"):
124 # type: (str) -> None
125 if transaction_style not in TRANSACTION_STYLE_VALUES:
126 raise ValueError(
127 "Invalid value for transaction_style: %s (must be in %s)"
128 % (transaction_style, TRANSACTION_STYLE_VALUES)
129 )
130 self.transaction_style = transaction_style
131
132 @staticmethod
133 def setup_once():
134 # type: () -> None
135
136 version = parse_version(FALCON_VERSION)
137
138 if version is None:
139 raise DidNotEnable("Unparsable Falcon version: {}".format(FALCON_VERSION))
140
141 if version < (1, 4):
142 raise DidNotEnable("Falcon 1.4 or newer required.")
143
144 _patch_wsgi_app()
145 _patch_handle_exception()
146 _patch_prepare_middleware()
147
148
149 def _patch_wsgi_app():
150 # type: () -> None
151 original_wsgi_app = falcon_app_class.__call__
152
153 def sentry_patched_wsgi_app(self, env, start_response):
154 # type: (falcon.API, Any, Any) -> Any
155 hub = Hub.current
156 integration = hub.get_integration(FalconIntegration)
157 if integration is None:
158 return original_wsgi_app(self, env, start_response)
159
160 sentry_wrapped = SentryWsgiMiddleware(
161 lambda envi, start_resp: original_wsgi_app(self, envi, start_resp)
162 )
163
164 return sentry_wrapped(env, start_response)
165
166 falcon_app_class.__call__ = sentry_patched_wsgi_app
167
168
169 def _patch_handle_exception():
170 # type: () -> None
171 original_handle_exception = falcon_app_class._handle_exception
172
173 def sentry_patched_handle_exception(self, *args):
174 # type: (falcon.API, *Any) -> Any
175 # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception
176 # method signature from `(ex, req, resp, params)` to
177 # `(req, resp, ex, params)`
178 if isinstance(args[0], Exception):
179 ex = args[0]
180 else:
181 ex = args[2]
182
183 was_handled = original_handle_exception(self, *args)
184
185 hub = Hub.current
186 integration = hub.get_integration(FalconIntegration)
187
188 if integration is not None and _exception_leads_to_http_5xx(ex):
189 # If an integration is there, a client has to be there.
190 client = hub.client # type: Any
191
192 event, hint = event_from_exception(
193 ex,
194 client_options=client.options,
195 mechanism={"type": "falcon", "handled": False},
196 )
197 hub.capture_event(event, hint=hint)
198
199 return was_handled
200
201 falcon_app_class._handle_exception = sentry_patched_handle_exception
202
203
204 def _patch_prepare_middleware():
205 # type: () -> None
206 original_prepare_middleware = falcon_helpers.prepare_middleware
207
208 def sentry_patched_prepare_middleware(
209 middleware=None, independent_middleware=False
210 ):
211 # type: (Any, Any) -> Any
212 hub = Hub.current
213 integration = hub.get_integration(FalconIntegration)
214 if integration is not None:
215 middleware = [SentryFalconMiddleware()] + (middleware or [])
216 return original_prepare_middleware(middleware, independent_middleware)
217
218 falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware
219
220
221 def _exception_leads_to_http_5xx(ex):
222 # type: (Exception) -> bool
223 is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith(
224 "5"
225 )
226 is_unhandled_error = not isinstance(
227 ex, (falcon.HTTPError, falcon.http_status.HTTPStatus)
228 )
229 return is_server_error or is_unhandled_error
230
231
232 def _set_transaction_name_and_source(event, transaction_style, request):
233 # type: (Dict[str, Any], str, falcon.Request) -> None
234 name_for_style = {
235 "uri_template": request.uri_template,
236 "path": request.path,
237 }
238 event["transaction"] = name_for_style[transaction_style]
239 event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
240
241
242 def _make_request_event_processor(req, integration):
243 # type: (falcon.Request, FalconIntegration) -> EventProcessor
244
245 def event_processor(event, hint):
246 # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
247 _set_transaction_name_and_source(event, integration.transaction_style, req)
248
249 with capture_internal_exceptions():
250 FalconRequestExtractor(req).extract_into_event(event)
251
252 return event
253
254 return event_processor
255
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py
--- a/sentry_sdk/integrations/falcon.py
+++ b/sentry_sdk/integrations/falcon.py
@@ -206,13 +206,20 @@
original_prepare_middleware = falcon_helpers.prepare_middleware
def sentry_patched_prepare_middleware(
- middleware=None, independent_middleware=False
+ middleware=None, independent_middleware=False, asgi=False
):
- # type: (Any, Any) -> Any
+ # type: (Any, Any, bool) -> Any
+ if asgi:
+ # We don't support ASGI Falcon apps, so we don't patch anything here
+ return original_prepare_middleware(middleware, independent_middleware, asgi)
+
hub = Hub.current
integration = hub.get_integration(FalconIntegration)
if integration is not None:
middleware = [SentryFalconMiddleware()] + (middleware or [])
+
+ # We intentionally omit the asgi argument here, since the default is False anyways,
+ # and this way, we remain backwards-compatible with pre-3.0.0 Falcon versions.
return original_prepare_middleware(middleware, independent_middleware)
falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware
|
{"golden_diff": "diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py\n--- a/sentry_sdk/integrations/falcon.py\n+++ b/sentry_sdk/integrations/falcon.py\n@@ -206,13 +206,20 @@\n original_prepare_middleware = falcon_helpers.prepare_middleware\n \n def sentry_patched_prepare_middleware(\n- middleware=None, independent_middleware=False\n+ middleware=None, independent_middleware=False, asgi=False\n ):\n- # type: (Any, Any) -> Any\n+ # type: (Any, Any, bool) -> Any\n+ if asgi:\n+ # We don't support ASGI Falcon apps, so we don't patch anything here\n+ return original_prepare_middleware(middleware, independent_middleware, asgi)\n+\n hub = Hub.current\n integration = hub.get_integration(FalconIntegration)\n if integration is not None:\n middleware = [SentryFalconMiddleware()] + (middleware or [])\n+\n+ # We intentionally omit the asgi argument here, since the default is False anyways,\n+ # and this way, we remain backwards-compatible with pre-3.0.0 Falcon versions.\n return original_prepare_middleware(middleware, independent_middleware)\n \n falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware\n", "issue": "sentry-sdk stopped working with falcon 3+ asgi app\n### How do you use Sentry?\n\nSentry Saas (sentry.io)\n\n### Version\n\n1.16.0\n\n### Steps to Reproduce\n\nI created a test repo where the bug can be reproduced -> https://github.com/detoyz/sentry_falcon_asgi_bug . There are also details on how to setup project & reproduce the error.\r\n\r\nHowever a small summary here:\r\n1. Install sentry-sdk 1.16.0\r\n2. Install falcon\r\n3. Init `falcon.asgi.App()` and try to use it via [falcon-inspect-app](https://falcon.readthedocs.io/en/stable/api/inspect.html)\r\n4. **See error from sentry's falcon patching middleware**\r\n5. Downgrade sentry sdk to 1.15.0\r\n6. Your `falcon.asgi.App()` and `falcon-inspect-app` are working as expected\r\n\n\n### Expected Result\n\nBy using sentry-sdk anywhere in the code it should not break during runtime because of any `falcon.asgi.App()` isntances\n\n### Actual Result\n\nRuntime errors while using `falcon.asgi.App()` isntances with sentry-sdk client\r\n```\r\nTypeError: _patch_prepare_middleware.<locals>.sentry_patched_prepare_middleware() takes from 0 to 2 positional arguments but 3 were given\r\n```\n", "before_files": [{"content": "from __future__ import absolute_import\n\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.integrations import Integration, DidNotEnable\nfrom sentry_sdk.integrations._wsgi_common import RequestExtractor\nfrom sentry_sdk.integrations.wsgi import SentryWsgiMiddleware\nfrom sentry_sdk.tracing import SOURCE_FOR_STYLE\nfrom sentry_sdk.utils import (\n capture_internal_exceptions,\n event_from_exception,\n parse_version,\n)\n\nfrom sentry_sdk._types import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from typing import Any\n from typing import Dict\n from typing import Optional\n\n from sentry_sdk._types import EventProcessor\n\n# In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers`\n# and `falcon.API` to `falcon.App`\n\ntry:\n import falcon # type: ignore\n\n from falcon import __version__ as FALCON_VERSION\nexcept ImportError:\n raise DidNotEnable(\"Falcon not installed\")\n\ntry:\n import falcon.app_helpers # type: ignore\n\n falcon_helpers = falcon.app_helpers\n falcon_app_class = falcon.App\n FALCON3 = True\nexcept ImportError:\n import falcon.api_helpers # type: ignore\n\n falcon_helpers = falcon.api_helpers\n falcon_app_class = falcon.API\n FALCON3 = False\n\n\nclass FalconRequestExtractor(RequestExtractor):\n def env(self):\n # type: () -> Dict[str, Any]\n return self.request.env\n\n def cookies(self):\n # type: () -> Dict[str, Any]\n return self.request.cookies\n\n def form(self):\n # type: () -> None\n return None # No such concept in Falcon\n\n def files(self):\n # type: () -> None\n return None # No such concept in Falcon\n\n def raw_data(self):\n # type: () -> Optional[str]\n\n # As request data can only be read once we won't make this available\n # to Sentry. Just send back a dummy string in case there was a\n # content length.\n # TODO(jmagnusson): Figure out if there's a way to support this\n content_length = self.content_length()\n if content_length > 0:\n return \"[REQUEST_CONTAINING_RAW_DATA]\"\n else:\n return None\n\n if FALCON3:\n\n def json(self):\n # type: () -> Optional[Dict[str, Any]]\n try:\n return self.request.media\n except falcon.errors.HTTPBadRequest:\n return None\n\n else:\n\n def json(self):\n # type: () -> Optional[Dict[str, Any]]\n try:\n return self.request.media\n except falcon.errors.HTTPBadRequest:\n # NOTE(jmagnusson): We return `falcon.Request._media` here because\n # falcon 1.4 doesn't do proper type checking in\n # `falcon.Request.media`. This has been fixed in 2.0.\n # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953\n return self.request._media\n\n\nclass SentryFalconMiddleware(object):\n \"\"\"Captures exceptions in Falcon requests and send to Sentry\"\"\"\n\n def process_request(self, req, resp, *args, **kwargs):\n # type: (Any, Any, *Any, **Any) -> None\n hub = Hub.current\n integration = hub.get_integration(FalconIntegration)\n if integration is None:\n return\n\n with hub.configure_scope() as scope:\n scope._name = \"falcon\"\n scope.add_event_processor(_make_request_event_processor(req, integration))\n\n\nTRANSACTION_STYLE_VALUES = (\"uri_template\", \"path\")\n\n\nclass FalconIntegration(Integration):\n identifier = \"falcon\"\n\n transaction_style = \"\"\n\n def __init__(self, transaction_style=\"uri_template\"):\n # type: (str) -> None\n if transaction_style not in TRANSACTION_STYLE_VALUES:\n raise ValueError(\n \"Invalid value for transaction_style: %s (must be in %s)\"\n % (transaction_style, TRANSACTION_STYLE_VALUES)\n )\n self.transaction_style = transaction_style\n\n @staticmethod\n def setup_once():\n # type: () -> None\n\n version = parse_version(FALCON_VERSION)\n\n if version is None:\n raise DidNotEnable(\"Unparsable Falcon version: {}\".format(FALCON_VERSION))\n\n if version < (1, 4):\n raise DidNotEnable(\"Falcon 1.4 or newer required.\")\n\n _patch_wsgi_app()\n _patch_handle_exception()\n _patch_prepare_middleware()\n\n\ndef _patch_wsgi_app():\n # type: () -> None\n original_wsgi_app = falcon_app_class.__call__\n\n def sentry_patched_wsgi_app(self, env, start_response):\n # type: (falcon.API, Any, Any) -> Any\n hub = Hub.current\n integration = hub.get_integration(FalconIntegration)\n if integration is None:\n return original_wsgi_app(self, env, start_response)\n\n sentry_wrapped = SentryWsgiMiddleware(\n lambda envi, start_resp: original_wsgi_app(self, envi, start_resp)\n )\n\n return sentry_wrapped(env, start_response)\n\n falcon_app_class.__call__ = sentry_patched_wsgi_app\n\n\ndef _patch_handle_exception():\n # type: () -> None\n original_handle_exception = falcon_app_class._handle_exception\n\n def sentry_patched_handle_exception(self, *args):\n # type: (falcon.API, *Any) -> Any\n # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception\n # method signature from `(ex, req, resp, params)` to\n # `(req, resp, ex, params)`\n if isinstance(args[0], Exception):\n ex = args[0]\n else:\n ex = args[2]\n\n was_handled = original_handle_exception(self, *args)\n\n hub = Hub.current\n integration = hub.get_integration(FalconIntegration)\n\n if integration is not None and _exception_leads_to_http_5xx(ex):\n # If an integration is there, a client has to be there.\n client = hub.client # type: Any\n\n event, hint = event_from_exception(\n ex,\n client_options=client.options,\n mechanism={\"type\": \"falcon\", \"handled\": False},\n )\n hub.capture_event(event, hint=hint)\n\n return was_handled\n\n falcon_app_class._handle_exception = sentry_patched_handle_exception\n\n\ndef _patch_prepare_middleware():\n # type: () -> None\n original_prepare_middleware = falcon_helpers.prepare_middleware\n\n def sentry_patched_prepare_middleware(\n middleware=None, independent_middleware=False\n ):\n # type: (Any, Any) -> Any\n hub = Hub.current\n integration = hub.get_integration(FalconIntegration)\n if integration is not None:\n middleware = [SentryFalconMiddleware()] + (middleware or [])\n return original_prepare_middleware(middleware, independent_middleware)\n\n falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware\n\n\ndef _exception_leads_to_http_5xx(ex):\n # type: (Exception) -> bool\n is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or \"\").startswith(\n \"5\"\n )\n is_unhandled_error = not isinstance(\n ex, (falcon.HTTPError, falcon.http_status.HTTPStatus)\n )\n return is_server_error or is_unhandled_error\n\n\ndef _set_transaction_name_and_source(event, transaction_style, request):\n # type: (Dict[str, Any], str, falcon.Request) -> None\n name_for_style = {\n \"uri_template\": request.uri_template,\n \"path\": request.path,\n }\n event[\"transaction\"] = name_for_style[transaction_style]\n event[\"transaction_info\"] = {\"source\": SOURCE_FOR_STYLE[transaction_style]}\n\n\ndef _make_request_event_processor(req, integration):\n # type: (falcon.Request, FalconIntegration) -> EventProcessor\n\n def event_processor(event, hint):\n # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]\n _set_transaction_name_and_source(event, integration.transaction_style, req)\n\n with capture_internal_exceptions():\n FalconRequestExtractor(req).extract_into_event(event)\n\n return event\n\n return event_processor\n", "path": "sentry_sdk/integrations/falcon.py"}], "after_files": [{"content": "from __future__ import absolute_import\n\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.integrations import Integration, DidNotEnable\nfrom sentry_sdk.integrations._wsgi_common import RequestExtractor\nfrom sentry_sdk.integrations.wsgi import SentryWsgiMiddleware\nfrom sentry_sdk.tracing import SOURCE_FOR_STYLE\nfrom sentry_sdk.utils import (\n capture_internal_exceptions,\n event_from_exception,\n parse_version,\n)\n\nfrom sentry_sdk._types import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from typing import Any\n from typing import Dict\n from typing import Optional\n\n from sentry_sdk._types import EventProcessor\n\n# In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers`\n# and `falcon.API` to `falcon.App`\n\ntry:\n import falcon # type: ignore\n\n from falcon import __version__ as FALCON_VERSION\nexcept ImportError:\n raise DidNotEnable(\"Falcon not installed\")\n\ntry:\n import falcon.app_helpers # type: ignore\n\n falcon_helpers = falcon.app_helpers\n falcon_app_class = falcon.App\n FALCON3 = True\nexcept ImportError:\n import falcon.api_helpers # type: ignore\n\n falcon_helpers = falcon.api_helpers\n falcon_app_class = falcon.API\n FALCON3 = False\n\n\nclass FalconRequestExtractor(RequestExtractor):\n def env(self):\n # type: () -> Dict[str, Any]\n return self.request.env\n\n def cookies(self):\n # type: () -> Dict[str, Any]\n return self.request.cookies\n\n def form(self):\n # type: () -> None\n return None # No such concept in Falcon\n\n def files(self):\n # type: () -> None\n return None # No such concept in Falcon\n\n def raw_data(self):\n # type: () -> Optional[str]\n\n # As request data can only be read once we won't make this available\n # to Sentry. Just send back a dummy string in case there was a\n # content length.\n # TODO(jmagnusson): Figure out if there's a way to support this\n content_length = self.content_length()\n if content_length > 0:\n return \"[REQUEST_CONTAINING_RAW_DATA]\"\n else:\n return None\n\n if FALCON3:\n\n def json(self):\n # type: () -> Optional[Dict[str, Any]]\n try:\n return self.request.media\n except falcon.errors.HTTPBadRequest:\n return None\n\n else:\n\n def json(self):\n # type: () -> Optional[Dict[str, Any]]\n try:\n return self.request.media\n except falcon.errors.HTTPBadRequest:\n # NOTE(jmagnusson): We return `falcon.Request._media` here because\n # falcon 1.4 doesn't do proper type checking in\n # `falcon.Request.media`. This has been fixed in 2.0.\n # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953\n return self.request._media\n\n\nclass SentryFalconMiddleware(object):\n \"\"\"Captures exceptions in Falcon requests and send to Sentry\"\"\"\n\n def process_request(self, req, resp, *args, **kwargs):\n # type: (Any, Any, *Any, **Any) -> None\n hub = Hub.current\n integration = hub.get_integration(FalconIntegration)\n if integration is None:\n return\n\n with hub.configure_scope() as scope:\n scope._name = \"falcon\"\n scope.add_event_processor(_make_request_event_processor(req, integration))\n\n\nTRANSACTION_STYLE_VALUES = (\"uri_template\", \"path\")\n\n\nclass FalconIntegration(Integration):\n identifier = \"falcon\"\n\n transaction_style = \"\"\n\n def __init__(self, transaction_style=\"uri_template\"):\n # type: (str) -> None\n if transaction_style not in TRANSACTION_STYLE_VALUES:\n raise ValueError(\n \"Invalid value for transaction_style: %s (must be in %s)\"\n % (transaction_style, TRANSACTION_STYLE_VALUES)\n )\n self.transaction_style = transaction_style\n\n @staticmethod\n def setup_once():\n # type: () -> None\n\n version = parse_version(FALCON_VERSION)\n\n if version is None:\n raise DidNotEnable(\"Unparsable Falcon version: {}\".format(FALCON_VERSION))\n\n if version < (1, 4):\n raise DidNotEnable(\"Falcon 1.4 or newer required.\")\n\n _patch_wsgi_app()\n _patch_handle_exception()\n _patch_prepare_middleware()\n\n\ndef _patch_wsgi_app():\n # type: () -> None\n original_wsgi_app = falcon_app_class.__call__\n\n def sentry_patched_wsgi_app(self, env, start_response):\n # type: (falcon.API, Any, Any) -> Any\n hub = Hub.current\n integration = hub.get_integration(FalconIntegration)\n if integration is None:\n return original_wsgi_app(self, env, start_response)\n\n sentry_wrapped = SentryWsgiMiddleware(\n lambda envi, start_resp: original_wsgi_app(self, envi, start_resp)\n )\n\n return sentry_wrapped(env, start_response)\n\n falcon_app_class.__call__ = sentry_patched_wsgi_app\n\n\ndef _patch_handle_exception():\n # type: () -> None\n original_handle_exception = falcon_app_class._handle_exception\n\n def sentry_patched_handle_exception(self, *args):\n # type: (falcon.API, *Any) -> Any\n # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception\n # method signature from `(ex, req, resp, params)` to\n # `(req, resp, ex, params)`\n if isinstance(args[0], Exception):\n ex = args[0]\n else:\n ex = args[2]\n\n was_handled = original_handle_exception(self, *args)\n\n hub = Hub.current\n integration = hub.get_integration(FalconIntegration)\n\n if integration is not None and _exception_leads_to_http_5xx(ex):\n # If an integration is there, a client has to be there.\n client = hub.client # type: Any\n\n event, hint = event_from_exception(\n ex,\n client_options=client.options,\n mechanism={\"type\": \"falcon\", \"handled\": False},\n )\n hub.capture_event(event, hint=hint)\n\n return was_handled\n\n falcon_app_class._handle_exception = sentry_patched_handle_exception\n\n\ndef _patch_prepare_middleware():\n # type: () -> None\n original_prepare_middleware = falcon_helpers.prepare_middleware\n\n def sentry_patched_prepare_middleware(\n middleware=None, independent_middleware=False, asgi=False\n ):\n # type: (Any, Any, bool) -> Any\n if asgi:\n # We don't support ASGI Falcon apps, so we don't patch anything here\n return original_prepare_middleware(middleware, independent_middleware, asgi)\n\n hub = Hub.current\n integration = hub.get_integration(FalconIntegration)\n if integration is not None:\n middleware = [SentryFalconMiddleware()] + (middleware or [])\n\n # We intentionally omit the asgi argument here, since the default is False anyways,\n # and this way, we remain backwards-compatible with pre-3.0.0 Falcon versions.\n return original_prepare_middleware(middleware, independent_middleware)\n\n falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware\n\n\ndef _exception_leads_to_http_5xx(ex):\n # type: (Exception) -> bool\n is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or \"\").startswith(\n \"5\"\n )\n is_unhandled_error = not isinstance(\n ex, (falcon.HTTPError, falcon.http_status.HTTPStatus)\n )\n return is_server_error or is_unhandled_error\n\n\ndef _set_transaction_name_and_source(event, transaction_style, request):\n # type: (Dict[str, Any], str, falcon.Request) -> None\n name_for_style = {\n \"uri_template\": request.uri_template,\n \"path\": request.path,\n }\n event[\"transaction\"] = name_for_style[transaction_style]\n event[\"transaction_info\"] = {\"source\": SOURCE_FOR_STYLE[transaction_style]}\n\n\ndef _make_request_event_processor(req, integration):\n # type: (falcon.Request, FalconIntegration) -> EventProcessor\n\n def event_processor(event, hint):\n # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]\n _set_transaction_name_and_source(event, integration.transaction_style, req)\n\n with capture_internal_exceptions():\n FalconRequestExtractor(req).extract_into_event(event)\n\n return event\n\n return event_processor\n", "path": "sentry_sdk/integrations/falcon.py"}]}
| 3,103 | 295 |
gh_patches_debug_19292
|
rasdani/github-patches
|
git_diff
|
scikit-image__scikit-image-5412
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add rolling ball example for ECG data
## Description
The demo I made for this response in the PyWavelets google group would be nice to include in our gallery to show that the "n-dimensional" implementation of rolling ball allows application to signals as well:
https://groups.google.com/g/pywavelets/c/b3ErI_GmNBo
Fortunately, the short ECG waveform used for the example there is already bundled by PyWavelets.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `doc/examples/segmentation/plot_rolling_ball.py`
Content:
```
1 """
2 ================================================================
3 Use rolling-ball algorithm for estimating background intensity
4 ================================================================
5
6 The rolling-ball algorithm estimates the background intensity of a grayscale
7 image in case of uneven exposure. It is frequently used in biomedical
8 image processing and was first proposed by Stanley R. Sternberg in
9 1983 [1]_.
10
11 The algorithm works as a filter and is quite intuitive. We think of the image
12 as a surface that has unit-sized blocks stacked on top of each other in place
13 of each pixel. The number of blocks, and hence surface height, is determined
14 by the intensity of the pixel. To get the intensity of the background at a
15 desired (pixel) position, we imagine submerging a ball under the surface at the
16 desired position. Once it is completely covered by the blocks, the apex of
17 the ball determines the intensity of the background at that position. We can
18 then *roll* this ball around below the surface to get the background values for
19 the entire image.
20
21 Scikit-image implements a general version of this rolling-ball algorithm, which
22 allows you to not just use balls, but arbitrary shapes as kernel and works on
23 n-dimensional ndimages. This allows you to directly filter RGB images or filter
24 image stacks along any (or all) spacial dimensions.
25
26 .. [1] Sternberg, Stanley R. "Biomedical image processing." Computer 1 (1983):
27 22-34. :DOI:`10.1109/MC.1983.1654163`
28
29
30 Classic rolling ball
31 -------------------------------
32
33 In scikit-image, the rolling ball algorithm assumes that your background has
34 low intensity (black), whereas the features have high intensity (white). If
35 this is the case for your image, you can directly use the filter like so:
36
37 """
38
39 import imageio
40 import matplotlib.pyplot as plt
41 import numpy as np
42
43 from skimage import (
44 data, restoration, util
45 )
46
47
48 def plot_result(image, background):
49 fig, ax = plt.subplots(nrows=1, ncols=3)
50
51 ax[0].imshow(image, cmap='gray')
52 ax[0].set_title('Original image')
53 ax[0].axis('off')
54
55 ax[1].imshow(background, cmap='gray')
56 ax[1].set_title('Background')
57 ax[1].axis('off')
58
59 ax[2].imshow(image - background, cmap='gray')
60 ax[2].set_title('Result')
61 ax[2].axis('off')
62
63 fig.tight_layout()
64
65
66 image = data.coins()
67
68 background = restoration.rolling_ball(image)
69
70 plot_result(image, background)
71 plt.show()
72
73 ######################################################################
74 # White background
75 # ----------------
76 #
77 # If you have dark features on a bright background, you need to invert
78 # the image before you pass it into the algorithm, and then invert the
79 # result. This can be accomplished via:
80
81 image = data.page()
82 image_inverted = util.invert(image)
83
84 background_inverted = restoration.rolling_ball(image_inverted, radius=45)
85 filtered_image_inverted = image_inverted - background_inverted
86 filtered_image = util.invert(filtered_image_inverted)
87 background = util.invert(background_inverted)
88
89 fig, ax = plt.subplots(nrows=1, ncols=3)
90
91 ax[0].imshow(image, cmap='gray')
92 ax[0].set_title('Original image')
93 ax[0].axis('off')
94
95 ax[1].imshow(background, cmap='gray')
96 ax[1].set_title('Background')
97 ax[1].axis('off')
98
99 ax[2].imshow(filtered_image, cmap='gray')
100 ax[2].set_title('Result')
101 ax[2].axis('off')
102
103 fig.tight_layout()
104
105 plt.show()
106
107 ######################################################################
108 # Be careful not to fall victim to an integer underflow when subtracting
109 # a bright background. For example, this code looks correct, but may
110 # suffer from an underflow leading to unwanted artifacts. You can see
111 # this in the top right corner of the visualization.
112
113 image = data.page()
114 image_inverted = util.invert(image)
115
116 background_inverted = restoration.rolling_ball(image_inverted, radius=45)
117 background = util.invert(background_inverted)
118 underflow_image = image - background # integer underflow occurs here
119
120 # correct subtraction
121 correct_image = util.invert(image_inverted - background_inverted)
122
123 fig, ax = plt.subplots(nrows=1, ncols=2)
124
125 ax[0].imshow(underflow_image, cmap='gray')
126 ax[0].set_title('Background Removal with Underflow')
127 ax[0].axis('off')
128
129 ax[1].imshow(correct_image, cmap='gray')
130 ax[1].set_title('Correct Background Removal')
131 ax[1].axis('off')
132
133 fig.tight_layout()
134
135 plt.show()
136
137 ######################################################################
138 # Image Datatypes
139 # ---------------
140 #
141 # ``rolling_ball`` can handle datatypes other than `np.uint8`. You can
142 # pass them into the function in the same way.
143
144 image = data.coins()[:200, :200].astype(np.uint16)
145
146 background = restoration.rolling_ball(image, radius=70.5)
147 plot_result(image, background)
148 plt.show()
149
150 ######################################################################
151 # However, you need to be careful if you use floating point images
152 # that have been normalized to ``[0, 1]``. In this case the ball will
153 # be much larger than the image intensity, which can lead to
154 # unexpected results.
155
156 image = util.img_as_float(data.coins()[:200, :200])
157
158 background = restoration.rolling_ball(image, radius=70.5)
159 plot_result(image, background)
160 plt.show()
161
162 ######################################################################
163 # Because ``radius=70.5`` is much larger than the maximum intensity of
164 # the image, the effective kernel size is reduced significantly, i.e.,
165 # only a small cap (approximately ``radius=10``) of the ball is rolled
166 # around in the image. You can find a reproduction of this strange
167 # effect in the ``Advanced Shapes`` section below.
168 #
169 # To get the expected result, you need to reduce the intensity of the
170 # kernel. This is done by specifying the kernel manually using the
171 # ``kernel`` argument.
172 #
173 # Note: The radius is equal to the length of a semi-axis of an
174 # ellipsis, which is *half* a full axis. Hence, the kernel shape is
175 # multipled by two.
176
177 normalized_radius = 70.5 / 255
178 image = util.img_as_float(data.coins())
179 kernel = restoration.ellipsoid_kernel(
180 (70.5 * 2, 70.5 * 2),
181 normalized_radius * 2
182 )
183
184 background = restoration.rolling_ball(
185 image,
186 kernel=kernel
187 )
188 plot_result(image, background)
189 plt.show()
190
191 ######################################################################
192 # Advanced Shapes
193 # -----------------
194 #
195 # By default, ``rolling_ball`` uses a ball shaped kernel (surprise).
196 # Sometimes, this can be too limiting - as in the example above -,
197 # because the intensity dimension has a different scale compared to
198 # the spatial dimensions, or because the image dimensions may have
199 # different meanings - one could be a stack counter in an image stack.
200 #
201 # To account for this, ``rolling_ball`` has a ``kernel`` argument
202 # which allows you to specify the kernel to be used. A kernel must
203 # have the same dimensionality as the image (Note: dimensionality,
204 # not shape). To help with it's creation, two default kernels are
205 # provided by ``skimage``. ``ball_kernel`` specifies a ball shaped
206 # kernel and is used as the default kernel. ``ellipsoid_kernel``
207 # specifies an ellipsoid shaped kernel.
208
209 image = data.coins()
210 kernel = restoration.ellipsoid_kernel(
211 (70.5 * 2, 70.5 * 2),
212 70.5 * 2
213 )
214
215 background = restoration.rolling_ball(
216 image,
217 kernel=kernel
218 )
219 plot_result(image, background)
220 plt.show()
221
222 ######################################################################
223 # You can also use ``ellipsoid_kernel`` to recreate the previous,
224 # unexpected result and see that the effective (spatial) filter size
225 # was reduced.
226
227 image = data.coins()
228
229 kernel = restoration.ellipsoid_kernel(
230 (10 * 2, 10 * 2),
231 255 * 2
232 )
233
234 background = restoration.rolling_ball(
235 image,
236 kernel=kernel
237 )
238 plot_result(image, background)
239 plt.show()
240
241 ######################################################################
242 # Higher Dimensions
243 # -----------------
244 #
245 # Another feature of ``rolling_ball`` is that you can directly
246 # apply it to higher dimensional images, e.g., a z-stack of images
247 # obtained during confocal microscopy. The number of kernel
248 # dimensions must match the image dimensions, hence the kernel shape
249 # is now 3 dimensional.
250
251 image = data.cells3d()[:, 1, ...]
252 background = restoration.rolling_ball(
253 image,
254 kernel=restoration.ellipsoid_kernel(
255 (1, 21, 21),
256 0.1
257 )
258 )
259
260 plot_result(image[30, ...], background[30, ...])
261 plt.show()
262
263 ######################################################################
264 # A kernel size of 1 does not filter along this axis. In other words,
265 # above filter is applied to each image in the stack individually.
266 #
267 # However, you can also filter along all 3 dimensions at the same
268 # time by specifying a value other than 1.
269
270 image = data.cells3d()[:, 1, ...]
271 background = restoration.rolling_ball(
272 image,
273 kernel=restoration.ellipsoid_kernel(
274 (5, 21, 21),
275 0.1
276 )
277 )
278
279 plot_result(image[30, ...], background[30, ...])
280 plt.show()
281
282 ######################################################################
283 # Another possibility is to filter individual pixels along the
284 # planar axis (z-stack axis).
285
286 image = data.cells3d()[:, 1, ...]
287 background = restoration.rolling_ball(
288 image,
289 kernel=restoration.ellipsoid_kernel(
290 (100, 1, 1),
291 0.1
292 )
293 )
294
295 plot_result(image[30, ...], background[30, ...])
296 plt.show()
297
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/doc/examples/segmentation/plot_rolling_ball.py b/doc/examples/segmentation/plot_rolling_ball.py
--- a/doc/examples/segmentation/plot_rolling_ball.py
+++ b/doc/examples/segmentation/plot_rolling_ball.py
@@ -39,6 +39,7 @@
import imageio
import matplotlib.pyplot as plt
import numpy as np
+import pywt
from skimage import (
data, restoration, util
@@ -294,3 +295,23 @@
plot_result(image[30, ...], background[30, ...])
plt.show()
+
+######################################################################
+# 1D Signal Filtering
+# -------------------
+#
+# As another example of the n-dimensional feature of
+# ``rolling_ball``, we show an implementation for 1D data. Here,
+# we are interested in removing the background signal of an ECG waveform
+# to detect prominent peaks (higher values than the local baseline).
+# Smoother peaks can be removed with smaller values of the radius.
+
+x = pywt.data.ecg()
+background = restoration.rolling_ball(x, radius=80)
+background2 = restoration.rolling_ball(x, radius=10)
+plt.figure()
+plt.plot(x, label='original')
+plt.plot(x - background, label='radius=80')
+plt.plot(x - background2, label='radius=10')
+plt.legend()
+plt.show()
|
{"golden_diff": "diff --git a/doc/examples/segmentation/plot_rolling_ball.py b/doc/examples/segmentation/plot_rolling_ball.py\n--- a/doc/examples/segmentation/plot_rolling_ball.py\n+++ b/doc/examples/segmentation/plot_rolling_ball.py\n@@ -39,6 +39,7 @@\n import imageio\n import matplotlib.pyplot as plt\n import numpy as np\n+import pywt\n \n from skimage import (\n data, restoration, util\n@@ -294,3 +295,23 @@\n \n plot_result(image[30, ...], background[30, ...])\n plt.show()\n+\n+######################################################################\n+# 1D Signal Filtering\n+# -------------------\n+#\n+# As another example of the n-dimensional feature of\n+# ``rolling_ball``, we show an implementation for 1D data. Here,\n+# we are interested in removing the background signal of an ECG waveform\n+# to detect prominent peaks (higher values than the local baseline).\n+# Smoother peaks can be removed with smaller values of the radius.\n+\n+x = pywt.data.ecg()\n+background = restoration.rolling_ball(x, radius=80)\n+background2 = restoration.rolling_ball(x, radius=10)\n+plt.figure()\n+plt.plot(x, label='original')\n+plt.plot(x - background, label='radius=80')\n+plt.plot(x - background2, label='radius=10')\n+plt.legend()\n+plt.show()\n", "issue": "Add rolling ball example for ECG data\n## Description\r\n\r\nThe demo I made for this response in the PyWavelets google group would be nice to include in our gallery to show that the \"n-dimensional\" implementation of rolling ball allows application to signals as well:\r\nhttps://groups.google.com/g/pywavelets/c/b3ErI_GmNBo\r\n\r\nFortunately, the short ECG waveform used for the example there is already bundled by PyWavelets.\r\n\r\n\n", "before_files": [{"content": "\"\"\"\n================================================================\nUse rolling-ball algorithm for estimating background intensity\n================================================================\n\nThe rolling-ball algorithm estimates the background intensity of a grayscale\nimage in case of uneven exposure. It is frequently used in biomedical\nimage processing and was first proposed by Stanley R. Sternberg in\n1983 [1]_.\n\nThe algorithm works as a filter and is quite intuitive. We think of the image\nas a surface that has unit-sized blocks stacked on top of each other in place\nof each pixel. The number of blocks, and hence surface height, is determined\nby the intensity of the pixel. To get the intensity of the background at a\ndesired (pixel) position, we imagine submerging a ball under the surface at the\ndesired position. Once it is completely covered by the blocks, the apex of\nthe ball determines the intensity of the background at that position. We can\nthen *roll* this ball around below the surface to get the background values for\nthe entire image.\n\nScikit-image implements a general version of this rolling-ball algorithm, which\nallows you to not just use balls, but arbitrary shapes as kernel and works on\nn-dimensional ndimages. This allows you to directly filter RGB images or filter\nimage stacks along any (or all) spacial dimensions.\n\n.. [1] Sternberg, Stanley R. \"Biomedical image processing.\" Computer 1 (1983):\n 22-34. :DOI:`10.1109/MC.1983.1654163`\n\n\nClassic rolling ball\n-------------------------------\n\nIn scikit-image, the rolling ball algorithm assumes that your background has\nlow intensity (black), whereas the features have high intensity (white). If\nthis is the case for your image, you can directly use the filter like so:\n\n\"\"\"\n\nimport imageio\nimport matplotlib.pyplot as plt\nimport numpy as np\n\nfrom skimage import (\n data, restoration, util\n)\n\n\ndef plot_result(image, background):\n fig, ax = plt.subplots(nrows=1, ncols=3)\n\n ax[0].imshow(image, cmap='gray')\n ax[0].set_title('Original image')\n ax[0].axis('off')\n\n ax[1].imshow(background, cmap='gray')\n ax[1].set_title('Background')\n ax[1].axis('off')\n\n ax[2].imshow(image - background, cmap='gray')\n ax[2].set_title('Result')\n ax[2].axis('off')\n\n fig.tight_layout()\n\n\nimage = data.coins()\n\nbackground = restoration.rolling_ball(image)\n\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# White background\n# ----------------\n#\n# If you have dark features on a bright background, you need to invert\n# the image before you pass it into the algorithm, and then invert the\n# result. This can be accomplished via:\n\nimage = data.page()\nimage_inverted = util.invert(image)\n\nbackground_inverted = restoration.rolling_ball(image_inverted, radius=45)\nfiltered_image_inverted = image_inverted - background_inverted\nfiltered_image = util.invert(filtered_image_inverted)\nbackground = util.invert(background_inverted)\n\nfig, ax = plt.subplots(nrows=1, ncols=3)\n\nax[0].imshow(image, cmap='gray')\nax[0].set_title('Original image')\nax[0].axis('off')\n\nax[1].imshow(background, cmap='gray')\nax[1].set_title('Background')\nax[1].axis('off')\n\nax[2].imshow(filtered_image, cmap='gray')\nax[2].set_title('Result')\nax[2].axis('off')\n\nfig.tight_layout()\n\nplt.show()\n\n######################################################################\n# Be careful not to fall victim to an integer underflow when subtracting\n# a bright background. For example, this code looks correct, but may\n# suffer from an underflow leading to unwanted artifacts. You can see\n# this in the top right corner of the visualization.\n\nimage = data.page()\nimage_inverted = util.invert(image)\n\nbackground_inverted = restoration.rolling_ball(image_inverted, radius=45)\nbackground = util.invert(background_inverted)\nunderflow_image = image - background # integer underflow occurs here\n\n# correct subtraction\ncorrect_image = util.invert(image_inverted - background_inverted)\n\nfig, ax = plt.subplots(nrows=1, ncols=2)\n\nax[0].imshow(underflow_image, cmap='gray')\nax[0].set_title('Background Removal with Underflow')\nax[0].axis('off')\n\nax[1].imshow(correct_image, cmap='gray')\nax[1].set_title('Correct Background Removal')\nax[1].axis('off')\n\nfig.tight_layout()\n\nplt.show()\n\n######################################################################\n# Image Datatypes\n# ---------------\n#\n# ``rolling_ball`` can handle datatypes other than `np.uint8`. You can\n# pass them into the function in the same way.\n\nimage = data.coins()[:200, :200].astype(np.uint16)\n\nbackground = restoration.rolling_ball(image, radius=70.5)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# However, you need to be careful if you use floating point images\n# that have been normalized to ``[0, 1]``. In this case the ball will\n# be much larger than the image intensity, which can lead to\n# unexpected results.\n\nimage = util.img_as_float(data.coins()[:200, :200])\n\nbackground = restoration.rolling_ball(image, radius=70.5)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# Because ``radius=70.5`` is much larger than the maximum intensity of\n# the image, the effective kernel size is reduced significantly, i.e.,\n# only a small cap (approximately ``radius=10``) of the ball is rolled\n# around in the image. You can find a reproduction of this strange\n# effect in the ``Advanced Shapes`` section below.\n#\n# To get the expected result, you need to reduce the intensity of the\n# kernel. This is done by specifying the kernel manually using the\n# ``kernel`` argument.\n#\n# Note: The radius is equal to the length of a semi-axis of an\n# ellipsis, which is *half* a full axis. Hence, the kernel shape is\n# multipled by two.\n\nnormalized_radius = 70.5 / 255\nimage = util.img_as_float(data.coins())\nkernel = restoration.ellipsoid_kernel(\n (70.5 * 2, 70.5 * 2),\n normalized_radius * 2\n)\n\nbackground = restoration.rolling_ball(\n image,\n kernel=kernel\n)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# Advanced Shapes\n# -----------------\n#\n# By default, ``rolling_ball`` uses a ball shaped kernel (surprise).\n# Sometimes, this can be too limiting - as in the example above -,\n# because the intensity dimension has a different scale compared to\n# the spatial dimensions, or because the image dimensions may have\n# different meanings - one could be a stack counter in an image stack.\n#\n# To account for this, ``rolling_ball`` has a ``kernel`` argument\n# which allows you to specify the kernel to be used. A kernel must\n# have the same dimensionality as the image (Note: dimensionality,\n# not shape). To help with it's creation, two default kernels are\n# provided by ``skimage``. ``ball_kernel`` specifies a ball shaped\n# kernel and is used as the default kernel. ``ellipsoid_kernel``\n# specifies an ellipsoid shaped kernel.\n\nimage = data.coins()\nkernel = restoration.ellipsoid_kernel(\n (70.5 * 2, 70.5 * 2),\n 70.5 * 2\n)\n\nbackground = restoration.rolling_ball(\n image,\n kernel=kernel\n)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# You can also use ``ellipsoid_kernel`` to recreate the previous,\n# unexpected result and see that the effective (spatial) filter size\n# was reduced.\n\nimage = data.coins()\n\nkernel = restoration.ellipsoid_kernel(\n (10 * 2, 10 * 2),\n 255 * 2\n)\n\nbackground = restoration.rolling_ball(\n image,\n kernel=kernel\n)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# Higher Dimensions\n# -----------------\n#\n# Another feature of ``rolling_ball`` is that you can directly\n# apply it to higher dimensional images, e.g., a z-stack of images\n# obtained during confocal microscopy. The number of kernel\n# dimensions must match the image dimensions, hence the kernel shape\n# is now 3 dimensional.\n\nimage = data.cells3d()[:, 1, ...]\nbackground = restoration.rolling_ball(\n image,\n kernel=restoration.ellipsoid_kernel(\n (1, 21, 21),\n 0.1\n )\n)\n\nplot_result(image[30, ...], background[30, ...])\nplt.show()\n\n######################################################################\n# A kernel size of 1 does not filter along this axis. In other words,\n# above filter is applied to each image in the stack individually.\n#\n# However, you can also filter along all 3 dimensions at the same\n# time by specifying a value other than 1.\n\nimage = data.cells3d()[:, 1, ...]\nbackground = restoration.rolling_ball(\n image,\n kernel=restoration.ellipsoid_kernel(\n (5, 21, 21),\n 0.1\n )\n)\n\nplot_result(image[30, ...], background[30, ...])\nplt.show()\n\n######################################################################\n# Another possibility is to filter individual pixels along the\n# planar axis (z-stack axis).\n\nimage = data.cells3d()[:, 1, ...]\nbackground = restoration.rolling_ball(\n image,\n kernel=restoration.ellipsoid_kernel(\n (100, 1, 1),\n 0.1\n )\n)\n\nplot_result(image[30, ...], background[30, ...])\nplt.show()\n", "path": "doc/examples/segmentation/plot_rolling_ball.py"}], "after_files": [{"content": "\"\"\"\n================================================================\nUse rolling-ball algorithm for estimating background intensity\n================================================================\n\nThe rolling-ball algorithm estimates the background intensity of a grayscale\nimage in case of uneven exposure. It is frequently used in biomedical\nimage processing and was first proposed by Stanley R. Sternberg in\n1983 [1]_.\n\nThe algorithm works as a filter and is quite intuitive. We think of the image\nas a surface that has unit-sized blocks stacked on top of each other in place\nof each pixel. The number of blocks, and hence surface height, is determined\nby the intensity of the pixel. To get the intensity of the background at a\ndesired (pixel) position, we imagine submerging a ball under the surface at the\ndesired position. Once it is completely covered by the blocks, the apex of\nthe ball determines the intensity of the background at that position. We can\nthen *roll* this ball around below the surface to get the background values for\nthe entire image.\n\nScikit-image implements a general version of this rolling-ball algorithm, which\nallows you to not just use balls, but arbitrary shapes as kernel and works on\nn-dimensional ndimages. This allows you to directly filter RGB images or filter\nimage stacks along any (or all) spacial dimensions.\n\n.. [1] Sternberg, Stanley R. \"Biomedical image processing.\" Computer 1 (1983):\n 22-34. :DOI:`10.1109/MC.1983.1654163`\n\n\nClassic rolling ball\n-------------------------------\n\nIn scikit-image, the rolling ball algorithm assumes that your background has\nlow intensity (black), whereas the features have high intensity (white). If\nthis is the case for your image, you can directly use the filter like so:\n\n\"\"\"\n\nimport imageio\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pywt\n\nfrom skimage import (\n data, restoration, util\n)\n\n\ndef plot_result(image, background):\n fig, ax = plt.subplots(nrows=1, ncols=3)\n\n ax[0].imshow(image, cmap='gray')\n ax[0].set_title('Original image')\n ax[0].axis('off')\n\n ax[1].imshow(background, cmap='gray')\n ax[1].set_title('Background')\n ax[1].axis('off')\n\n ax[2].imshow(image - background, cmap='gray')\n ax[2].set_title('Result')\n ax[2].axis('off')\n\n fig.tight_layout()\n\n\nimage = data.coins()\n\nbackground = restoration.rolling_ball(image)\n\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# White background\n# ----------------\n#\n# If you have dark features on a bright background, you need to invert\n# the image before you pass it into the algorithm, and then invert the\n# result. This can be accomplished via:\n\nimage = data.page()\nimage_inverted = util.invert(image)\n\nbackground_inverted = restoration.rolling_ball(image_inverted, radius=45)\nfiltered_image_inverted = image_inverted - background_inverted\nfiltered_image = util.invert(filtered_image_inverted)\nbackground = util.invert(background_inverted)\n\nfig, ax = plt.subplots(nrows=1, ncols=3)\n\nax[0].imshow(image, cmap='gray')\nax[0].set_title('Original image')\nax[0].axis('off')\n\nax[1].imshow(background, cmap='gray')\nax[1].set_title('Background')\nax[1].axis('off')\n\nax[2].imshow(filtered_image, cmap='gray')\nax[2].set_title('Result')\nax[2].axis('off')\n\nfig.tight_layout()\n\nplt.show()\n\n######################################################################\n# Be careful not to fall victim to an integer underflow when subtracting\n# a bright background. For example, this code looks correct, but may\n# suffer from an underflow leading to unwanted artifacts. You can see\n# this in the top right corner of the visualization.\n\nimage = data.page()\nimage_inverted = util.invert(image)\n\nbackground_inverted = restoration.rolling_ball(image_inverted, radius=45)\nbackground = util.invert(background_inverted)\nunderflow_image = image - background # integer underflow occurs here\n\n# correct subtraction\ncorrect_image = util.invert(image_inverted - background_inverted)\n\nfig, ax = plt.subplots(nrows=1, ncols=2)\n\nax[0].imshow(underflow_image, cmap='gray')\nax[0].set_title('Background Removal with Underflow')\nax[0].axis('off')\n\nax[1].imshow(correct_image, cmap='gray')\nax[1].set_title('Correct Background Removal')\nax[1].axis('off')\n\nfig.tight_layout()\n\nplt.show()\n\n######################################################################\n# Image Datatypes\n# ---------------\n#\n# ``rolling_ball`` can handle datatypes other than `np.uint8`. You can\n# pass them into the function in the same way.\n\nimage = data.coins()[:200, :200].astype(np.uint16)\n\nbackground = restoration.rolling_ball(image, radius=70.5)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# However, you need to be careful if you use floating point images\n# that have been normalized to ``[0, 1]``. In this case the ball will\n# be much larger than the image intensity, which can lead to\n# unexpected results.\n\nimage = util.img_as_float(data.coins()[:200, :200])\n\nbackground = restoration.rolling_ball(image, radius=70.5)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# Because ``radius=70.5`` is much larger than the maximum intensity of\n# the image, the effective kernel size is reduced significantly, i.e.,\n# only a small cap (approximately ``radius=10``) of the ball is rolled\n# around in the image. You can find a reproduction of this strange\n# effect in the ``Advanced Shapes`` section below.\n#\n# To get the expected result, you need to reduce the intensity of the\n# kernel. This is done by specifying the kernel manually using the\n# ``kernel`` argument.\n#\n# Note: The radius is equal to the length of a semi-axis of an\n# ellipsis, which is *half* a full axis. Hence, the kernel shape is\n# multipled by two.\n\nnormalized_radius = 70.5 / 255\nimage = util.img_as_float(data.coins())\nkernel = restoration.ellipsoid_kernel(\n (70.5 * 2, 70.5 * 2),\n normalized_radius * 2\n)\n\nbackground = restoration.rolling_ball(\n image,\n kernel=kernel\n)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# Advanced Shapes\n# -----------------\n#\n# By default, ``rolling_ball`` uses a ball shaped kernel (surprise).\n# Sometimes, this can be too limiting - as in the example above -,\n# because the intensity dimension has a different scale compared to\n# the spatial dimensions, or because the image dimensions may have\n# different meanings - one could be a stack counter in an image stack.\n#\n# To account for this, ``rolling_ball`` has a ``kernel`` argument\n# which allows you to specify the kernel to be used. A kernel must\n# have the same dimensionality as the image (Note: dimensionality,\n# not shape). To help with it's creation, two default kernels are\n# provided by ``skimage``. ``ball_kernel`` specifies a ball shaped\n# kernel and is used as the default kernel. ``ellipsoid_kernel``\n# specifies an ellipsoid shaped kernel.\n\nimage = data.coins()\nkernel = restoration.ellipsoid_kernel(\n (70.5 * 2, 70.5 * 2),\n 70.5 * 2\n)\n\nbackground = restoration.rolling_ball(\n image,\n kernel=kernel\n)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# You can also use ``ellipsoid_kernel`` to recreate the previous,\n# unexpected result and see that the effective (spatial) filter size\n# was reduced.\n\nimage = data.coins()\n\nkernel = restoration.ellipsoid_kernel(\n (10 * 2, 10 * 2),\n 255 * 2\n)\n\nbackground = restoration.rolling_ball(\n image,\n kernel=kernel\n)\nplot_result(image, background)\nplt.show()\n\n######################################################################\n# Higher Dimensions\n# -----------------\n#\n# Another feature of ``rolling_ball`` is that you can directly\n# apply it to higher dimensional images, e.g., a z-stack of images\n# obtained during confocal microscopy. The number of kernel\n# dimensions must match the image dimensions, hence the kernel shape\n# is now 3 dimensional.\n\nimage = data.cells3d()[:, 1, ...]\nbackground = restoration.rolling_ball(\n image,\n kernel=restoration.ellipsoid_kernel(\n (1, 21, 21),\n 0.1\n )\n)\n\nplot_result(image[30, ...], background[30, ...])\nplt.show()\n\n######################################################################\n# A kernel size of 1 does not filter along this axis. In other words,\n# above filter is applied to each image in the stack individually.\n#\n# However, you can also filter along all 3 dimensions at the same\n# time by specifying a value other than 1.\n\nimage = data.cells3d()[:, 1, ...]\nbackground = restoration.rolling_ball(\n image,\n kernel=restoration.ellipsoid_kernel(\n (5, 21, 21),\n 0.1\n )\n)\n\nplot_result(image[30, ...], background[30, ...])\nplt.show()\n\n######################################################################\n# Another possibility is to filter individual pixels along the\n# planar axis (z-stack axis).\n\nimage = data.cells3d()[:, 1, ...]\nbackground = restoration.rolling_ball(\n image,\n kernel=restoration.ellipsoid_kernel(\n (100, 1, 1),\n 0.1\n )\n)\n\nplot_result(image[30, ...], background[30, ...])\nplt.show()\n\n######################################################################\n# 1D Signal Filtering\n# -------------------\n#\n# As another example of the n-dimensional feature of\n# ``rolling_ball``, we show an implementation for 1D data. Here,\n# we are interested in removing the background signal of an ECG waveform\n# to detect prominent peaks (higher values than the local baseline).\n# Smoother peaks can be removed with smaller values of the radius.\n\nx = pywt.data.ecg()\nbackground = restoration.rolling_ball(x, radius=80)\nbackground2 = restoration.rolling_ball(x, radius=10)\nplt.figure()\nplt.plot(x, label='original')\nplt.plot(x - background, label='radius=80')\nplt.plot(x - background2, label='radius=10')\nplt.legend()\nplt.show()\n", "path": "doc/examples/segmentation/plot_rolling_ball.py"}]}
| 3,367 | 309 |
gh_patches_debug_67
|
rasdani/github-patches
|
git_diff
|
pytorch__text-87
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Length of iterator fails in Python 2
The division `len(dataset) / batch_size` will be cast to int in python2, so that `math.ceil` doesn't really work when `len(dataset)` is not a multiple of batch size.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torchtext/data/iterator.py`
Content:
```
1 import math
2 import random
3 from contextlib import contextmanager
4 from copy import deepcopy
5
6 from .batch import Batch
7 from .dataset import Dataset
8
9
10 class RandomShuffler(object):
11 """Use random functions while keeping track of the random state to make it
12 reproducible and deterministic."""
13
14 def __init__(self, random_state=None):
15 self._random_state = random_state
16 if self._random_state is None:
17 self._random_state = random.getstate()
18
19 @contextmanager
20 def use_internal_state(self):
21 """Use a specific RNG state."""
22 old_state = random.getstate()
23 random.setstate(self._random_state)
24 yield
25 self._random_state = random.getstate()
26 random.setstate(old_state)
27
28 @property
29 def random_state(self):
30 return deepcopy(self._random_state)
31
32 @random_state.setter
33 def random_state(self, s):
34 self._random_state = s
35
36 def __call__(self, data):
37 """Shuffle and return a new list."""
38 with self.use_internal_state():
39 return random.sample(data, len(data))
40
41
42 class Iterator(object):
43 """Defines an iterator that loads batches of data from a Dataset.
44
45 Attributes:
46 dataset: The Dataset object to load Examples from.
47 batch_size: Batch size.
48 batch_size_fn: Function of three arguments (new example to add, current
49 count of examples in the batch, and current effective batch size)
50 that returns the new effective batch size resulting from adding
51 that example to a batch. This is useful for dynamic batching, where
52 this function would add to the current effective batch size the
53 number of tokens in the new example.
54 sort_key: A key to use for sorting examples in order to batch together
55 examples with similar lengths and minimize padding. The sort_key
56 provided to the Iterator constructor overrides the sort_key
57 attribute of the Dataset, or defers to it if None.
58 train: Whether the iterator represents a train set.
59 repeat: Whether to repeat the iterator for multiple epochs.
60 shuffle: Whether to shuffle examples between epochs.
61 sort: Whether to sort examples according to self.sort_key.
62 Note that repeat, shuffle, and sort default to train, train, and
63 (not train).
64 device: Device to create batches on. Use -1 for CPU and None for the
65 currently active GPU device.
66 """
67
68 def __init__(self, dataset, batch_size, sort_key=None, device=None,
69 batch_size_fn=lambda new, count, sofar: count, train=True,
70 repeat=None, shuffle=None, sort=None):
71 self.batch_size, self.train, self.dataset = batch_size, train, dataset
72 self.batch_size_fn = batch_size_fn
73 self.iterations = 0
74 self.repeat = train if repeat is None else repeat
75 self.shuffle = train if shuffle is None else shuffle
76 self.sort = not train if sort is None else sort
77 if sort_key is None:
78 self.sort_key = dataset.sort_key
79 else:
80 self.sort_key = sort_key
81 self.device = device
82
83 self.random_shuffler = RandomShuffler()
84
85 # For state loading/saving only
86 self._iterations_this_epoch = 0
87 self._random_state_this_epoch = None
88 self._restored_from_state = False
89
90 @classmethod
91 def splits(cls, datasets, batch_sizes=None, **kwargs):
92 """Create Iterator objects for multiple splits of a dataset.
93
94 Arguments:
95 datasets: Tuple of Dataset objects corresponding to the splits. The
96 first such object should be the train set.
97 batch_sizes: Tuple of batch sizes to use for the different splits,
98 or None to use the same batch_size for all splits.
99 Remaining keyword arguments: Passed to the constructor of the
100 iterator class being used.
101 """
102 if batch_sizes is None:
103 batch_sizes = [kwargs.pop('batch_size')] * len(datasets)
104 ret = []
105 for i in range(len(datasets)):
106 train = i == 0
107 ret.append(cls(
108 datasets[i], batch_size=batch_sizes[i], train=train, **kwargs))
109 return tuple(ret)
110
111 def data(self):
112 """Return the examples in the dataset in order, sorted, or shuffled."""
113 if self.sort:
114 xs = sorted(self.dataset, key=self.sort_key)
115 elif self.shuffle:
116 xs = [self.dataset[i] for i in self.random_shuffler(range(len(self.dataset)))]
117 else:
118 xs = self.dataset
119 return xs
120
121 def init_epoch(self):
122 """Set up the batch generator for a new epoch."""
123
124 if self._restored_from_state:
125 self.random_shuffler.random_state = self._random_state_this_epoch
126 else:
127 self._random_state_this_epoch = self.random_shuffler.random_state
128
129 self.create_batches()
130
131 if self._restored_from_state:
132 self._restored_from_state = False
133 else:
134 self._iterations_this_epoch = 0
135
136 if not self.repeat:
137 self.iterations = 0
138
139 def create_batches(self):
140 self.batches = batch(self.data(), self.batch_size, self.batch_size_fn)
141
142 @property
143 def epoch(self):
144 return self.iterations / len(self)
145
146 def __len__(self):
147 return math.ceil(len(self.dataset) / self.batch_size)
148
149 def __iter__(self):
150 while True:
151 self.init_epoch()
152 for idx, minibatch in enumerate(self.batches):
153 # fast-forward if loaded from state
154 if self._iterations_this_epoch > idx:
155 continue
156 self.iterations += 1
157 self._iterations_this_epoch += 1
158 yield Batch(minibatch, self.dataset, self.device,
159 self.train)
160 if not self.repeat:
161 raise StopIteration
162
163 def state_dict(self):
164 return {
165 "iterations": self.iterations,
166 "iterations_this_epoch": self._iterations_this_epoch,
167 "random_state_this_epoch": self._random_state_this_epoch}
168
169 def load_state_dict(self, state_dict):
170 self.iterations = state_dict["iterations"]
171 self._iterations_this_epoch = state_dict["iterations_this_epoch"]
172 self._random_state_this_epoch = state_dict["random_state_this_epoch"]
173 self._restored_from_state = True
174
175
176 class BPTTIterator(Iterator):
177 """Defines an iterator for language modeling tasks that use BPTT.
178
179 Provides contiguous streams of examples together with targets that are
180 one timestep further forward, for language modeling training with
181 backpropagation through time (BPTT). Expects a Dataset with a single
182 example and a single field called 'text' and produces Batches with text and
183 target attributes.
184
185 Attributes:
186 dataset: The Dataset object to load Examples from.
187 batch_size: Batch size.
188 bptt_len: Length of sequences for backpropagation through time.
189 sort_key: A key to use for sorting examples in order to batch together
190 examples with similar lengths and minimize padding. The sort_key
191 provided to the Iterator constructor overrides the sort_key
192 attribute of the Dataset, or defers to it if None.
193 train: Whether the iterator represents a train set.
194 repeat: Whether to repeat the iterator for multiple epochs.
195 shuffle: Whether to shuffle examples between epochs.
196 sort: Whether to sort examples according to self.sort_key.
197 Note that repeat, shuffle, and sort default to train, train, and
198 (not train).
199 device: Device to create batches on. Use -1 for CPU and None for the
200 currently active GPU device.
201 """
202
203 def __init__(self, dataset, batch_size, bptt_len, **kwargs):
204 self.bptt_len = bptt_len
205 super(BPTTIterator, self).__init__(dataset, batch_size, **kwargs)
206
207 def __len__(self):
208 return math.ceil(len(self.dataset[0].text) /
209 (self.batch_size * self.bptt_len))
210
211 def __iter__(self):
212 text = self.dataset[0].text
213 TEXT = self.dataset.fields['text']
214 TEXT.eos_token = None
215 text = text + ([TEXT.pad_token] * (math.ceil(len(text) / self.batch_size) *
216 self.batch_size - len(text)))
217 data = TEXT.numericalize(
218 [text], device=self.device, train=self.train)
219 data = data.view(self.batch_size, -1).t().contiguous()
220 dataset = Dataset(examples=self.dataset.examples, fields=[
221 ('text', TEXT), ('target', TEXT)])
222 while True:
223 for i in range(0, len(self) * self.bptt_len, self.bptt_len):
224 seq_len = min(self.bptt_len, len(data) - 1 - i)
225 yield Batch.fromvars(
226 dataset, self.batch_size, train=self.train,
227 text=data[i:i + seq_len],
228 target=data[i + 1:i + 1 + seq_len])
229 if not self.repeat:
230 raise StopIteration
231
232
233 class BucketIterator(Iterator):
234 """Defines an iterator that batches examples of similar lengths together.
235
236 Minimizes amount of padding needed while producing freshly shuffled
237 batches for each new epoch. See pool for the bucketing procedure used.
238 """
239
240 def create_batches(self):
241 if self.sort:
242 self.batches = batch(self.data(), self.batch_size,
243 self.batch_size_fn)
244 else:
245 self.batches = pool(self.data(), self.batch_size,
246 self.sort_key, self.batch_size_fn,
247 random_shuffler=self.random_shuffler)
248
249
250 def batch(data, batch_size, batch_size_fn=lambda new, count, sofar: count):
251 """Yield elements from data in chunks of batch_size."""
252 minibatch, size_so_far = [], 0
253 for ex in data:
254 minibatch.append(ex)
255 size_so_far = batch_size_fn(ex, len(minibatch), size_so_far)
256 if size_so_far == batch_size:
257 yield minibatch
258 minibatch, size_so_far = [], 0
259 elif size_so_far > batch_size:
260 yield minibatch[:-1]
261 minibatch, size_so_far = minibatch[-1:], batch_size_fn(ex, 1, 0)
262 if minibatch:
263 yield minibatch
264
265
266 def pool(data, batch_size, key, batch_size_fn=lambda new, count, sofar: count,
267 random_shuffler=None):
268 """Sort within buckets, then batch, then shuffle batches.
269
270 Partitions data into chunks of size 100*batch_size, sorts examples within
271 each chunk using sort_key, then batch these examples and shuffle the
272 batches.
273 """
274 if random_shuffler is None:
275 random_shuffler = random.shuffle
276 for p in batch(data, batch_size * 100, batch_size_fn):
277 p_batch = batch(sorted(p, key=key), batch_size, batch_size_fn)
278 for b in random_shuffler(list(p_batch)):
279 yield b
280
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/torchtext/data/iterator.py b/torchtext/data/iterator.py
--- a/torchtext/data/iterator.py
+++ b/torchtext/data/iterator.py
@@ -1,3 +1,5 @@
+from __future__ import division
+
import math
import random
from contextlib import contextmanager
|
{"golden_diff": "diff --git a/torchtext/data/iterator.py b/torchtext/data/iterator.py\n--- a/torchtext/data/iterator.py\n+++ b/torchtext/data/iterator.py\n@@ -1,3 +1,5 @@\n+from __future__ import division\n+\n import math\n import random\n from contextlib import contextmanager\n", "issue": "Length of iterator fails in Python 2\nThe division `len(dataset) / batch_size` will be cast to int in python2, so that `math.ceil` doesn't really work when `len(dataset)` is not a multiple of batch size.\n", "before_files": [{"content": "import math\nimport random\nfrom contextlib import contextmanager\nfrom copy import deepcopy\n\nfrom .batch import Batch\nfrom .dataset import Dataset\n\n\nclass RandomShuffler(object):\n \"\"\"Use random functions while keeping track of the random state to make it\n reproducible and deterministic.\"\"\"\n\n def __init__(self, random_state=None):\n self._random_state = random_state\n if self._random_state is None:\n self._random_state = random.getstate()\n\n @contextmanager\n def use_internal_state(self):\n \"\"\"Use a specific RNG state.\"\"\"\n old_state = random.getstate()\n random.setstate(self._random_state)\n yield\n self._random_state = random.getstate()\n random.setstate(old_state)\n\n @property\n def random_state(self):\n return deepcopy(self._random_state)\n\n @random_state.setter\n def random_state(self, s):\n self._random_state = s\n\n def __call__(self, data):\n \"\"\"Shuffle and return a new list.\"\"\"\n with self.use_internal_state():\n return random.sample(data, len(data))\n\n\nclass Iterator(object):\n \"\"\"Defines an iterator that loads batches of data from a Dataset.\n\n Attributes:\n dataset: The Dataset object to load Examples from.\n batch_size: Batch size.\n batch_size_fn: Function of three arguments (new example to add, current\n count of examples in the batch, and current effective batch size)\n that returns the new effective batch size resulting from adding\n that example to a batch. This is useful for dynamic batching, where\n this function would add to the current effective batch size the\n number of tokens in the new example.\n sort_key: A key to use for sorting examples in order to batch together\n examples with similar lengths and minimize padding. The sort_key\n provided to the Iterator constructor overrides the sort_key\n attribute of the Dataset, or defers to it if None.\n train: Whether the iterator represents a train set.\n repeat: Whether to repeat the iterator for multiple epochs.\n shuffle: Whether to shuffle examples between epochs.\n sort: Whether to sort examples according to self.sort_key.\n Note that repeat, shuffle, and sort default to train, train, and\n (not train).\n device: Device to create batches on. Use -1 for CPU and None for the\n currently active GPU device.\n \"\"\"\n\n def __init__(self, dataset, batch_size, sort_key=None, device=None,\n batch_size_fn=lambda new, count, sofar: count, train=True,\n repeat=None, shuffle=None, sort=None):\n self.batch_size, self.train, self.dataset = batch_size, train, dataset\n self.batch_size_fn = batch_size_fn\n self.iterations = 0\n self.repeat = train if repeat is None else repeat\n self.shuffle = train if shuffle is None else shuffle\n self.sort = not train if sort is None else sort\n if sort_key is None:\n self.sort_key = dataset.sort_key\n else:\n self.sort_key = sort_key\n self.device = device\n\n self.random_shuffler = RandomShuffler()\n\n # For state loading/saving only\n self._iterations_this_epoch = 0\n self._random_state_this_epoch = None\n self._restored_from_state = False\n\n @classmethod\n def splits(cls, datasets, batch_sizes=None, **kwargs):\n \"\"\"Create Iterator objects for multiple splits of a dataset.\n\n Arguments:\n datasets: Tuple of Dataset objects corresponding to the splits. The\n first such object should be the train set.\n batch_sizes: Tuple of batch sizes to use for the different splits,\n or None to use the same batch_size for all splits.\n Remaining keyword arguments: Passed to the constructor of the\n iterator class being used.\n \"\"\"\n if batch_sizes is None:\n batch_sizes = [kwargs.pop('batch_size')] * len(datasets)\n ret = []\n for i in range(len(datasets)):\n train = i == 0\n ret.append(cls(\n datasets[i], batch_size=batch_sizes[i], train=train, **kwargs))\n return tuple(ret)\n\n def data(self):\n \"\"\"Return the examples in the dataset in order, sorted, or shuffled.\"\"\"\n if self.sort:\n xs = sorted(self.dataset, key=self.sort_key)\n elif self.shuffle:\n xs = [self.dataset[i] for i in self.random_shuffler(range(len(self.dataset)))]\n else:\n xs = self.dataset\n return xs\n\n def init_epoch(self):\n \"\"\"Set up the batch generator for a new epoch.\"\"\"\n\n if self._restored_from_state:\n self.random_shuffler.random_state = self._random_state_this_epoch\n else:\n self._random_state_this_epoch = self.random_shuffler.random_state\n\n self.create_batches()\n\n if self._restored_from_state:\n self._restored_from_state = False\n else:\n self._iterations_this_epoch = 0\n\n if not self.repeat:\n self.iterations = 0\n\n def create_batches(self):\n self.batches = batch(self.data(), self.batch_size, self.batch_size_fn)\n\n @property\n def epoch(self):\n return self.iterations / len(self)\n\n def __len__(self):\n return math.ceil(len(self.dataset) / self.batch_size)\n\n def __iter__(self):\n while True:\n self.init_epoch()\n for idx, minibatch in enumerate(self.batches):\n # fast-forward if loaded from state\n if self._iterations_this_epoch > idx:\n continue\n self.iterations += 1\n self._iterations_this_epoch += 1\n yield Batch(minibatch, self.dataset, self.device,\n self.train)\n if not self.repeat:\n raise StopIteration\n\n def state_dict(self):\n return {\n \"iterations\": self.iterations,\n \"iterations_this_epoch\": self._iterations_this_epoch,\n \"random_state_this_epoch\": self._random_state_this_epoch}\n\n def load_state_dict(self, state_dict):\n self.iterations = state_dict[\"iterations\"]\n self._iterations_this_epoch = state_dict[\"iterations_this_epoch\"]\n self._random_state_this_epoch = state_dict[\"random_state_this_epoch\"]\n self._restored_from_state = True\n\n\nclass BPTTIterator(Iterator):\n \"\"\"Defines an iterator for language modeling tasks that use BPTT.\n\n Provides contiguous streams of examples together with targets that are\n one timestep further forward, for language modeling training with\n backpropagation through time (BPTT). Expects a Dataset with a single\n example and a single field called 'text' and produces Batches with text and\n target attributes.\n\n Attributes:\n dataset: The Dataset object to load Examples from.\n batch_size: Batch size.\n bptt_len: Length of sequences for backpropagation through time.\n sort_key: A key to use for sorting examples in order to batch together\n examples with similar lengths and minimize padding. The sort_key\n provided to the Iterator constructor overrides the sort_key\n attribute of the Dataset, or defers to it if None.\n train: Whether the iterator represents a train set.\n repeat: Whether to repeat the iterator for multiple epochs.\n shuffle: Whether to shuffle examples between epochs.\n sort: Whether to sort examples according to self.sort_key.\n Note that repeat, shuffle, and sort default to train, train, and\n (not train).\n device: Device to create batches on. Use -1 for CPU and None for the\n currently active GPU device.\n \"\"\"\n\n def __init__(self, dataset, batch_size, bptt_len, **kwargs):\n self.bptt_len = bptt_len\n super(BPTTIterator, self).__init__(dataset, batch_size, **kwargs)\n\n def __len__(self):\n return math.ceil(len(self.dataset[0].text) /\n (self.batch_size * self.bptt_len))\n\n def __iter__(self):\n text = self.dataset[0].text\n TEXT = self.dataset.fields['text']\n TEXT.eos_token = None\n text = text + ([TEXT.pad_token] * (math.ceil(len(text) / self.batch_size) *\n self.batch_size - len(text)))\n data = TEXT.numericalize(\n [text], device=self.device, train=self.train)\n data = data.view(self.batch_size, -1).t().contiguous()\n dataset = Dataset(examples=self.dataset.examples, fields=[\n ('text', TEXT), ('target', TEXT)])\n while True:\n for i in range(0, len(self) * self.bptt_len, self.bptt_len):\n seq_len = min(self.bptt_len, len(data) - 1 - i)\n yield Batch.fromvars(\n dataset, self.batch_size, train=self.train,\n text=data[i:i + seq_len],\n target=data[i + 1:i + 1 + seq_len])\n if not self.repeat:\n raise StopIteration\n\n\nclass BucketIterator(Iterator):\n \"\"\"Defines an iterator that batches examples of similar lengths together.\n\n Minimizes amount of padding needed while producing freshly shuffled\n batches for each new epoch. See pool for the bucketing procedure used.\n \"\"\"\n\n def create_batches(self):\n if self.sort:\n self.batches = batch(self.data(), self.batch_size,\n self.batch_size_fn)\n else:\n self.batches = pool(self.data(), self.batch_size,\n self.sort_key, self.batch_size_fn,\n random_shuffler=self.random_shuffler)\n\n\ndef batch(data, batch_size, batch_size_fn=lambda new, count, sofar: count):\n \"\"\"Yield elements from data in chunks of batch_size.\"\"\"\n minibatch, size_so_far = [], 0\n for ex in data:\n minibatch.append(ex)\n size_so_far = batch_size_fn(ex, len(minibatch), size_so_far)\n if size_so_far == batch_size:\n yield minibatch\n minibatch, size_so_far = [], 0\n elif size_so_far > batch_size:\n yield minibatch[:-1]\n minibatch, size_so_far = minibatch[-1:], batch_size_fn(ex, 1, 0)\n if minibatch:\n yield minibatch\n\n\ndef pool(data, batch_size, key, batch_size_fn=lambda new, count, sofar: count,\n random_shuffler=None):\n \"\"\"Sort within buckets, then batch, then shuffle batches.\n\n Partitions data into chunks of size 100*batch_size, sorts examples within\n each chunk using sort_key, then batch these examples and shuffle the\n batches.\n \"\"\"\n if random_shuffler is None:\n random_shuffler = random.shuffle\n for p in batch(data, batch_size * 100, batch_size_fn):\n p_batch = batch(sorted(p, key=key), batch_size, batch_size_fn)\n for b in random_shuffler(list(p_batch)):\n yield b\n", "path": "torchtext/data/iterator.py"}], "after_files": [{"content": "from __future__ import division\n\nimport math\nimport random\nfrom contextlib import contextmanager\nfrom copy import deepcopy\n\nfrom .batch import Batch\nfrom .dataset import Dataset\n\n\nclass RandomShuffler(object):\n \"\"\"Use random functions while keeping track of the random state to make it\n reproducible and deterministic.\"\"\"\n\n def __init__(self, random_state=None):\n self._random_state = random_state\n if self._random_state is None:\n self._random_state = random.getstate()\n\n @contextmanager\n def use_internal_state(self):\n \"\"\"Use a specific RNG state.\"\"\"\n old_state = random.getstate()\n random.setstate(self._random_state)\n yield\n self._random_state = random.getstate()\n random.setstate(old_state)\n\n @property\n def random_state(self):\n return deepcopy(self._random_state)\n\n @random_state.setter\n def random_state(self, s):\n self._random_state = s\n\n def __call__(self, data):\n \"\"\"Shuffle and return a new list.\"\"\"\n with self.use_internal_state():\n return random.sample(data, len(data))\n\n\nclass Iterator(object):\n \"\"\"Defines an iterator that loads batches of data from a Dataset.\n\n Attributes:\n dataset: The Dataset object to load Examples from.\n batch_size: Batch size.\n batch_size_fn: Function of three arguments (new example to add, current\n count of examples in the batch, and current effective batch size)\n that returns the new effective batch size resulting from adding\n that example to a batch. This is useful for dynamic batching, where\n this function would add to the current effective batch size the\n number of tokens in the new example.\n sort_key: A key to use for sorting examples in order to batch together\n examples with similar lengths and minimize padding. The sort_key\n provided to the Iterator constructor overrides the sort_key\n attribute of the Dataset, or defers to it if None.\n train: Whether the iterator represents a train set.\n repeat: Whether to repeat the iterator for multiple epochs.\n shuffle: Whether to shuffle examples between epochs.\n sort: Whether to sort examples according to self.sort_key.\n Note that repeat, shuffle, and sort default to train, train, and\n (not train).\n device: Device to create batches on. Use -1 for CPU and None for the\n currently active GPU device.\n \"\"\"\n\n def __init__(self, dataset, batch_size, sort_key=None, device=None,\n batch_size_fn=lambda new, count, sofar: count, train=True,\n repeat=None, shuffle=None, sort=None):\n self.batch_size, self.train, self.dataset = batch_size, train, dataset\n self.batch_size_fn = batch_size_fn\n self.iterations = 0\n self.repeat = train if repeat is None else repeat\n self.shuffle = train if shuffle is None else shuffle\n self.sort = not train if sort is None else sort\n if sort_key is None:\n self.sort_key = dataset.sort_key\n else:\n self.sort_key = sort_key\n self.device = device\n\n self.random_shuffler = RandomShuffler()\n\n # For state loading/saving only\n self._iterations_this_epoch = 0\n self._random_state_this_epoch = None\n self._restored_from_state = False\n\n @classmethod\n def splits(cls, datasets, batch_sizes=None, **kwargs):\n \"\"\"Create Iterator objects for multiple splits of a dataset.\n\n Arguments:\n datasets: Tuple of Dataset objects corresponding to the splits. The\n first such object should be the train set.\n batch_sizes: Tuple of batch sizes to use for the different splits,\n or None to use the same batch_size for all splits.\n Remaining keyword arguments: Passed to the constructor of the\n iterator class being used.\n \"\"\"\n if batch_sizes is None:\n batch_sizes = [kwargs.pop('batch_size')] * len(datasets)\n ret = []\n for i in range(len(datasets)):\n train = i == 0\n ret.append(cls(\n datasets[i], batch_size=batch_sizes[i], train=train, **kwargs))\n return tuple(ret)\n\n def data(self):\n \"\"\"Return the examples in the dataset in order, sorted, or shuffled.\"\"\"\n if self.sort:\n xs = sorted(self.dataset, key=self.sort_key)\n elif self.shuffle:\n xs = [self.dataset[i] for i in self.random_shuffler(range(len(self.dataset)))]\n else:\n xs = self.dataset\n return xs\n\n def init_epoch(self):\n \"\"\"Set up the batch generator for a new epoch.\"\"\"\n\n if self._restored_from_state:\n self.random_shuffler.random_state = self._random_state_this_epoch\n else:\n self._random_state_this_epoch = self.random_shuffler.random_state\n\n self.create_batches()\n\n if self._restored_from_state:\n self._restored_from_state = False\n else:\n self._iterations_this_epoch = 0\n\n if not self.repeat:\n self.iterations = 0\n\n def create_batches(self):\n self.batches = batch(self.data(), self.batch_size, self.batch_size_fn)\n\n @property\n def epoch(self):\n return self.iterations / len(self)\n\n def __len__(self):\n return math.ceil(len(self.dataset) / self.batch_size)\n\n def __iter__(self):\n while True:\n self.init_epoch()\n for idx, minibatch in enumerate(self.batches):\n # fast-forward if loaded from state\n if self._iterations_this_epoch > idx:\n continue\n self.iterations += 1\n self._iterations_this_epoch += 1\n yield Batch(minibatch, self.dataset, self.device,\n self.train)\n if not self.repeat:\n raise StopIteration\n\n def state_dict(self):\n return {\n \"iterations\": self.iterations,\n \"iterations_this_epoch\": self._iterations_this_epoch,\n \"random_state_this_epoch\": self._random_state_this_epoch}\n\n def load_state_dict(self, state_dict):\n self.iterations = state_dict[\"iterations\"]\n self._iterations_this_epoch = state_dict[\"iterations_this_epoch\"]\n self._random_state_this_epoch = state_dict[\"random_state_this_epoch\"]\n self._restored_from_state = True\n\n\nclass BPTTIterator(Iterator):\n \"\"\"Defines an iterator for language modeling tasks that use BPTT.\n\n Provides contiguous streams of examples together with targets that are\n one timestep further forward, for language modeling training with\n backpropagation through time (BPTT). Expects a Dataset with a single\n example and a single field called 'text' and produces Batches with text and\n target attributes.\n\n Attributes:\n dataset: The Dataset object to load Examples from.\n batch_size: Batch size.\n bptt_len: Length of sequences for backpropagation through time.\n sort_key: A key to use for sorting examples in order to batch together\n examples with similar lengths and minimize padding. The sort_key\n provided to the Iterator constructor overrides the sort_key\n attribute of the Dataset, or defers to it if None.\n train: Whether the iterator represents a train set.\n repeat: Whether to repeat the iterator for multiple epochs.\n shuffle: Whether to shuffle examples between epochs.\n sort: Whether to sort examples according to self.sort_key.\n Note that repeat, shuffle, and sort default to train, train, and\n (not train).\n device: Device to create batches on. Use -1 for CPU and None for the\n currently active GPU device.\n \"\"\"\n\n def __init__(self, dataset, batch_size, bptt_len, **kwargs):\n self.bptt_len = bptt_len\n super(BPTTIterator, self).__init__(dataset, batch_size, **kwargs)\n\n def __len__(self):\n return math.ceil(len(self.dataset[0].text) /\n (self.batch_size * self.bptt_len))\n\n def __iter__(self):\n text = self.dataset[0].text\n TEXT = self.dataset.fields['text']\n TEXT.eos_token = None\n text = text + ([TEXT.pad_token] * (math.ceil(len(text) / self.batch_size) *\n self.batch_size - len(text)))\n data = TEXT.numericalize(\n [text], device=self.device, train=self.train)\n data = data.view(self.batch_size, -1).t().contiguous()\n dataset = Dataset(examples=self.dataset.examples, fields=[\n ('text', TEXT), ('target', TEXT)])\n while True:\n for i in range(0, len(self) * self.bptt_len, self.bptt_len):\n seq_len = min(self.bptt_len, len(data) - 1 - i)\n yield Batch.fromvars(\n dataset, self.batch_size, train=self.train,\n text=data[i:i + seq_len],\n target=data[i + 1:i + 1 + seq_len])\n if not self.repeat:\n raise StopIteration\n\n\nclass BucketIterator(Iterator):\n \"\"\"Defines an iterator that batches examples of similar lengths together.\n\n Minimizes amount of padding needed while producing freshly shuffled\n batches for each new epoch. See pool for the bucketing procedure used.\n \"\"\"\n\n def create_batches(self):\n if self.sort:\n self.batches = batch(self.data(), self.batch_size,\n self.batch_size_fn)\n else:\n self.batches = pool(self.data(), self.batch_size,\n self.sort_key, self.batch_size_fn,\n random_shuffler=self.random_shuffler)\n\n\ndef batch(data, batch_size, batch_size_fn=lambda new, count, sofar: count):\n \"\"\"Yield elements from data in chunks of batch_size.\"\"\"\n minibatch, size_so_far = [], 0\n for ex in data:\n minibatch.append(ex)\n size_so_far = batch_size_fn(ex, len(minibatch), size_so_far)\n if size_so_far == batch_size:\n yield minibatch\n minibatch, size_so_far = [], 0\n elif size_so_far > batch_size:\n yield minibatch[:-1]\n minibatch, size_so_far = minibatch[-1:], batch_size_fn(ex, 1, 0)\n if minibatch:\n yield minibatch\n\n\ndef pool(data, batch_size, key, batch_size_fn=lambda new, count, sofar: count,\n random_shuffler=None):\n \"\"\"Sort within buckets, then batch, then shuffle batches.\n\n Partitions data into chunks of size 100*batch_size, sorts examples within\n each chunk using sort_key, then batch these examples and shuffle the\n batches.\n \"\"\"\n if random_shuffler is None:\n random_shuffler = random.shuffle\n for p in batch(data, batch_size * 100, batch_size_fn):\n p_batch = batch(sorted(p, key=key), batch_size, batch_size_fn)\n for b in random_shuffler(list(p_batch)):\n yield b\n", "path": "torchtext/data/iterator.py"}]}
| 3,423 | 72 |
gh_patches_debug_20678
|
rasdani/github-patches
|
git_diff
|
freqtrade__freqtrade-7571
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Allow freqai to pull prediction_models from user_data
Currently, only classes present in `freqai/prediction_models` are available for backtesting/trading.
Allowing the user to define a custom model to be used with `--freqaimodel` would allow more flexibility.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `freqtrade/configuration/directory_operations.py`
Content:
```
1 import logging
2 import shutil
3 from pathlib import Path
4 from typing import Optional
5
6 from freqtrade.constants import USER_DATA_FILES, Config
7 from freqtrade.exceptions import OperationalException
8
9
10 logger = logging.getLogger(__name__)
11
12
13 def create_datadir(config: Config, datadir: Optional[str] = None) -> Path:
14
15 folder = Path(datadir) if datadir else Path(f"{config['user_data_dir']}/data")
16 if not datadir:
17 # set datadir
18 exchange_name = config.get('exchange', {}).get('name', '').lower()
19 folder = folder.joinpath(exchange_name)
20
21 if not folder.is_dir():
22 folder.mkdir(parents=True)
23 logger.info(f'Created data directory: {datadir}')
24 return folder
25
26
27 def chown_user_directory(directory: Path) -> None:
28 """
29 Use Sudo to change permissions of the home-directory if necessary
30 Only applies when running in docker!
31 """
32 import os
33 if os.environ.get('FT_APP_ENV') == 'docker':
34 try:
35 import subprocess
36 subprocess.check_output(
37 ['sudo', 'chown', '-R', 'ftuser:', str(directory.resolve())])
38 except Exception:
39 logger.warning(f"Could not chown {directory}")
40
41
42 def create_userdata_dir(directory: str, create_dir: bool = False) -> Path:
43 """
44 Create userdata directory structure.
45 if create_dir is True, then the parent-directory will be created if it does not exist.
46 Sub-directories will always be created if the parent directory exists.
47 Raises OperationalException if given a non-existing directory.
48 :param directory: Directory to check
49 :param create_dir: Create directory if it does not exist.
50 :return: Path object containing the directory
51 """
52 sub_dirs = ["backtest_results", "data", "hyperopts", "hyperopt_results", "logs",
53 "notebooks", "plot", "strategies", ]
54 folder = Path(directory)
55 chown_user_directory(folder)
56 if not folder.is_dir():
57 if create_dir:
58 folder.mkdir(parents=True)
59 logger.info(f'Created user-data directory: {folder}')
60 else:
61 raise OperationalException(
62 f"Directory `{folder}` does not exist. "
63 "Please use `freqtrade create-userdir` to create a user directory")
64
65 # Create required subdirectories
66 for f in sub_dirs:
67 subfolder = folder / f
68 if not subfolder.is_dir():
69 subfolder.mkdir(parents=False)
70 return folder
71
72
73 def copy_sample_files(directory: Path, overwrite: bool = False) -> None:
74 """
75 Copy files from templates to User data directory.
76 :param directory: Directory to copy data to
77 :param overwrite: Overwrite existing sample files
78 """
79 if not directory.is_dir():
80 raise OperationalException(f"Directory `{directory}` does not exist.")
81 sourcedir = Path(__file__).parents[1] / "templates"
82 for source, target in USER_DATA_FILES.items():
83 targetdir = directory / target
84 if not targetdir.is_dir():
85 raise OperationalException(f"Directory `{targetdir}` does not exist.")
86 targetfile = targetdir / source
87 if targetfile.exists():
88 if not overwrite:
89 logger.warning(f"File `{targetfile}` exists already, not deploying sample file.")
90 continue
91 logger.warning(f"File `{targetfile}` exists already, overwriting.")
92 shutil.copy(str(sourcedir / source), str(targetfile))
93
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/freqtrade/configuration/directory_operations.py b/freqtrade/configuration/directory_operations.py
--- a/freqtrade/configuration/directory_operations.py
+++ b/freqtrade/configuration/directory_operations.py
@@ -3,7 +3,8 @@
from pathlib import Path
from typing import Optional
-from freqtrade.constants import USER_DATA_FILES, Config
+from freqtrade.constants import (USER_DATA_FILES, USERPATH_FREQAIMODELS, USERPATH_HYPEROPTS,
+ USERPATH_NOTEBOOKS, USERPATH_STRATEGIES, Config)
from freqtrade.exceptions import OperationalException
@@ -49,8 +50,8 @@
:param create_dir: Create directory if it does not exist.
:return: Path object containing the directory
"""
- sub_dirs = ["backtest_results", "data", "hyperopts", "hyperopt_results", "logs",
- "notebooks", "plot", "strategies", ]
+ sub_dirs = ["backtest_results", "data", USERPATH_HYPEROPTS, "hyperopt_results", "logs",
+ USERPATH_NOTEBOOKS, "plot", USERPATH_STRATEGIES, USERPATH_FREQAIMODELS]
folder = Path(directory)
chown_user_directory(folder)
if not folder.is_dir():
|
{"golden_diff": "diff --git a/freqtrade/configuration/directory_operations.py b/freqtrade/configuration/directory_operations.py\n--- a/freqtrade/configuration/directory_operations.py\n+++ b/freqtrade/configuration/directory_operations.py\n@@ -3,7 +3,8 @@\n from pathlib import Path\n from typing import Optional\n \n-from freqtrade.constants import USER_DATA_FILES, Config\n+from freqtrade.constants import (USER_DATA_FILES, USERPATH_FREQAIMODELS, USERPATH_HYPEROPTS,\n+ USERPATH_NOTEBOOKS, USERPATH_STRATEGIES, Config)\n from freqtrade.exceptions import OperationalException\n \n \n@@ -49,8 +50,8 @@\n :param create_dir: Create directory if it does not exist.\n :return: Path object containing the directory\n \"\"\"\n- sub_dirs = [\"backtest_results\", \"data\", \"hyperopts\", \"hyperopt_results\", \"logs\",\n- \"notebooks\", \"plot\", \"strategies\", ]\n+ sub_dirs = [\"backtest_results\", \"data\", USERPATH_HYPEROPTS, \"hyperopt_results\", \"logs\",\n+ USERPATH_NOTEBOOKS, \"plot\", USERPATH_STRATEGIES, USERPATH_FREQAIMODELS]\n folder = Path(directory)\n chown_user_directory(folder)\n if not folder.is_dir():\n", "issue": "Allow freqai to pull prediction_models from user_data\nCurrently, only classes present in `freqai/prediction_models` are available for backtesting/trading.\r\nAllowing the user to define a custom model to be used with `--freqaimodel` would allow more flexibility.\n", "before_files": [{"content": "import logging\nimport shutil\nfrom pathlib import Path\nfrom typing import Optional\n\nfrom freqtrade.constants import USER_DATA_FILES, Config\nfrom freqtrade.exceptions import OperationalException\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef create_datadir(config: Config, datadir: Optional[str] = None) -> Path:\n\n folder = Path(datadir) if datadir else Path(f\"{config['user_data_dir']}/data\")\n if not datadir:\n # set datadir\n exchange_name = config.get('exchange', {}).get('name', '').lower()\n folder = folder.joinpath(exchange_name)\n\n if not folder.is_dir():\n folder.mkdir(parents=True)\n logger.info(f'Created data directory: {datadir}')\n return folder\n\n\ndef chown_user_directory(directory: Path) -> None:\n \"\"\"\n Use Sudo to change permissions of the home-directory if necessary\n Only applies when running in docker!\n \"\"\"\n import os\n if os.environ.get('FT_APP_ENV') == 'docker':\n try:\n import subprocess\n subprocess.check_output(\n ['sudo', 'chown', '-R', 'ftuser:', str(directory.resolve())])\n except Exception:\n logger.warning(f\"Could not chown {directory}\")\n\n\ndef create_userdata_dir(directory: str, create_dir: bool = False) -> Path:\n \"\"\"\n Create userdata directory structure.\n if create_dir is True, then the parent-directory will be created if it does not exist.\n Sub-directories will always be created if the parent directory exists.\n Raises OperationalException if given a non-existing directory.\n :param directory: Directory to check\n :param create_dir: Create directory if it does not exist.\n :return: Path object containing the directory\n \"\"\"\n sub_dirs = [\"backtest_results\", \"data\", \"hyperopts\", \"hyperopt_results\", \"logs\",\n \"notebooks\", \"plot\", \"strategies\", ]\n folder = Path(directory)\n chown_user_directory(folder)\n if not folder.is_dir():\n if create_dir:\n folder.mkdir(parents=True)\n logger.info(f'Created user-data directory: {folder}')\n else:\n raise OperationalException(\n f\"Directory `{folder}` does not exist. \"\n \"Please use `freqtrade create-userdir` to create a user directory\")\n\n # Create required subdirectories\n for f in sub_dirs:\n subfolder = folder / f\n if not subfolder.is_dir():\n subfolder.mkdir(parents=False)\n return folder\n\n\ndef copy_sample_files(directory: Path, overwrite: bool = False) -> None:\n \"\"\"\n Copy files from templates to User data directory.\n :param directory: Directory to copy data to\n :param overwrite: Overwrite existing sample files\n \"\"\"\n if not directory.is_dir():\n raise OperationalException(f\"Directory `{directory}` does not exist.\")\n sourcedir = Path(__file__).parents[1] / \"templates\"\n for source, target in USER_DATA_FILES.items():\n targetdir = directory / target\n if not targetdir.is_dir():\n raise OperationalException(f\"Directory `{targetdir}` does not exist.\")\n targetfile = targetdir / source\n if targetfile.exists():\n if not overwrite:\n logger.warning(f\"File `{targetfile}` exists already, not deploying sample file.\")\n continue\n logger.warning(f\"File `{targetfile}` exists already, overwriting.\")\n shutil.copy(str(sourcedir / source), str(targetfile))\n", "path": "freqtrade/configuration/directory_operations.py"}], "after_files": [{"content": "import logging\nimport shutil\nfrom pathlib import Path\nfrom typing import Optional\n\nfrom freqtrade.constants import (USER_DATA_FILES, USERPATH_FREQAIMODELS, USERPATH_HYPEROPTS,\n USERPATH_NOTEBOOKS, USERPATH_STRATEGIES, Config)\nfrom freqtrade.exceptions import OperationalException\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef create_datadir(config: Config, datadir: Optional[str] = None) -> Path:\n\n folder = Path(datadir) if datadir else Path(f\"{config['user_data_dir']}/data\")\n if not datadir:\n # set datadir\n exchange_name = config.get('exchange', {}).get('name', '').lower()\n folder = folder.joinpath(exchange_name)\n\n if not folder.is_dir():\n folder.mkdir(parents=True)\n logger.info(f'Created data directory: {datadir}')\n return folder\n\n\ndef chown_user_directory(directory: Path) -> None:\n \"\"\"\n Use Sudo to change permissions of the home-directory if necessary\n Only applies when running in docker!\n \"\"\"\n import os\n if os.environ.get('FT_APP_ENV') == 'docker':\n try:\n import subprocess\n subprocess.check_output(\n ['sudo', 'chown', '-R', 'ftuser:', str(directory.resolve())])\n except Exception:\n logger.warning(f\"Could not chown {directory}\")\n\n\ndef create_userdata_dir(directory: str, create_dir: bool = False) -> Path:\n \"\"\"\n Create userdata directory structure.\n if create_dir is True, then the parent-directory will be created if it does not exist.\n Sub-directories will always be created if the parent directory exists.\n Raises OperationalException if given a non-existing directory.\n :param directory: Directory to check\n :param create_dir: Create directory if it does not exist.\n :return: Path object containing the directory\n \"\"\"\n sub_dirs = [\"backtest_results\", \"data\", USERPATH_HYPEROPTS, \"hyperopt_results\", \"logs\",\n USERPATH_NOTEBOOKS, \"plot\", USERPATH_STRATEGIES, USERPATH_FREQAIMODELS]\n folder = Path(directory)\n chown_user_directory(folder)\n if not folder.is_dir():\n if create_dir:\n folder.mkdir(parents=True)\n logger.info(f'Created user-data directory: {folder}')\n else:\n raise OperationalException(\n f\"Directory `{folder}` does not exist. \"\n \"Please use `freqtrade create-userdir` to create a user directory\")\n\n # Create required subdirectories\n for f in sub_dirs:\n subfolder = folder / f\n if not subfolder.is_dir():\n subfolder.mkdir(parents=False)\n return folder\n\n\ndef copy_sample_files(directory: Path, overwrite: bool = False) -> None:\n \"\"\"\n Copy files from templates to User data directory.\n :param directory: Directory to copy data to\n :param overwrite: Overwrite existing sample files\n \"\"\"\n if not directory.is_dir():\n raise OperationalException(f\"Directory `{directory}` does not exist.\")\n sourcedir = Path(__file__).parents[1] / \"templates\"\n for source, target in USER_DATA_FILES.items():\n targetdir = directory / target\n if not targetdir.is_dir():\n raise OperationalException(f\"Directory `{targetdir}` does not exist.\")\n targetfile = targetdir / source\n if targetfile.exists():\n if not overwrite:\n logger.warning(f\"File `{targetfile}` exists already, not deploying sample file.\")\n continue\n logger.warning(f\"File `{targetfile}` exists already, overwriting.\")\n shutil.copy(str(sourcedir / source), str(targetfile))\n", "path": "freqtrade/configuration/directory_operations.py"}]}
| 1,238 | 276 |
gh_patches_debug_1592
|
rasdani/github-patches
|
git_diff
|
translate__translate-4805
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Outdated footer on the documentation website
The copyright notice in the docs webpage still state 2022 instead of 2023.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/conf.py`
Content:
```
1 # Configuration file for the Sphinx documentation builder.
2 #
3 # This file only contains a selection of the most common options. For a full
4 # list see the documentation:
5 # https://www.sphinx-doc.org/en/master/usage/configuration.html
6
7 # -- Path setup --------------------------------------------------------------
8
9 # If extensions (or modules to document with autodoc) are in another directory,
10 # add these directories to sys.path here. If the directory is relative to the
11 # documentation root, use os.path.abspath to make it absolute, like shown here.
12 #
13 import os
14 import sys
15
16
17 sys.path.insert(0, os.path.abspath("_ext"))
18 sys.path.insert(0, os.path.abspath("."))
19 sys.path.insert(0, os.path.abspath(".."))
20
21 # -- Project information -----------------------------------------------------
22
23 project = "Translate Toolkit"
24 copyright = "2002-2022, Translate"
25
26 # The short X.Y version.
27 version = "3.8.1"
28
29 # The full version, including alpha/beta/rc tags
30 release = version
31
32 # -- General configuration ---------------------------------------------------
33
34 # Add any Sphinx extension module names here, as strings. They can be
35 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
36 # ones.
37 extensions = [
38 "translate_docs",
39 "sphinx.ext.autodoc",
40 "sphinx.ext.coverage",
41 "sphinx.ext.extlinks",
42 "sphinx.ext.intersphinx",
43 "sphinx.ext.todo",
44 ]
45
46 # Add any paths that contain templates here, relative to this directory.
47 templates_path = ["_templates"]
48
49 # List of patterns, relative to source directory, that match files and
50 # directories to ignore when looking for source files.
51 # This pattern also affects html_static_path and html_extra_path.
52 exclude_patterns = ["_build", "_themes/README.rst", "releases/README.rst"]
53
54 # The master toctree document.
55 master_doc = "index"
56
57 # -- Missing modules --------------------------------------------------
58
59 autodoc_mock_imports = [
60 "aeidon",
61 "BeautifulSoup",
62 "glib",
63 "gobject",
64 "gtk",
65 "iniparse",
66 "vobject",
67 ]
68
69 # -- Options for HTML output -------------------------------------------------
70
71 # The theme to use for HTML and HTML Help pages. See the documentation for
72 # a list of builtin themes.
73 #
74 html_theme = "sphinx-bootstrap"
75
76 # Theme options are theme-specific and customize the look and feel of a theme
77 # further. For a list of options available for each theme, see the
78 # documentation.
79 html_theme_options = {
80 "nosidebar": True,
81 }
82
83 # Add any paths that contain custom themes here, relative to this directory.
84 html_theme_path = ["_themes"]
85
86 # Add any paths that contain custom static files (such as style sheets) here,
87 # relative to this directory. They are copied after the builtin static files,
88 # so a file named "default.css" will overwrite the builtin "default.css".
89 html_static_path = ["_static"]
90
91 # Output file base name for HTML help builder.
92 htmlhelp_basename = "TranslateToolkitdoc"
93
94
95 # -- Options for LaTeX output -------------------------------------------------
96
97 latex_elements = {
98 # The paper size ('letterpaper' or 'a4paper').
99 #'papersize': 'letterpaper',
100 # The font size ('10pt', '11pt' or '12pt').
101 #'pointsize': '10pt',
102 # Additional stuff for the LaTeX preamble.
103 #'preamble': '',
104 }
105
106 # Grouping the document tree into LaTeX files. List of tuples
107 # (source start file, target name, title, author, documentclass [howto/manual])
108 latex_documents = [
109 (
110 "index",
111 "TranslateToolkit.tex",
112 "Translate Toolkit Documentation",
113 "Translate.org.za",
114 "manual",
115 ),
116 ]
117
118 # The name of an image file (relative to this directory) to place at the top of
119 # the title page.
120 # latex_logo = None
121
122 # For "manual" documents, if this is true, then toplevel headings are parts,
123 # not chapters.
124 # latex_use_parts = False
125
126 # If true, show page references after internal links.
127 # latex_show_pagerefs = False
128
129 # If true, show URL addresses after external links.
130 # latex_show_urls = False
131
132 # Documents to append as an appendix to all manuals.
133 # latex_appendices = []
134
135 # If false, no module index is generated.
136 # latex_domain_indices = True
137
138
139 # -- Options for manual page output -------------------------------------------
140
141 # One entry per manual page. List of tuples
142 # (source start file, name, description, authors, manual section).
143 man_pages = [
144 (
145 "index",
146 "translatetoolkit",
147 "Translate Toolkit Documentation",
148 ["Translate.org.za"],
149 1,
150 )
151 ]
152
153 # If true, show URL addresses after external links.
154 # man_show_urls = False
155
156
157 # -- Options for Texinfo output -----------------------------------------------
158
159 # Grouping the document tree into Texinfo files. List of tuples
160 # (source start file, target name, title, author,
161 # dir menu entry, description, category)
162 texinfo_documents = [
163 (
164 "index",
165 "TranslateToolkit",
166 "Translate Toolkit Documentation",
167 "Translate.org.za",
168 "TranslateToolkit",
169 "One line description of project.",
170 "Miscellaneous",
171 ),
172 ]
173
174 # Documents to append as an appendix to all manuals.
175 # texinfo_appendices = []
176
177 # If false, no module index is generated.
178 # texinfo_domain_indices = True
179
180 # How to display URL addresses: 'footnote', 'no', or 'inline'.
181 # texinfo_show_urls = 'footnote'
182
183
184 # -- Coverage checker options -------------------------------------------------
185
186 coverage_ignore_modules = []
187
188 coverage_ignore_functions = ["main"]
189
190 coverage_ignore_classes = []
191
192 coverage_write_headline = False
193
194 # -- Options for intersphinx extension ---------------------------------------
195
196 intersphinx_mapping = {
197 "python": ("https://docs.python.org/3/", None),
198 "pytest": ("https://docs.pytest.org/en/latest/", None),
199 "django": (
200 "https://docs.djangoproject.com/en/stable/",
201 "https://docs.djangoproject.com/en/stable/_objects/",
202 ),
203 "pootle": ("https://docs.translatehouse.org/projects/pootle/en/latest/", None),
204 "guide": (
205 "https://docs.translatehouse.org/projects/localization-guide/en/latest/",
206 None,
207 ),
208 }
209
210
211 # -- Options for Exernal links -------------------------------------------------
212
213 extlinks = {
214 # :role: (URL, prefix)
215 "issue": ("https://github.com/translate/translate/issues/%s", "issue %s"),
216 "man": ("https://linux.die.net/man/1/%s", "%s"),
217 "wp": ("https://en.wikipedia.org/wiki/%s", "%s"),
218 }
219
220 # -- Options for Linkcheck -------------------------------------------------
221
222 # Add regex's here for links that should be ignored.
223 linkcheck_ignore = [
224 "http://your_server.com/filename.html", # Example URL
225 ".*localhost.*",
226 ]
227
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -21,7 +21,7 @@
# -- Project information -----------------------------------------------------
project = "Translate Toolkit"
-copyright = "2002-2022, Translate"
+copyright = "2002-2023, Translate"
# The short X.Y version.
version = "3.8.1"
|
{"golden_diff": "diff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -21,7 +21,7 @@\n # -- Project information -----------------------------------------------------\n \n project = \"Translate Toolkit\"\n-copyright = \"2002-2022, Translate\"\n+copyright = \"2002-2023, Translate\"\n \n # The short X.Y version.\n version = \"3.8.1\"\n", "issue": "Outdated footer on the documentation website\nThe copyright notice in the docs webpage still state 2022 instead of 2023.\n", "before_files": [{"content": "# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\n\n\nsys.path.insert(0, os.path.abspath(\"_ext\"))\nsys.path.insert(0, os.path.abspath(\".\"))\nsys.path.insert(0, os.path.abspath(\"..\"))\n\n# -- Project information -----------------------------------------------------\n\nproject = \"Translate Toolkit\"\ncopyright = \"2002-2022, Translate\"\n\n# The short X.Y version.\nversion = \"3.8.1\"\n\n# The full version, including alpha/beta/rc tags\nrelease = version\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n \"translate_docs\",\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.coverage\",\n \"sphinx.ext.extlinks\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.todo\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [\"_build\", \"_themes/README.rst\", \"releases/README.rst\"]\n\n# The master toctree document.\nmaster_doc = \"index\"\n\n# -- Missing modules --------------------------------------------------\n\nautodoc_mock_imports = [\n \"aeidon\",\n \"BeautifulSoup\",\n \"glib\",\n \"gobject\",\n \"gtk\",\n \"iniparse\",\n \"vobject\",\n]\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"sphinx-bootstrap\"\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n \"nosidebar\": True,\n}\n\n# Add any paths that contain custom themes here, relative to this directory.\nhtml_theme_path = [\"_themes\"]\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = \"TranslateToolkitdoc\"\n\n\n# -- Options for LaTeX output -------------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #'papersize': 'letterpaper',\n # The font size ('10pt', '11pt' or '12pt').\n #'pointsize': '10pt',\n # Additional stuff for the LaTeX preamble.\n #'preamble': '',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title, author, documentclass [howto/manual])\nlatex_documents = [\n (\n \"index\",\n \"TranslateToolkit.tex\",\n \"Translate Toolkit Documentation\",\n \"Translate.org.za\",\n \"manual\",\n ),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n# latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n# latex_use_parts = False\n\n# If true, show page references after internal links.\n# latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n# latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n# latex_appendices = []\n\n# If false, no module index is generated.\n# latex_domain_indices = True\n\n\n# -- Options for manual page output -------------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n (\n \"index\",\n \"translatetoolkit\",\n \"Translate Toolkit Documentation\",\n [\"Translate.org.za\"],\n 1,\n )\n]\n\n# If true, show URL addresses after external links.\n# man_show_urls = False\n\n\n# -- Options for Texinfo output -----------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (\n \"index\",\n \"TranslateToolkit\",\n \"Translate Toolkit Documentation\",\n \"Translate.org.za\",\n \"TranslateToolkit\",\n \"One line description of project.\",\n \"Miscellaneous\",\n ),\n]\n\n# Documents to append as an appendix to all manuals.\n# texinfo_appendices = []\n\n# If false, no module index is generated.\n# texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n# texinfo_show_urls = 'footnote'\n\n\n# -- Coverage checker options -------------------------------------------------\n\ncoverage_ignore_modules = []\n\ncoverage_ignore_functions = [\"main\"]\n\ncoverage_ignore_classes = []\n\ncoverage_write_headline = False\n\n# -- Options for intersphinx extension ---------------------------------------\n\nintersphinx_mapping = {\n \"python\": (\"https://docs.python.org/3/\", None),\n \"pytest\": (\"https://docs.pytest.org/en/latest/\", None),\n \"django\": (\n \"https://docs.djangoproject.com/en/stable/\",\n \"https://docs.djangoproject.com/en/stable/_objects/\",\n ),\n \"pootle\": (\"https://docs.translatehouse.org/projects/pootle/en/latest/\", None),\n \"guide\": (\n \"https://docs.translatehouse.org/projects/localization-guide/en/latest/\",\n None,\n ),\n}\n\n\n# -- Options for Exernal links -------------------------------------------------\n\nextlinks = {\n # :role: (URL, prefix)\n \"issue\": (\"https://github.com/translate/translate/issues/%s\", \"issue %s\"),\n \"man\": (\"https://linux.die.net/man/1/%s\", \"%s\"),\n \"wp\": (\"https://en.wikipedia.org/wiki/%s\", \"%s\"),\n}\n\n# -- Options for Linkcheck -------------------------------------------------\n\n# Add regex's here for links that should be ignored.\nlinkcheck_ignore = [\n \"http://your_server.com/filename.html\", # Example URL\n \".*localhost.*\",\n]\n", "path": "docs/conf.py"}], "after_files": [{"content": "# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\n\n\nsys.path.insert(0, os.path.abspath(\"_ext\"))\nsys.path.insert(0, os.path.abspath(\".\"))\nsys.path.insert(0, os.path.abspath(\"..\"))\n\n# -- Project information -----------------------------------------------------\n\nproject = \"Translate Toolkit\"\ncopyright = \"2002-2023, Translate\"\n\n# The short X.Y version.\nversion = \"3.8.1\"\n\n# The full version, including alpha/beta/rc tags\nrelease = version\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n \"translate_docs\",\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.coverage\",\n \"sphinx.ext.extlinks\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.todo\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [\"_build\", \"_themes/README.rst\", \"releases/README.rst\"]\n\n# The master toctree document.\nmaster_doc = \"index\"\n\n# -- Missing modules --------------------------------------------------\n\nautodoc_mock_imports = [\n \"aeidon\",\n \"BeautifulSoup\",\n \"glib\",\n \"gobject\",\n \"gtk\",\n \"iniparse\",\n \"vobject\",\n]\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"sphinx-bootstrap\"\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n \"nosidebar\": True,\n}\n\n# Add any paths that contain custom themes here, relative to this directory.\nhtml_theme_path = [\"_themes\"]\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = \"TranslateToolkitdoc\"\n\n\n# -- Options for LaTeX output -------------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #'papersize': 'letterpaper',\n # The font size ('10pt', '11pt' or '12pt').\n #'pointsize': '10pt',\n # Additional stuff for the LaTeX preamble.\n #'preamble': '',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title, author, documentclass [howto/manual])\nlatex_documents = [\n (\n \"index\",\n \"TranslateToolkit.tex\",\n \"Translate Toolkit Documentation\",\n \"Translate.org.za\",\n \"manual\",\n ),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n# latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n# latex_use_parts = False\n\n# If true, show page references after internal links.\n# latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n# latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n# latex_appendices = []\n\n# If false, no module index is generated.\n# latex_domain_indices = True\n\n\n# -- Options for manual page output -------------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n (\n \"index\",\n \"translatetoolkit\",\n \"Translate Toolkit Documentation\",\n [\"Translate.org.za\"],\n 1,\n )\n]\n\n# If true, show URL addresses after external links.\n# man_show_urls = False\n\n\n# -- Options for Texinfo output -----------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (\n \"index\",\n \"TranslateToolkit\",\n \"Translate Toolkit Documentation\",\n \"Translate.org.za\",\n \"TranslateToolkit\",\n \"One line description of project.\",\n \"Miscellaneous\",\n ),\n]\n\n# Documents to append as an appendix to all manuals.\n# texinfo_appendices = []\n\n# If false, no module index is generated.\n# texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n# texinfo_show_urls = 'footnote'\n\n\n# -- Coverage checker options -------------------------------------------------\n\ncoverage_ignore_modules = []\n\ncoverage_ignore_functions = [\"main\"]\n\ncoverage_ignore_classes = []\n\ncoverage_write_headline = False\n\n# -- Options for intersphinx extension ---------------------------------------\n\nintersphinx_mapping = {\n \"python\": (\"https://docs.python.org/3/\", None),\n \"pytest\": (\"https://docs.pytest.org/en/latest/\", None),\n \"django\": (\n \"https://docs.djangoproject.com/en/stable/\",\n \"https://docs.djangoproject.com/en/stable/_objects/\",\n ),\n \"pootle\": (\"https://docs.translatehouse.org/projects/pootle/en/latest/\", None),\n \"guide\": (\n \"https://docs.translatehouse.org/projects/localization-guide/en/latest/\",\n None,\n ),\n}\n\n\n# -- Options for Exernal links -------------------------------------------------\n\nextlinks = {\n # :role: (URL, prefix)\n \"issue\": (\"https://github.com/translate/translate/issues/%s\", \"issue %s\"),\n \"man\": (\"https://linux.die.net/man/1/%s\", \"%s\"),\n \"wp\": (\"https://en.wikipedia.org/wiki/%s\", \"%s\"),\n}\n\n# -- Options for Linkcheck -------------------------------------------------\n\n# Add regex's here for links that should be ignored.\nlinkcheck_ignore = [\n \"http://your_server.com/filename.html\", # Example URL\n \".*localhost.*\",\n]\n", "path": "docs/conf.py"}]}
| 2,368 | 98 |
gh_patches_debug_1797
|
rasdani/github-patches
|
git_diff
|
readthedocs__readthedocs.org-5346
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove all warnings from pytest
When running `tox` we see these warnings in the summary.
We should use `request` fixture and access to `request.config` instead.
Docs: https://docs.pytest.org/en/latest/fixture.html#request-context
Change log: https://docs.pytest.org/en/latest/deprecations.html#pytest-config-global
```
====================================================================================== warnings summary ======================================================================================
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs_index
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs_index_no_directory_urls
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs_no_directory_urls
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_and_page
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_and_page_htmldir
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_and_page_signlehtml
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_htmldir
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_singlehtml
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_only
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_only_htmldir
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_only_singlehtml
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_restructured_text
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_restructured_text_invalid
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_and_page
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_and_page_htmldir
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_and_page_singlehtml
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_htmldir
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_singlehtml
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_only
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_only_htmldir
readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_only_singlehtml
/home/humitos/rtfd/code/readthedocs-corporate/.tox/py36/readthedocs.org/readthedocs/rtd_tests/tests/test_core_tags.py:19: PytestDeprecationWarning: the `pytest.config` global is deprecated. Please use `request.config` or `pytest_configure` (if you're a pytest plugin) instead.
scheme=pytest.config.option.url_scheme,
-- Docs: https://docs.pytest.org/en/latest/warnings.html
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conftest.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 import pytest
3 from django.conf import settings
4 from rest_framework.test import APIClient
5
6 try:
7 # TODO: this file is read/executed even when called from ``readthedocsinc``,
8 # so it's overriding the options that we are defining in the ``conftest.py``
9 # from the corporate site. We need to find a better way to avoid this.
10 import readthedocsinc
11 PYTEST_OPTIONS = ()
12 except ImportError:
13 PYTEST_OPTIONS = (
14 # Options to set test environment
15 ('community', True),
16 ('corporate', False),
17 ('environment', 'readthedocs'),
18
19 ('url_scheme', 'http'),
20 )
21
22
23 def pytest_addoption(parser):
24 parser.addoption(
25 '--including-search',
26 action='store_true',
27 dest='searchtests',
28 default=False, help='enable search tests',
29 )
30
31
32 def pytest_configure(config):
33 if not config.option.searchtests:
34 # Include ``not search``` to parameters so search tests do not perform
35 markexpr = getattr(config.option, 'markexpr')
36 if markexpr:
37 markexpr += ' and not search'
38 else:
39 markexpr = 'not search'
40 setattr(config.option, 'markexpr', markexpr.strip())
41
42 for option, value in PYTEST_OPTIONS:
43 setattr(config.option, option, value)
44
45
46 @pytest.fixture(autouse=True)
47 def settings_modification(settings):
48 settings.CELERY_ALWAYS_EAGER = True
49
50 @pytest.fixture
51 def api_client():
52 return APIClient()
53
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/conftest.py b/conftest.py
--- a/conftest.py
+++ b/conftest.py
@@ -47,6 +47,12 @@
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
+
@pytest.fixture
def api_client():
return APIClient()
+
+
[email protected](scope="class")
+def url_scheme(request):
+ request.cls.url_scheme = request.config.option.url_scheme
|
{"golden_diff": "diff --git a/conftest.py b/conftest.py\n--- a/conftest.py\n+++ b/conftest.py\n@@ -47,6 +47,12 @@\n def settings_modification(settings):\n settings.CELERY_ALWAYS_EAGER = True\n \n+\n @pytest.fixture\n def api_client():\n return APIClient()\n+\n+\[email protected](scope=\"class\")\n+def url_scheme(request):\n+ request.cls.url_scheme = request.config.option.url_scheme\n", "issue": "Remove all warnings from pytest\nWhen running `tox` we see these warnings in the summary.\r\n\r\nWe should use `request` fixture and access to `request.config` instead.\r\n\r\nDocs: https://docs.pytest.org/en/latest/fixture.html#request-context\r\nChange log: https://docs.pytest.org/en/latest/deprecations.html#pytest-config-global\r\n\r\n\r\n```\r\n====================================================================================== warnings summary ======================================================================================\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs_index\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs_index_no_directory_urls\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs_no_directory_urls\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_and_page\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_and_page_htmldir\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_and_page_signlehtml\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_htmldir\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_singlehtml\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_only\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_only_htmldir\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_only_singlehtml\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_restructured_text\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_restructured_text_invalid\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_and_page\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_and_page_htmldir\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_and_page_singlehtml\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_htmldir\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_singlehtml\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_only\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_only_htmldir\r\nreadthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_only_singlehtml\r\n /home/humitos/rtfd/code/readthedocs-corporate/.tox/py36/readthedocs.org/readthedocs/rtd_tests/tests/test_core_tags.py:19: PytestDeprecationWarning: the `pytest.config` global is deprecated. Please use `request.config` or `pytest_configure` (if you're a pytest plugin) instead.\r\n scheme=pytest.config.option.url_scheme,\r\n\r\n-- Docs: https://docs.pytest.org/en/latest/warnings.html\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport pytest\nfrom django.conf import settings\nfrom rest_framework.test import APIClient\n\ntry:\n # TODO: this file is read/executed even when called from ``readthedocsinc``,\n # so it's overriding the options that we are defining in the ``conftest.py``\n # from the corporate site. We need to find a better way to avoid this.\n import readthedocsinc\n PYTEST_OPTIONS = ()\nexcept ImportError:\n PYTEST_OPTIONS = (\n # Options to set test environment\n ('community', True),\n ('corporate', False),\n ('environment', 'readthedocs'),\n\n ('url_scheme', 'http'),\n )\n\n\ndef pytest_addoption(parser):\n parser.addoption(\n '--including-search',\n action='store_true',\n dest='searchtests',\n default=False, help='enable search tests',\n )\n\n\ndef pytest_configure(config):\n if not config.option.searchtests:\n # Include ``not search``` to parameters so search tests do not perform\n markexpr = getattr(config.option, 'markexpr')\n if markexpr:\n markexpr += ' and not search'\n else:\n markexpr = 'not search'\n setattr(config.option, 'markexpr', markexpr.strip())\n\n for option, value in PYTEST_OPTIONS:\n setattr(config.option, option, value)\n\n\[email protected](autouse=True)\ndef settings_modification(settings):\n settings.CELERY_ALWAYS_EAGER = True\n\[email protected]\ndef api_client():\n return APIClient()\n", "path": "conftest.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nimport pytest\nfrom django.conf import settings\nfrom rest_framework.test import APIClient\n\ntry:\n # TODO: this file is read/executed even when called from ``readthedocsinc``,\n # so it's overriding the options that we are defining in the ``conftest.py``\n # from the corporate site. We need to find a better way to avoid this.\n import readthedocsinc\n PYTEST_OPTIONS = ()\nexcept ImportError:\n PYTEST_OPTIONS = (\n # Options to set test environment\n ('community', True),\n ('corporate', False),\n ('environment', 'readthedocs'),\n\n ('url_scheme', 'http'),\n )\n\n\ndef pytest_addoption(parser):\n parser.addoption(\n '--including-search',\n action='store_true',\n dest='searchtests',\n default=False, help='enable search tests',\n )\n\n\ndef pytest_configure(config):\n if not config.option.searchtests:\n # Include ``not search``` to parameters so search tests do not perform\n markexpr = getattr(config.option, 'markexpr')\n if markexpr:\n markexpr += ' and not search'\n else:\n markexpr = 'not search'\n setattr(config.option, 'markexpr', markexpr.strip())\n\n for option, value in PYTEST_OPTIONS:\n setattr(config.option, option, value)\n\n\[email protected](autouse=True)\ndef settings_modification(settings):\n settings.CELERY_ALWAYS_EAGER = True\n\n\[email protected]\ndef api_client():\n return APIClient()\n\n\[email protected](scope=\"class\")\ndef url_scheme(request):\n request.cls.url_scheme = request.config.option.url_scheme\n", "path": "conftest.py"}]}
| 1,437 | 101 |
gh_patches_debug_22245
|
rasdani/github-patches
|
git_diff
|
saulpw__visidata-543
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
TSV file with column name "length" causes TypeError
**Small description**
Files in TSV format containing a column named `length` cannot be loaded.
**Expected result**
See content of TSV file.
**Actual result with screenshot**
An empty file is shown. In the footer line it says:
```
TypeError: 'property' object is not callable
```
**Steps to reproduce with sample data and a .vd**
Create a file named `test.tsv` with this content:
```
length
1
```
Then, try to open it:
```
vd test.tsv
```
**Additional context**
version 1.5.2
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `visidata/utils.py`
Content:
```
1 import operator
2
3 'Various helper classes and functions.'
4
5 __all__ = ['AttrDict', 'joinSheetnames', 'moveListItem', 'namedlist', 'classproperty']
6
7
8 class AttrDict(dict):
9 'Augment a dict with more convenient .attr syntax. not-present keys return None.'
10 def __getattr__(self, k):
11 try:
12 return self[k]
13 except KeyError:
14 return None
15
16 def __setattr__(self, k, v):
17 self[k] = v
18
19 def __dir__(self):
20 return self.keys()
21
22
23 class classproperty(property):
24 def __get__(self, cls, obj):
25 return classmethod(self.fget).__get__(None, obj or cls)()
26
27
28 def joinSheetnames(*sheetnames):
29 'Concatenate sheet names in a standard way'
30 return '_'.join(str(x) for x in sheetnames)
31
32
33 def moveListItem(L, fromidx, toidx):
34 "Move element within list `L` and return element's new index."
35 toidx = min(max(toidx, 0), len(L)-1)
36 fromidx = min(max(fromidx, 0), len(L)-1)
37 r = L.pop(fromidx)
38 L.insert(toidx, r)
39 return toidx
40
41
42 class OnExit:
43 '"with OnExit(func, ...):" calls func(...) when the context is exited'
44 def __init__(self, func, *args, **kwargs):
45 self.func = func
46 self.args = args
47 self.kwargs = kwargs
48
49 def __enter__(self):
50 return self
51
52 def __exit__(self, exc_type, exc_value, exc_traceback):
53 try:
54 self.func(*self.args, **self.kwargs)
55 except Exception as e:
56 vd.exceptionCaught(e)
57
58
59 def itemsetter(i):
60 def g(obj, v):
61 obj[i] = v
62 return g
63
64
65 def namedlist(objname, fieldnames):
66 'like namedtuple but editable'
67 class NamedListTemplate(list):
68 __name__ = objname
69 _fields = fieldnames
70
71 def __init__(self, L=None, **kwargs):
72 if L is None:
73 L = [None]*self.length()
74 elif len(L) < self.length():
75 L.extend([None]*(self.length() - len(L)))
76 super().__init__(L)
77 for k, v in kwargs.items():
78 setattr(self, k, v)
79
80 @classmethod
81 def length(cls):
82 return len(cls._fields)
83
84 def __getattr__(self, k):
85 'to enable .fieldname'
86 try:
87 return self[self._fields.index(k)]
88 except ValueError:
89 raise AttributeError
90
91 def __setattr__(self, k, v):
92 'to enable .fieldname ='
93 try:
94 self[self._fields.index(k)] = v
95 except ValueError:
96 super().__setattr__(k, v)
97
98 for i, attrname in enumerate(fieldnames):
99 # create property getter/setter for each field
100 setattr(NamedListTemplate, attrname, property(operator.itemgetter(i), itemsetter(i)))
101
102 return NamedListTemplate
103
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/visidata/utils.py b/visidata/utils.py
--- a/visidata/utils.py
+++ b/visidata/utils.py
@@ -70,17 +70,13 @@
def __init__(self, L=None, **kwargs):
if L is None:
- L = [None]*self.length()
- elif len(L) < self.length():
- L.extend([None]*(self.length() - len(L)))
+ L = [None]*len(self._fields)
+ elif len(L) < len(self._fields):
+ L.extend([None]*(len(self._fields) - len(L)))
super().__init__(L)
for k, v in kwargs.items():
setattr(self, k, v)
- @classmethod
- def length(cls):
- return len(cls._fields)
-
def __getattr__(self, k):
'to enable .fieldname'
try:
@@ -95,8 +91,4 @@
except ValueError:
super().__setattr__(k, v)
- for i, attrname in enumerate(fieldnames):
- # create property getter/setter for each field
- setattr(NamedListTemplate, attrname, property(operator.itemgetter(i), itemsetter(i)))
-
return NamedListTemplate
|
{"golden_diff": "diff --git a/visidata/utils.py b/visidata/utils.py\n--- a/visidata/utils.py\n+++ b/visidata/utils.py\n@@ -70,17 +70,13 @@\n \n def __init__(self, L=None, **kwargs):\n if L is None:\n- L = [None]*self.length()\n- elif len(L) < self.length():\n- L.extend([None]*(self.length() - len(L)))\n+ L = [None]*len(self._fields)\n+ elif len(L) < len(self._fields):\n+ L.extend([None]*(len(self._fields) - len(L)))\n super().__init__(L)\n for k, v in kwargs.items():\n setattr(self, k, v)\n \n- @classmethod\n- def length(cls):\n- return len(cls._fields)\n-\n def __getattr__(self, k):\n 'to enable .fieldname'\n try:\n@@ -95,8 +91,4 @@\n except ValueError:\n super().__setattr__(k, v)\n \n- for i, attrname in enumerate(fieldnames):\n- # create property getter/setter for each field\n- setattr(NamedListTemplate, attrname, property(operator.itemgetter(i), itemsetter(i)))\n-\n return NamedListTemplate\n", "issue": "TSV file with column name \"length\" causes TypeError\n**Small description**\r\nFiles in TSV format containing a column named `length` cannot be loaded.\r\n\r\n**Expected result**\r\nSee content of TSV file.\r\n\r\n**Actual result with screenshot**\r\nAn empty file is shown. In the footer line it says:\r\n```\r\nTypeError: 'property' object is not callable\r\n```\r\n\r\n**Steps to reproduce with sample data and a .vd**\r\nCreate a file named `test.tsv` with this content:\r\n```\r\nlength\r\n1\r\n```\r\nThen, try to open it:\r\n```\r\nvd test.tsv\r\n```\r\n\r\n**Additional context**\r\nversion 1.5.2\r\n\n", "before_files": [{"content": "import operator\n\n'Various helper classes and functions.'\n\n__all__ = ['AttrDict', 'joinSheetnames', 'moveListItem', 'namedlist', 'classproperty']\n\n\nclass AttrDict(dict):\n 'Augment a dict with more convenient .attr syntax. not-present keys return None.'\n def __getattr__(self, k):\n try:\n return self[k]\n except KeyError:\n return None\n\n def __setattr__(self, k, v):\n self[k] = v\n\n def __dir__(self):\n return self.keys()\n\n\nclass classproperty(property):\n def __get__(self, cls, obj):\n return classmethod(self.fget).__get__(None, obj or cls)()\n\n\ndef joinSheetnames(*sheetnames):\n 'Concatenate sheet names in a standard way'\n return '_'.join(str(x) for x in sheetnames)\n\n\ndef moveListItem(L, fromidx, toidx):\n \"Move element within list `L` and return element's new index.\"\n toidx = min(max(toidx, 0), len(L)-1)\n fromidx = min(max(fromidx, 0), len(L)-1)\n r = L.pop(fromidx)\n L.insert(toidx, r)\n return toidx\n\n\nclass OnExit:\n '\"with OnExit(func, ...):\" calls func(...) when the context is exited'\n def __init__(self, func, *args, **kwargs):\n self.func = func\n self.args = args\n self.kwargs = kwargs\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, exc_traceback):\n try:\n self.func(*self.args, **self.kwargs)\n except Exception as e:\n vd.exceptionCaught(e)\n\n\ndef itemsetter(i):\n def g(obj, v):\n obj[i] = v\n return g\n\n\ndef namedlist(objname, fieldnames):\n 'like namedtuple but editable'\n class NamedListTemplate(list):\n __name__ = objname\n _fields = fieldnames\n\n def __init__(self, L=None, **kwargs):\n if L is None:\n L = [None]*self.length()\n elif len(L) < self.length():\n L.extend([None]*(self.length() - len(L)))\n super().__init__(L)\n for k, v in kwargs.items():\n setattr(self, k, v)\n\n @classmethod\n def length(cls):\n return len(cls._fields)\n\n def __getattr__(self, k):\n 'to enable .fieldname'\n try:\n return self[self._fields.index(k)]\n except ValueError:\n raise AttributeError\n\n def __setattr__(self, k, v):\n 'to enable .fieldname ='\n try:\n self[self._fields.index(k)] = v\n except ValueError:\n super().__setattr__(k, v)\n\n for i, attrname in enumerate(fieldnames):\n # create property getter/setter for each field\n setattr(NamedListTemplate, attrname, property(operator.itemgetter(i), itemsetter(i)))\n\n return NamedListTemplate\n", "path": "visidata/utils.py"}], "after_files": [{"content": "import operator\n\n'Various helper classes and functions.'\n\n__all__ = ['AttrDict', 'joinSheetnames', 'moveListItem', 'namedlist', 'classproperty']\n\n\nclass AttrDict(dict):\n 'Augment a dict with more convenient .attr syntax. not-present keys return None.'\n def __getattr__(self, k):\n try:\n return self[k]\n except KeyError:\n return None\n\n def __setattr__(self, k, v):\n self[k] = v\n\n def __dir__(self):\n return self.keys()\n\n\nclass classproperty(property):\n def __get__(self, cls, obj):\n return classmethod(self.fget).__get__(None, obj or cls)()\n\n\ndef joinSheetnames(*sheetnames):\n 'Concatenate sheet names in a standard way'\n return '_'.join(str(x) for x in sheetnames)\n\n\ndef moveListItem(L, fromidx, toidx):\n \"Move element within list `L` and return element's new index.\"\n toidx = min(max(toidx, 0), len(L)-1)\n fromidx = min(max(fromidx, 0), len(L)-1)\n r = L.pop(fromidx)\n L.insert(toidx, r)\n return toidx\n\n\nclass OnExit:\n '\"with OnExit(func, ...):\" calls func(...) when the context is exited'\n def __init__(self, func, *args, **kwargs):\n self.func = func\n self.args = args\n self.kwargs = kwargs\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, exc_traceback):\n try:\n self.func(*self.args, **self.kwargs)\n except Exception as e:\n vd.exceptionCaught(e)\n\n\ndef itemsetter(i):\n def g(obj, v):\n obj[i] = v\n return g\n\n\ndef namedlist(objname, fieldnames):\n 'like namedtuple but editable'\n class NamedListTemplate(list):\n __name__ = objname\n _fields = fieldnames\n\n def __init__(self, L=None, **kwargs):\n if L is None:\n L = [None]*len(self._fields)\n elif len(L) < len(self._fields):\n L.extend([None]*(len(self._fields) - len(L)))\n super().__init__(L)\n for k, v in kwargs.items():\n setattr(self, k, v)\n\n def __getattr__(self, k):\n 'to enable .fieldname'\n try:\n return self[self._fields.index(k)]\n except ValueError:\n raise AttributeError\n\n def __setattr__(self, k, v):\n 'to enable .fieldname ='\n try:\n self[self._fields.index(k)] = v\n except ValueError:\n super().__setattr__(k, v)\n\n return NamedListTemplate\n", "path": "visidata/utils.py"}]}
| 1,277 | 282 |
gh_patches_debug_8342
|
rasdani/github-patches
|
git_diff
|
PaddlePaddle__models-799
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
policy_gradient 原理介绍部分内容格式存在问题
https://github.com/PaddlePaddle/models/tree/develop/fluid/policy_gradient
policy_gradient demo介绍部分,看起来格式存在问题,能辛苦调整下吗?或者以什么样的方式可以看到原始的文档呢? @wanghaoshuang @lcy-seso
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `fluid/policy_gradient/brain.py`
Content:
```
1 import numpy as np
2 import paddle.v2 as paddle
3 import paddle.fluid as fluid
4 # reproducible
5 np.random.seed(1)
6
7
8 class PolicyGradient:
9 def __init__(
10 self,
11 n_actions,
12 n_features,
13 learning_rate=0.01,
14 reward_decay=0.95,
15 output_graph=False, ):
16 self.n_actions = n_actions
17 self.n_features = n_features
18 self.lr = learning_rate
19 self.gamma = reward_decay
20
21 self.ep_obs, self.ep_as, self.ep_rs = [], [], []
22
23 self.place = fluid.CPUPlace()
24 self.exe = fluid.Executor(self.place)
25
26 def build_net(self):
27
28 obs = fluid.layers.data(
29 name='obs', shape=[self.n_features], dtype='float32')
30 acts = fluid.layers.data(name='acts', shape=[1], dtype='int64')
31 vt = fluid.layers.data(name='vt', shape=[1], dtype='float32')
32 # fc1
33 fc1 = fluid.layers.fc(
34 input=obs,
35 size=10,
36 act="tanh" # tanh activation
37 )
38 # fc2
39 self.all_act_prob = fluid.layers.fc(input=fc1,
40 size=self.n_actions,
41 act="softmax")
42 # to maximize total reward (log_p * R) is to minimize -(log_p * R)
43 neg_log_prob = fluid.layers.cross_entropy(
44 input=self.all_act_prob,
45 label=acts) # this is negative log of chosen action
46 neg_log_prob_weight = fluid.layers.elementwise_mul(x=neg_log_prob, y=vt)
47 loss = fluid.layers.reduce_mean(
48 x=neg_log_prob_weight) # reward guided loss
49
50 sgd_optimizer = fluid.optimizer.SGD(self.lr)
51 sgd_optimizer.minimize(loss)
52 self.exe.run(fluid.default_startup_program())
53
54 def choose_action(self, observation):
55 prob_weights = self.exe.run(
56 fluid.default_main_program().prune(self.all_act_prob),
57 feed={"obs": observation[np.newaxis, :]},
58 fetch_list=[self.all_act_prob])
59 prob_weights = np.array(prob_weights[0])
60 action = np.random.choice(
61 range(prob_weights.shape[1]),
62 p=prob_weights.ravel()) # select action w.r.t the actions prob
63 return action
64
65 def store_transition(self, s, a, r):
66 self.ep_obs.append(s)
67 self.ep_as.append(a)
68 self.ep_rs.append(r)
69
70 def learn(self):
71 # discount and normalize episode reward
72 discounted_ep_rs_norm = self._discount_and_norm_rewards()
73 tensor_obs = np.vstack(self.ep_obs).astype("float32")
74 tensor_as = np.array(self.ep_as).astype("int64")
75 tensor_as = tensor_as.reshape([tensor_as.shape[0], 1])
76 tensor_vt = discounted_ep_rs_norm.astype("float32")[:, np.newaxis]
77 # train on episode
78 self.exe.run(
79 fluid.default_main_program(),
80 feed={
81 "obs": tensor_obs, # shape=[None, n_obs]
82 "acts": tensor_as, # shape=[None, ]
83 "vt": tensor_vt # shape=[None, ]
84 })
85 self.ep_obs, self.ep_as, self.ep_rs = [], [], [] # empty episode data
86 return discounted_ep_rs_norm
87
88 def _discount_and_norm_rewards(self):
89 # discount episode rewards
90 discounted_ep_rs = np.zeros_like(self.ep_rs)
91 running_add = 0
92 for t in reversed(range(0, len(self.ep_rs))):
93 running_add = running_add * self.gamma + self.ep_rs[t]
94 discounted_ep_rs[t] = running_add
95
96 # normalize episode rewards
97 discounted_ep_rs -= np.mean(discounted_ep_rs)
98 discounted_ep_rs /= np.std(discounted_ep_rs)
99 return discounted_ep_rs
100
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/fluid/policy_gradient/brain.py b/fluid/policy_gradient/brain.py
--- a/fluid/policy_gradient/brain.py
+++ b/fluid/policy_gradient/brain.py
@@ -45,7 +45,7 @@
label=acts) # this is negative log of chosen action
neg_log_prob_weight = fluid.layers.elementwise_mul(x=neg_log_prob, y=vt)
loss = fluid.layers.reduce_mean(
- x=neg_log_prob_weight) # reward guided loss
+ neg_log_prob_weight) # reward guided loss
sgd_optimizer = fluid.optimizer.SGD(self.lr)
sgd_optimizer.minimize(loss)
|
{"golden_diff": "diff --git a/fluid/policy_gradient/brain.py b/fluid/policy_gradient/brain.py\n--- a/fluid/policy_gradient/brain.py\n+++ b/fluid/policy_gradient/brain.py\n@@ -45,7 +45,7 @@\n label=acts) # this is negative log of chosen action\n neg_log_prob_weight = fluid.layers.elementwise_mul(x=neg_log_prob, y=vt)\n loss = fluid.layers.reduce_mean(\n- x=neg_log_prob_weight) # reward guided loss\n+ neg_log_prob_weight) # reward guided loss\n \n sgd_optimizer = fluid.optimizer.SGD(self.lr)\n sgd_optimizer.minimize(loss)\n", "issue": "policy_gradient \u539f\u7406\u4ecb\u7ecd\u90e8\u5206\u5185\u5bb9\u683c\u5f0f\u5b58\u5728\u95ee\u9898\nhttps://github.com/PaddlePaddle/models/tree/develop/fluid/policy_gradient \r\npolicy_gradient demo\u4ecb\u7ecd\u90e8\u5206\uff0c\u770b\u8d77\u6765\u683c\u5f0f\u5b58\u5728\u95ee\u9898\uff0c\u80fd\u8f9b\u82e6\u8c03\u6574\u4e0b\u5417\uff1f\u6216\u8005\u4ee5\u4ec0\u4e48\u6837\u7684\u65b9\u5f0f\u53ef\u4ee5\u770b\u5230\u539f\u59cb\u7684\u6587\u6863\u5462\uff1f @wanghaoshuang @lcy-seso \n", "before_files": [{"content": "import numpy as np\nimport paddle.v2 as paddle\nimport paddle.fluid as fluid\n# reproducible\nnp.random.seed(1)\n\n\nclass PolicyGradient:\n def __init__(\n self,\n n_actions,\n n_features,\n learning_rate=0.01,\n reward_decay=0.95,\n output_graph=False, ):\n self.n_actions = n_actions\n self.n_features = n_features\n self.lr = learning_rate\n self.gamma = reward_decay\n\n self.ep_obs, self.ep_as, self.ep_rs = [], [], []\n\n self.place = fluid.CPUPlace()\n self.exe = fluid.Executor(self.place)\n\n def build_net(self):\n\n obs = fluid.layers.data(\n name='obs', shape=[self.n_features], dtype='float32')\n acts = fluid.layers.data(name='acts', shape=[1], dtype='int64')\n vt = fluid.layers.data(name='vt', shape=[1], dtype='float32')\n # fc1\n fc1 = fluid.layers.fc(\n input=obs,\n size=10,\n act=\"tanh\" # tanh activation\n )\n # fc2\n self.all_act_prob = fluid.layers.fc(input=fc1,\n size=self.n_actions,\n act=\"softmax\")\n # to maximize total reward (log_p * R) is to minimize -(log_p * R)\n neg_log_prob = fluid.layers.cross_entropy(\n input=self.all_act_prob,\n label=acts) # this is negative log of chosen action\n neg_log_prob_weight = fluid.layers.elementwise_mul(x=neg_log_prob, y=vt)\n loss = fluid.layers.reduce_mean(\n x=neg_log_prob_weight) # reward guided loss\n\n sgd_optimizer = fluid.optimizer.SGD(self.lr)\n sgd_optimizer.minimize(loss)\n self.exe.run(fluid.default_startup_program())\n\n def choose_action(self, observation):\n prob_weights = self.exe.run(\n fluid.default_main_program().prune(self.all_act_prob),\n feed={\"obs\": observation[np.newaxis, :]},\n fetch_list=[self.all_act_prob])\n prob_weights = np.array(prob_weights[0])\n action = np.random.choice(\n range(prob_weights.shape[1]),\n p=prob_weights.ravel()) # select action w.r.t the actions prob\n return action\n\n def store_transition(self, s, a, r):\n self.ep_obs.append(s)\n self.ep_as.append(a)\n self.ep_rs.append(r)\n\n def learn(self):\n # discount and normalize episode reward\n discounted_ep_rs_norm = self._discount_and_norm_rewards()\n tensor_obs = np.vstack(self.ep_obs).astype(\"float32\")\n tensor_as = np.array(self.ep_as).astype(\"int64\")\n tensor_as = tensor_as.reshape([tensor_as.shape[0], 1])\n tensor_vt = discounted_ep_rs_norm.astype(\"float32\")[:, np.newaxis]\n # train on episode\n self.exe.run(\n fluid.default_main_program(),\n feed={\n \"obs\": tensor_obs, # shape=[None, n_obs]\n \"acts\": tensor_as, # shape=[None, ]\n \"vt\": tensor_vt # shape=[None, ]\n })\n self.ep_obs, self.ep_as, self.ep_rs = [], [], [] # empty episode data\n return discounted_ep_rs_norm\n\n def _discount_and_norm_rewards(self):\n # discount episode rewards\n discounted_ep_rs = np.zeros_like(self.ep_rs)\n running_add = 0\n for t in reversed(range(0, len(self.ep_rs))):\n running_add = running_add * self.gamma + self.ep_rs[t]\n discounted_ep_rs[t] = running_add\n\n # normalize episode rewards\n discounted_ep_rs -= np.mean(discounted_ep_rs)\n discounted_ep_rs /= np.std(discounted_ep_rs)\n return discounted_ep_rs\n", "path": "fluid/policy_gradient/brain.py"}], "after_files": [{"content": "import numpy as np\nimport paddle.v2 as paddle\nimport paddle.fluid as fluid\n# reproducible\nnp.random.seed(1)\n\n\nclass PolicyGradient:\n def __init__(\n self,\n n_actions,\n n_features,\n learning_rate=0.01,\n reward_decay=0.95,\n output_graph=False, ):\n self.n_actions = n_actions\n self.n_features = n_features\n self.lr = learning_rate\n self.gamma = reward_decay\n\n self.ep_obs, self.ep_as, self.ep_rs = [], [], []\n\n self.place = fluid.CPUPlace()\n self.exe = fluid.Executor(self.place)\n\n def build_net(self):\n\n obs = fluid.layers.data(\n name='obs', shape=[self.n_features], dtype='float32')\n acts = fluid.layers.data(name='acts', shape=[1], dtype='int64')\n vt = fluid.layers.data(name='vt', shape=[1], dtype='float32')\n # fc1\n fc1 = fluid.layers.fc(\n input=obs,\n size=10,\n act=\"tanh\" # tanh activation\n )\n # fc2\n self.all_act_prob = fluid.layers.fc(input=fc1,\n size=self.n_actions,\n act=\"softmax\")\n # to maximize total reward (log_p * R) is to minimize -(log_p * R)\n neg_log_prob = fluid.layers.cross_entropy(\n input=self.all_act_prob,\n label=acts) # this is negative log of chosen action\n neg_log_prob_weight = fluid.layers.elementwise_mul(x=neg_log_prob, y=vt)\n loss = fluid.layers.reduce_mean(\n neg_log_prob_weight) # reward guided loss\n\n sgd_optimizer = fluid.optimizer.SGD(self.lr)\n sgd_optimizer.minimize(loss)\n self.exe.run(fluid.default_startup_program())\n\n def choose_action(self, observation):\n prob_weights = self.exe.run(\n fluid.default_main_program().prune(self.all_act_prob),\n feed={\"obs\": observation[np.newaxis, :]},\n fetch_list=[self.all_act_prob])\n prob_weights = np.array(prob_weights[0])\n action = np.random.choice(\n range(prob_weights.shape[1]),\n p=prob_weights.ravel()) # select action w.r.t the actions prob\n return action\n\n def store_transition(self, s, a, r):\n self.ep_obs.append(s)\n self.ep_as.append(a)\n self.ep_rs.append(r)\n\n def learn(self):\n # discount and normalize episode reward\n discounted_ep_rs_norm = self._discount_and_norm_rewards()\n tensor_obs = np.vstack(self.ep_obs).astype(\"float32\")\n tensor_as = np.array(self.ep_as).astype(\"int64\")\n tensor_as = tensor_as.reshape([tensor_as.shape[0], 1])\n tensor_vt = discounted_ep_rs_norm.astype(\"float32\")[:, np.newaxis]\n # train on episode\n self.exe.run(\n fluid.default_main_program(),\n feed={\n \"obs\": tensor_obs, # shape=[None, n_obs]\n \"acts\": tensor_as, # shape=[None, ]\n \"vt\": tensor_vt # shape=[None, ]\n })\n self.ep_obs, self.ep_as, self.ep_rs = [], [], [] # empty episode data\n return discounted_ep_rs_norm\n\n def _discount_and_norm_rewards(self):\n # discount episode rewards\n discounted_ep_rs = np.zeros_like(self.ep_rs)\n running_add = 0\n for t in reversed(range(0, len(self.ep_rs))):\n running_add = running_add * self.gamma + self.ep_rs[t]\n discounted_ep_rs[t] = running_add\n\n # normalize episode rewards\n discounted_ep_rs -= np.mean(discounted_ep_rs)\n discounted_ep_rs /= np.std(discounted_ep_rs)\n return discounted_ep_rs\n", "path": "fluid/policy_gradient/brain.py"}]}
| 1,364 | 148 |
gh_patches_debug_204
|
rasdani/github-patches
|
git_diff
|
pytorch__torchdynamo-1539
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove the triton dependency of Inductor CPU codegen
We import triton library even we compile the CPU codegen, e.g:
```
from ctypes import c_void_p, c_long
import torch
import random
from torch import empty_strided, as_strided, device
from torchinductor.codecache import AsyncCompile
aten = torch.ops.aten
async_compile = AsyncCompile()
import triton
import triton.language as tl
from torchinductor.triton_ops.autotune import grid
from torch._C import _cuda_getCurrentRawStream as get_cuda_stream
kernel0 = async_compile.cpp('''
#include "/tmp/torchinductor_ybliang/i5/ci5zbqbzeij2usetynv7oczewshegubkvtpswwuumpp6xjync55y.h"
extern "C" void kernel(const float* __restrict__ in_ptr0,
const float* __restrict__ in_ptr1,
float* __restrict__ out_ptr0,
const long ks0)
{
#pragma GCC ivdep
for(long i0=0; i0<ks0*ks0; ++i0)
{
{
{
auto tmp0 = in_ptr0[i0];
auto tmp1 = in_ptr1[i0];
auto tmp2 = tmp0 + tmp1;
out_ptr0[i0] = tmp2;
}
}
}
}
''')
```
This will cause dependency issue if users just want to use inductor on CPU. I think we should remove this dependency. Look at the code [here](https://github.com/pytorch/torchdynamo/blob/main/torchinductor/codegen/wrapper.py#L198), actually we add these headers according if ```has_triton```, maybe we should define a better criteria.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torchinductor/utils.py`
Content:
```
1 import collections
2 import functools
3 import operator
4 import time
5 from importlib import import_module
6 from typing import Any
7 from typing import Dict
8 from typing import List
9
10 import numpy as np
11 import sympy
12 import torch
13 from torch.fx.immutable_collections import immutable_dict
14 from torch.fx.immutable_collections import immutable_list
15
16 from . import config
17
18 VarRanges = Dict[sympy.Expr, sympy.Expr]
19
20 # We import torchdynamo modules indirectly to allow a future rename to torch.dynamo
21 dynamo_config = import_module(f"{config.dynamo_import}.config")
22 dynamo_debug_utils = import_module(f"{config.dynamo_import}.debug_utils")
23 dynamo_logging = import_module(f"{config.dynamo_import}.logging")
24 dynamo_optimizations = import_module(f"{config.dynamo_import}.optimizations")
25 dynamo_testing = import_module(f"{config.dynamo_import}.testing")
26 dynamo_utils = import_module(f"{config.dynamo_import}.utils")
27
28
29 @functools.lru_cache(None)
30 def has_triton():
31 try:
32 import triton
33
34 return triton is not None
35 except (ImportError, ModuleNotFoundError):
36 return False
37
38
39 @functools.lru_cache(None)
40 def has_torchvision_roi_align():
41 try:
42 from torchvision.ops import roi_align # noqa
43
44 return roi_align is not None and hasattr(
45 getattr(torch.ops, "torchvision", None), "roi_align"
46 )
47 except (ImportError, ModuleNotFoundError):
48 return False
49
50
51 def conditional_product(*args):
52 return functools.reduce(operator.mul, [x for x in args if x])
53
54
55 def sympy_product(it):
56 return functools.reduce(operator.mul, it, sympy.Integer(1))
57
58
59 def sympy_dot(seq1, seq2):
60 assert len(seq1) == len(seq2)
61 return sympy.expand(sum(a * b for a, b in zip(seq1, seq2)))
62
63
64 def unique(it):
65 return {id(x): x for x in it}.values()
66
67
68 def ceildiv(numer: int, denom: int):
69 assert isinstance(numer, int) and isinstance(denom, int)
70 return -(numer // -denom)
71
72
73 def gen_gm_and_inputs(target, args, kwargs):
74 g = torch.fx.Graph()
75 g_args = []
76 a_args = []
77 for n, arg in enumerate(args):
78 if isinstance(arg, torch.Tensor):
79 g_args.append(g.placeholder(f"arg{n}"))
80 a_args.append(arg)
81 else:
82 g_args.append(arg)
83 assert all(not isinstance(x, torch.Tensor) for x in kwargs.values())
84 node = g.call_function(target, tuple(g_args), kwargs)
85 if (
86 len(target._schema.returns) == 1
87 and str(target._schema.returns[0].type) == "Tensor"
88 ):
89 node = (node,)
90 g.output(node)
91
92 gm = torch.fx.GraphModule({}, g)
93 return gm, a_args
94
95
96 def synchronize():
97 if torch.cuda.is_available():
98 torch.cuda.synchronize()
99
100
101 def timed(model, example_inputs, times=1):
102 synchronize()
103 torch.manual_seed(1337)
104 t0 = time.perf_counter()
105 for _ in range(times):
106 result = model(*example_inputs)
107 synchronize()
108 t1 = time.perf_counter()
109 # GC the result after timing
110 assert result is not None
111 return t1 - t0
112
113
114 def print_performance(fn, args=(), times=10, repeat=10, baseline=1.0):
115 timings = [timed(fn, args, times) for _ in range(repeat)]
116 took = np.median(timings)
117 print(f"{took/baseline:.6f}")
118 return took
119
120
121 immutable_dict.__hash__ = lambda self: hash(tuple(self.items()))
122 immutable_list.__hash__ = lambda self: hash(tuple(self))
123
124
125 def freeze_inputs(f):
126 """
127 Useful for wrapping lists in tuples for caching purposes
128 """
129
130 def freeze_value(x):
131 if isinstance(x, (immutable_dict, immutable_list)):
132 return x
133 if isinstance(x, list):
134 return immutable_list(x)
135 if isinstance(x, dict):
136 return immutable_dict(x)
137 return x
138
139 @functools.wraps(f)
140 def wrapped(*args):
141 args = [freeze_value(x) for x in args]
142 return f(*args)
143
144 wrapped.cache_info = f.cache_info
145 return wrapped
146
147
148 def precompute_method(obj: Any, method: str):
149 """Replace obj.method() with a new method that returns a precomputed constant."""
150 result = getattr(obj, method)()
151 setattr(obj, method, lambda: result)
152
153
154 def precompute_methods(obj: Any, methods: List[str]):
155 """Replace methods with new methods that returns a precomputed constants."""
156 for method in methods:
157 precompute_method(obj, method)
158
159
160 def cmp(a, b):
161 return int(a > b) - int(a < b)
162
163
164 def cache_on_self(fn):
165 key = f"__{fn.__name__}_cache"
166
167 @functools.wraps(fn)
168 def wrapper(self):
169 if not hasattr(self, key):
170 setattr(self, key, fn(self))
171 return getattr(self, key)
172
173 return wrapper
174
175
176 def sympy_str(expr: sympy.Expr):
177 """
178 Normal sympy str is very slow, this is a lot faster. The result are
179 somewhat worse, as it doesn't do as much simplification. So don't
180 use this for final codegen.
181 """
182 if isinstance(expr, sympy.Symbol):
183 return expr.name
184 if isinstance(expr, sympy.Add):
185 return " + ".join(map(sympy_str, expr.args))
186 if isinstance(expr, sympy.Mul):
187 return " * ".join(map(sympy_str, expr.args))
188
189 from .ir import CleanDiv
190 from .ir import IndexingDiv
191 from .ir import ModularIndexing
192
193 if isinstance(expr, (ModularIndexing, CleanDiv, IndexingDiv)):
194 return f"{expr.func.__name__}({', '.join(map(sympy_str, expr.args))})"
195 return str(expr)
196
197
198 def sympy_subs(expr: sympy.Expr, replacements: Dict[Any, Any]):
199 """
200 xreplace is faster than subs, but is way more picky
201 """
202
203 def promote_strings(key):
204 if isinstance(key, str):
205 return sympy.Symbol(key)
206 return key
207
208 return expr.xreplace(
209 {promote_strings(k): promote_strings(v) for k, v in replacements.items()}
210 )
211
212
213 def free_symbol_startswith(index: sympy.Expr, prefix: str):
214 return any(v.name.startswith(prefix) for v in index.free_symbols)
215
216
217 def has_incompatible_cudagraph_ops(gm):
218 forbidden_list = set(
219 [
220 "aten._fused_moving_avg_obs_fq_helper.default",
221 "aten._fused_moving_avg_obs_fq_helper_functional.default",
222 "fbgemm.dense_to_jagged.default",
223 "fbgemm.jagged_to_padded_dense.default",
224 ]
225 )
226 for node in gm.graph.nodes:
227 if str(node.target) in forbidden_list:
228 return True
229 return False
230
231
232 instance_descriptor = collections.namedtuple(
233 "instance_descriptor", ["divisible_by_16", "equal_to_1"]
234 )
235
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/torchinductor/utils.py b/torchinductor/utils.py
--- a/torchinductor/utils.py
+++ b/torchinductor/utils.py
@@ -26,6 +26,8 @@
@functools.lru_cache(None)
def has_triton():
+ if not torch.cuda.is_available():
+ return False
try:
import triton
|
{"golden_diff": "diff --git a/torchinductor/utils.py b/torchinductor/utils.py\n--- a/torchinductor/utils.py\n+++ b/torchinductor/utils.py\n@@ -26,6 +26,8 @@\n \n @functools.lru_cache(None)\n def has_triton():\n+ if not torch.cuda.is_available():\n+ return False\n try:\n import triton\n", "issue": "Remove the triton dependency of Inductor CPU codegen\nWe import triton library even we compile the CPU codegen, e.g:\r\n```\r\nfrom ctypes import c_void_p, c_long\r\nimport torch\r\nimport random\r\nfrom torch import empty_strided, as_strided, device\r\nfrom torchinductor.codecache import AsyncCompile\r\n\r\naten = torch.ops.aten\r\nasync_compile = AsyncCompile()\r\n\r\nimport triton\r\nimport triton.language as tl\r\nfrom torchinductor.triton_ops.autotune import grid\r\nfrom torch._C import _cuda_getCurrentRawStream as get_cuda_stream\r\n\r\n\r\nkernel0 = async_compile.cpp('''\r\n#include \"/tmp/torchinductor_ybliang/i5/ci5zbqbzeij2usetynv7oczewshegubkvtpswwuumpp6xjync55y.h\"\r\nextern \"C\" void kernel(const float* __restrict__ in_ptr0,\r\n const float* __restrict__ in_ptr1,\r\n float* __restrict__ out_ptr0,\r\n const long ks0)\r\n{\r\n #pragma GCC ivdep\r\n for(long i0=0; i0<ks0*ks0; ++i0)\r\n {\r\n {\r\n {\r\n auto tmp0 = in_ptr0[i0];\r\n auto tmp1 = in_ptr1[i0];\r\n auto tmp2 = tmp0 + tmp1;\r\n out_ptr0[i0] = tmp2;\r\n }\r\n }\r\n }\r\n}\r\n''')\r\n```\r\nThis will cause dependency issue if users just want to use inductor on CPU. I think we should remove this dependency. Look at the code [here](https://github.com/pytorch/torchdynamo/blob/main/torchinductor/codegen/wrapper.py#L198), actually we add these headers according if ```has_triton```, maybe we should define a better criteria. \n", "before_files": [{"content": "import collections\nimport functools\nimport operator\nimport time\nfrom importlib import import_module\nfrom typing import Any\nfrom typing import Dict\nfrom typing import List\n\nimport numpy as np\nimport sympy\nimport torch\nfrom torch.fx.immutable_collections import immutable_dict\nfrom torch.fx.immutable_collections import immutable_list\n\nfrom . import config\n\nVarRanges = Dict[sympy.Expr, sympy.Expr]\n\n# We import torchdynamo modules indirectly to allow a future rename to torch.dynamo\ndynamo_config = import_module(f\"{config.dynamo_import}.config\")\ndynamo_debug_utils = import_module(f\"{config.dynamo_import}.debug_utils\")\ndynamo_logging = import_module(f\"{config.dynamo_import}.logging\")\ndynamo_optimizations = import_module(f\"{config.dynamo_import}.optimizations\")\ndynamo_testing = import_module(f\"{config.dynamo_import}.testing\")\ndynamo_utils = import_module(f\"{config.dynamo_import}.utils\")\n\n\[email protected]_cache(None)\ndef has_triton():\n try:\n import triton\n\n return triton is not None\n except (ImportError, ModuleNotFoundError):\n return False\n\n\[email protected]_cache(None)\ndef has_torchvision_roi_align():\n try:\n from torchvision.ops import roi_align # noqa\n\n return roi_align is not None and hasattr(\n getattr(torch.ops, \"torchvision\", None), \"roi_align\"\n )\n except (ImportError, ModuleNotFoundError):\n return False\n\n\ndef conditional_product(*args):\n return functools.reduce(operator.mul, [x for x in args if x])\n\n\ndef sympy_product(it):\n return functools.reduce(operator.mul, it, sympy.Integer(1))\n\n\ndef sympy_dot(seq1, seq2):\n assert len(seq1) == len(seq2)\n return sympy.expand(sum(a * b for a, b in zip(seq1, seq2)))\n\n\ndef unique(it):\n return {id(x): x for x in it}.values()\n\n\ndef ceildiv(numer: int, denom: int):\n assert isinstance(numer, int) and isinstance(denom, int)\n return -(numer // -denom)\n\n\ndef gen_gm_and_inputs(target, args, kwargs):\n g = torch.fx.Graph()\n g_args = []\n a_args = []\n for n, arg in enumerate(args):\n if isinstance(arg, torch.Tensor):\n g_args.append(g.placeholder(f\"arg{n}\"))\n a_args.append(arg)\n else:\n g_args.append(arg)\n assert all(not isinstance(x, torch.Tensor) for x in kwargs.values())\n node = g.call_function(target, tuple(g_args), kwargs)\n if (\n len(target._schema.returns) == 1\n and str(target._schema.returns[0].type) == \"Tensor\"\n ):\n node = (node,)\n g.output(node)\n\n gm = torch.fx.GraphModule({}, g)\n return gm, a_args\n\n\ndef synchronize():\n if torch.cuda.is_available():\n torch.cuda.synchronize()\n\n\ndef timed(model, example_inputs, times=1):\n synchronize()\n torch.manual_seed(1337)\n t0 = time.perf_counter()\n for _ in range(times):\n result = model(*example_inputs)\n synchronize()\n t1 = time.perf_counter()\n # GC the result after timing\n assert result is not None\n return t1 - t0\n\n\ndef print_performance(fn, args=(), times=10, repeat=10, baseline=1.0):\n timings = [timed(fn, args, times) for _ in range(repeat)]\n took = np.median(timings)\n print(f\"{took/baseline:.6f}\")\n return took\n\n\nimmutable_dict.__hash__ = lambda self: hash(tuple(self.items()))\nimmutable_list.__hash__ = lambda self: hash(tuple(self))\n\n\ndef freeze_inputs(f):\n \"\"\"\n Useful for wrapping lists in tuples for caching purposes\n \"\"\"\n\n def freeze_value(x):\n if isinstance(x, (immutable_dict, immutable_list)):\n return x\n if isinstance(x, list):\n return immutable_list(x)\n if isinstance(x, dict):\n return immutable_dict(x)\n return x\n\n @functools.wraps(f)\n def wrapped(*args):\n args = [freeze_value(x) for x in args]\n return f(*args)\n\n wrapped.cache_info = f.cache_info\n return wrapped\n\n\ndef precompute_method(obj: Any, method: str):\n \"\"\"Replace obj.method() with a new method that returns a precomputed constant.\"\"\"\n result = getattr(obj, method)()\n setattr(obj, method, lambda: result)\n\n\ndef precompute_methods(obj: Any, methods: List[str]):\n \"\"\"Replace methods with new methods that returns a precomputed constants.\"\"\"\n for method in methods:\n precompute_method(obj, method)\n\n\ndef cmp(a, b):\n return int(a > b) - int(a < b)\n\n\ndef cache_on_self(fn):\n key = f\"__{fn.__name__}_cache\"\n\n @functools.wraps(fn)\n def wrapper(self):\n if not hasattr(self, key):\n setattr(self, key, fn(self))\n return getattr(self, key)\n\n return wrapper\n\n\ndef sympy_str(expr: sympy.Expr):\n \"\"\"\n Normal sympy str is very slow, this is a lot faster. The result are\n somewhat worse, as it doesn't do as much simplification. So don't\n use this for final codegen.\n \"\"\"\n if isinstance(expr, sympy.Symbol):\n return expr.name\n if isinstance(expr, sympy.Add):\n return \" + \".join(map(sympy_str, expr.args))\n if isinstance(expr, sympy.Mul):\n return \" * \".join(map(sympy_str, expr.args))\n\n from .ir import CleanDiv\n from .ir import IndexingDiv\n from .ir import ModularIndexing\n\n if isinstance(expr, (ModularIndexing, CleanDiv, IndexingDiv)):\n return f\"{expr.func.__name__}({', '.join(map(sympy_str, expr.args))})\"\n return str(expr)\n\n\ndef sympy_subs(expr: sympy.Expr, replacements: Dict[Any, Any]):\n \"\"\"\n xreplace is faster than subs, but is way more picky\n \"\"\"\n\n def promote_strings(key):\n if isinstance(key, str):\n return sympy.Symbol(key)\n return key\n\n return expr.xreplace(\n {promote_strings(k): promote_strings(v) for k, v in replacements.items()}\n )\n\n\ndef free_symbol_startswith(index: sympy.Expr, prefix: str):\n return any(v.name.startswith(prefix) for v in index.free_symbols)\n\n\ndef has_incompatible_cudagraph_ops(gm):\n forbidden_list = set(\n [\n \"aten._fused_moving_avg_obs_fq_helper.default\",\n \"aten._fused_moving_avg_obs_fq_helper_functional.default\",\n \"fbgemm.dense_to_jagged.default\",\n \"fbgemm.jagged_to_padded_dense.default\",\n ]\n )\n for node in gm.graph.nodes:\n if str(node.target) in forbidden_list:\n return True\n return False\n\n\ninstance_descriptor = collections.namedtuple(\n \"instance_descriptor\", [\"divisible_by_16\", \"equal_to_1\"]\n)\n", "path": "torchinductor/utils.py"}], "after_files": [{"content": "import collections\nimport functools\nimport operator\nimport time\nfrom importlib import import_module\nfrom typing import Any\nfrom typing import Dict\nfrom typing import List\n\nimport numpy as np\nimport sympy\nimport torch\nfrom torch.fx.immutable_collections import immutable_dict\nfrom torch.fx.immutable_collections import immutable_list\n\nfrom . import config\n\nVarRanges = Dict[sympy.Expr, sympy.Expr]\n\n# We import torchdynamo modules indirectly to allow a future rename to torch.dynamo\ndynamo_debug_utils = import_module(f\"{config.dynamo_import}.debug_utils\")\ndynamo_optimizations = import_module(f\"{config.dynamo_import}.optimizations\")\ndynamo_testing = import_module(f\"{config.dynamo_import}.testing\")\ndynamo_utils = import_module(f\"{config.dynamo_import}.utils\")\n\n\[email protected]_cache(None)\ndef has_triton():\n if not torch.cuda.is_available():\n return False\n try:\n import triton\n\n return triton is not None\n except (ImportError, ModuleNotFoundError):\n return False\n\n\[email protected]_cache(None)\ndef has_torchvision_roi_align():\n try:\n from torchvision.ops import roi_align # noqa\n\n return roi_align is not None and hasattr(\n getattr(torch.ops, \"torchvision\", None), \"roi_align\"\n )\n except (ImportError, ModuleNotFoundError):\n return False\n\n\[email protected]_cache(None)\ndef has_triton_libdevice():\n try:\n from triton.language import libdevice\n\n return libdevice is not None\n except (ImportError, ModuleNotFoundError):\n return False\n\n\ndef conditional_product(*args):\n return functools.reduce(operator.mul, [x for x in args if x])\n\n\ndef sympy_product(it):\n return functools.reduce(operator.mul, it, sympy.Integer(1))\n\n\ndef sympy_dot(seq1, seq2):\n assert len(seq1) == len(seq2)\n return sympy.expand(sum(a * b for a, b in zip(seq1, seq2)))\n\n\ndef unique(it):\n return {id(x): x for x in it}.values()\n\n\ndef ceildiv(numer: int, denom: int):\n assert isinstance(numer, int) and isinstance(denom, int)\n return -(numer // -denom)\n\n\ndef gen_gm_and_inputs(target, args, kwargs):\n g = torch.fx.Graph()\n g_args = []\n a_args = []\n for n, arg in enumerate(args):\n if isinstance(arg, torch.Tensor):\n g_args.append(g.placeholder(f\"arg{n}\"))\n a_args.append(arg)\n else:\n g_args.append(arg)\n assert all(not isinstance(x, torch.Tensor) for x in kwargs.values())\n node = g.call_function(target, tuple(g_args), kwargs)\n if (\n len(target._schema.returns) == 1\n and str(target._schema.returns[0].type) == \"Tensor\"\n ):\n node = (node,)\n g.output(node)\n\n gm = torch.fx.GraphModule({}, g)\n return gm, a_args\n\n\ndef synchronize():\n if torch.cuda.is_available():\n torch.cuda.synchronize()\n\n\ndef timed(model, example_inputs, times=1):\n synchronize()\n torch.manual_seed(1337)\n t0 = time.perf_counter()\n for _ in range(times):\n result = model(*example_inputs)\n synchronize()\n t1 = time.perf_counter()\n # GC the result after timing\n assert result is not None\n return t1 - t0\n\n\ndef print_performance(fn, args=(), times=10, repeat=10, baseline=1.0):\n timings = [timed(fn, args, times) for _ in range(repeat)]\n took = np.median(timings)\n print(f\"{took/baseline:.6f}\")\n return took\n\n\nimmutable_dict.__hash__ = lambda self: hash(tuple(self.items()))\nimmutable_list.__hash__ = lambda self: hash(tuple(self))\n\n\ndef freeze_inputs(f):\n \"\"\"\n Useful for wrapping lists in tuples for caching purposes\n \"\"\"\n\n def freeze_value(x):\n if isinstance(x, (immutable_dict, immutable_list)):\n return x\n if isinstance(x, list):\n return immutable_list(x)\n if isinstance(x, dict):\n return immutable_dict(x)\n return x\n\n @functools.wraps(f)\n def wrapped(*args):\n args = [freeze_value(x) for x in args]\n return f(*args)\n\n wrapped.cache_info = f.cache_info\n return wrapped\n\n\ndef precompute_method(obj: Any, method: str):\n \"\"\"Replace obj.method() with a new method that returns a precomputed constant.\"\"\"\n result = getattr(obj, method)()\n setattr(obj, method, lambda: result)\n\n\ndef precompute_methods(obj: Any, methods: List[str]):\n \"\"\"Replace methods with new methods that returns a precomputed constants.\"\"\"\n for method in methods:\n precompute_method(obj, method)\n\n\ndef cmp(a, b):\n return int(a > b) - int(a < b)\n\n\ndef cache_on_self(fn):\n key = f\"__{fn.__name__}_cache\"\n\n @functools.wraps(fn)\n def wrapper(self):\n if not hasattr(self, key):\n setattr(self, key, fn(self))\n return getattr(self, key)\n\n return wrapper\n\n\ndef sympy_str(expr: sympy.Expr):\n \"\"\"\n Normal sympy str is very slow, this is a lot faster. The result are\n somewhat worse, as it doesn't do as much simplification. So don't\n use this for final codegen.\n \"\"\"\n if isinstance(expr, sympy.Symbol):\n return expr.name\n if isinstance(expr, sympy.Add):\n return \" + \".join(map(sympy_str, expr.args))\n if isinstance(expr, sympy.Mul):\n return \" * \".join(map(sympy_str, expr.args))\n\n from .ir import CleanDiv\n from .ir import IndexingDiv\n from .ir import ModularIndexing\n\n if isinstance(expr, (ModularIndexing, CleanDiv, IndexingDiv)):\n return f\"{expr.func.__name__}({', '.join(map(sympy_str, expr.args))})\"\n return str(expr)\n\n\ndef sympy_subs(expr: sympy.Expr, replacements: Dict[Any, Any]):\n \"\"\"\n xreplace is faster than subs, but is way more picky\n \"\"\"\n\n def promote_strings(key):\n if isinstance(key, str):\n return sympy.Symbol(key)\n return key\n\n return expr.xreplace(\n {promote_strings(k): promote_strings(v) for k, v in replacements.items()}\n )\n\n\ndef free_symbol_startswith(index: sympy.Expr, prefix: str):\n return any(v.name.startswith(prefix) for v in index.free_symbols)\n\n\ndef has_incompatible_cudagraph_ops(gm):\n forbidden_list = set(\n [\n \"aten._fused_moving_avg_obs_fq_helper.default\",\n \"aten._fused_moving_avg_obs_fq_helper_functional.default\",\n \"fbgemm.dense_to_jagged.default\",\n \"fbgemm.jagged_to_padded_dense.default\",\n ]\n )\n for node in gm.graph.nodes:\n if str(node.target) in forbidden_list:\n return True\n return False\n\n\ninstance_descriptor = collections.namedtuple(\n \"instance_descriptor\", [\"divisible_by_16\", \"equal_to_1\"]\n)\n", "path": "torchinductor/utils.py"}]}
| 2,851 | 85 |
gh_patches_debug_22549
|
rasdani/github-patches
|
git_diff
|
psf__black-3543
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
GitHub Action: Use action version as default Black version, instead of latest
> I'm alright with making the default Black version tied to the action version being used. For context `version` was introduced because the action didn't exist for a long time so tying black version to action version wouldn't work for version 19.10b0 for example. In hidesight, having the default being the action version keeping the `version` configuration option around as an escape hatch is the better solution. This will involve some complexity since commit SHAs aren't supported by the version code (but are by GHA) but there might be some pre-existing logic in scripts/diff_shades_gha_helper.py we could reuse.
_Originally posted by @ichard26 in https://github.com/psf/black/issues/1140#issuecomment-1026379455_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `action/main.py`
Content:
```
1 import os
2 import shlex
3 import sys
4 from pathlib import Path
5 from subprocess import PIPE, STDOUT, run
6
7 ACTION_PATH = Path(os.environ["GITHUB_ACTION_PATH"])
8 ENV_PATH = ACTION_PATH / ".black-env"
9 ENV_BIN = ENV_PATH / ("Scripts" if sys.platform == "win32" else "bin")
10 OPTIONS = os.getenv("INPUT_OPTIONS", default="")
11 SRC = os.getenv("INPUT_SRC", default="")
12 JUPYTER = os.getenv("INPUT_JUPYTER") == "true"
13 BLACK_ARGS = os.getenv("INPUT_BLACK_ARGS", default="")
14 VERSION = os.getenv("INPUT_VERSION", default="")
15
16 run([sys.executable, "-m", "venv", str(ENV_PATH)], check=True)
17
18 version_specifier = VERSION
19 if VERSION and VERSION[0] in "0123456789":
20 version_specifier = f"=={VERSION}"
21 if JUPYTER:
22 extra_deps = "[colorama,jupyter]"
23 else:
24 extra_deps = "[colorama]"
25 req = f"black{extra_deps}{version_specifier}"
26 pip_proc = run(
27 [str(ENV_BIN / "python"), "-m", "pip", "install", req],
28 stdout=PIPE,
29 stderr=STDOUT,
30 encoding="utf-8",
31 )
32 if pip_proc.returncode:
33 print(pip_proc.stdout)
34 print("::error::Failed to install Black.", flush=True)
35 sys.exit(pip_proc.returncode)
36
37
38 base_cmd = [str(ENV_BIN / "black")]
39 if BLACK_ARGS:
40 # TODO: remove after a while since this is deprecated in favour of SRC + OPTIONS.
41 proc = run([*base_cmd, *shlex.split(BLACK_ARGS)])
42 else:
43 proc = run([*base_cmd, *shlex.split(OPTIONS), *shlex.split(SRC)])
44
45 sys.exit(proc.returncode)
46
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/action/main.py b/action/main.py
--- a/action/main.py
+++ b/action/main.py
@@ -22,12 +22,34 @@
extra_deps = "[colorama,jupyter]"
else:
extra_deps = "[colorama]"
-req = f"black{extra_deps}{version_specifier}"
+if version_specifier:
+ req = f"black{extra_deps}{version_specifier}"
+else:
+ describe_name = ""
+ with open(ACTION_PATH / ".git_archival.txt", encoding="utf-8") as fp:
+ for line in fp:
+ if line.startswith("describe-name: "):
+ describe_name = line[len("describe-name: ") :].rstrip()
+ break
+ if not describe_name:
+ print("::error::Failed to detect action version.", flush=True)
+ sys.exit(1)
+ # expected format is one of:
+ # - 23.1.0
+ # - 23.1.0-51-g448bba7
+ if describe_name.count("-") < 2:
+ # the action's commit matches a tag exactly, install exact version from PyPI
+ req = f"black{extra_deps}=={describe_name}"
+ else:
+ # the action's commit does not match any tag, install from the local git repo
+ req = f".{extra_deps}"
+print(f"Installing {req}...", flush=True)
pip_proc = run(
[str(ENV_BIN / "python"), "-m", "pip", "install", req],
stdout=PIPE,
stderr=STDOUT,
encoding="utf-8",
+ cwd=ACTION_PATH,
)
if pip_proc.returncode:
print(pip_proc.stdout)
|
{"golden_diff": "diff --git a/action/main.py b/action/main.py\n--- a/action/main.py\n+++ b/action/main.py\n@@ -22,12 +22,34 @@\n extra_deps = \"[colorama,jupyter]\"\n else:\n extra_deps = \"[colorama]\"\n-req = f\"black{extra_deps}{version_specifier}\"\n+if version_specifier:\n+ req = f\"black{extra_deps}{version_specifier}\"\n+else:\n+ describe_name = \"\"\n+ with open(ACTION_PATH / \".git_archival.txt\", encoding=\"utf-8\") as fp:\n+ for line in fp:\n+ if line.startswith(\"describe-name: \"):\n+ describe_name = line[len(\"describe-name: \") :].rstrip()\n+ break\n+ if not describe_name:\n+ print(\"::error::Failed to detect action version.\", flush=True)\n+ sys.exit(1)\n+ # expected format is one of:\n+ # - 23.1.0\n+ # - 23.1.0-51-g448bba7\n+ if describe_name.count(\"-\") < 2:\n+ # the action's commit matches a tag exactly, install exact version from PyPI\n+ req = f\"black{extra_deps}=={describe_name}\"\n+ else:\n+ # the action's commit does not match any tag, install from the local git repo\n+ req = f\".{extra_deps}\"\n+print(f\"Installing {req}...\", flush=True)\n pip_proc = run(\n [str(ENV_BIN / \"python\"), \"-m\", \"pip\", \"install\", req],\n stdout=PIPE,\n stderr=STDOUT,\n encoding=\"utf-8\",\n+ cwd=ACTION_PATH,\n )\n if pip_proc.returncode:\n print(pip_proc.stdout)\n", "issue": "GitHub Action: Use action version as default Black version, instead of latest\n> I'm alright with making the default Black version tied to the action version being used. For context `version` was introduced because the action didn't exist for a long time so tying black version to action version wouldn't work for version 19.10b0 for example. In hidesight, having the default being the action version keeping the `version` configuration option around as an escape hatch is the better solution. This will involve some complexity since commit SHAs aren't supported by the version code (but are by GHA) but there might be some pre-existing logic in scripts/diff_shades_gha_helper.py we could reuse.\r\n\r\n_Originally posted by @ichard26 in https://github.com/psf/black/issues/1140#issuecomment-1026379455_\r\n \n", "before_files": [{"content": "import os\nimport shlex\nimport sys\nfrom pathlib import Path\nfrom subprocess import PIPE, STDOUT, run\n\nACTION_PATH = Path(os.environ[\"GITHUB_ACTION_PATH\"])\nENV_PATH = ACTION_PATH / \".black-env\"\nENV_BIN = ENV_PATH / (\"Scripts\" if sys.platform == \"win32\" else \"bin\")\nOPTIONS = os.getenv(\"INPUT_OPTIONS\", default=\"\")\nSRC = os.getenv(\"INPUT_SRC\", default=\"\")\nJUPYTER = os.getenv(\"INPUT_JUPYTER\") == \"true\"\nBLACK_ARGS = os.getenv(\"INPUT_BLACK_ARGS\", default=\"\")\nVERSION = os.getenv(\"INPUT_VERSION\", default=\"\")\n\nrun([sys.executable, \"-m\", \"venv\", str(ENV_PATH)], check=True)\n\nversion_specifier = VERSION\nif VERSION and VERSION[0] in \"0123456789\":\n version_specifier = f\"=={VERSION}\"\nif JUPYTER:\n extra_deps = \"[colorama,jupyter]\"\nelse:\n extra_deps = \"[colorama]\"\nreq = f\"black{extra_deps}{version_specifier}\"\npip_proc = run(\n [str(ENV_BIN / \"python\"), \"-m\", \"pip\", \"install\", req],\n stdout=PIPE,\n stderr=STDOUT,\n encoding=\"utf-8\",\n)\nif pip_proc.returncode:\n print(pip_proc.stdout)\n print(\"::error::Failed to install Black.\", flush=True)\n sys.exit(pip_proc.returncode)\n\n\nbase_cmd = [str(ENV_BIN / \"black\")]\nif BLACK_ARGS:\n # TODO: remove after a while since this is deprecated in favour of SRC + OPTIONS.\n proc = run([*base_cmd, *shlex.split(BLACK_ARGS)])\nelse:\n proc = run([*base_cmd, *shlex.split(OPTIONS), *shlex.split(SRC)])\n\nsys.exit(proc.returncode)\n", "path": "action/main.py"}], "after_files": [{"content": "import os\nimport shlex\nimport sys\nfrom pathlib import Path\nfrom subprocess import PIPE, STDOUT, run\n\nACTION_PATH = Path(os.environ[\"GITHUB_ACTION_PATH\"])\nENV_PATH = ACTION_PATH / \".black-env\"\nENV_BIN = ENV_PATH / (\"Scripts\" if sys.platform == \"win32\" else \"bin\")\nOPTIONS = os.getenv(\"INPUT_OPTIONS\", default=\"\")\nSRC = os.getenv(\"INPUT_SRC\", default=\"\")\nJUPYTER = os.getenv(\"INPUT_JUPYTER\") == \"true\"\nBLACK_ARGS = os.getenv(\"INPUT_BLACK_ARGS\", default=\"\")\nVERSION = os.getenv(\"INPUT_VERSION\", default=\"\")\n\nrun([sys.executable, \"-m\", \"venv\", str(ENV_PATH)], check=True)\n\nversion_specifier = VERSION\nif VERSION and VERSION[0] in \"0123456789\":\n version_specifier = f\"=={VERSION}\"\nif JUPYTER:\n extra_deps = \"[colorama,jupyter]\"\nelse:\n extra_deps = \"[colorama]\"\nif version_specifier:\n req = f\"black{extra_deps}{version_specifier}\"\nelse:\n describe_name = \"\"\n with open(ACTION_PATH / \".git_archival.txt\", encoding=\"utf-8\") as fp:\n for line in fp:\n if line.startswith(\"describe-name: \"):\n describe_name = line[len(\"describe-name: \") :].rstrip()\n break\n if not describe_name:\n print(\"::error::Failed to detect action version.\", flush=True)\n sys.exit(1)\n # expected format is one of:\n # - 23.1.0\n # - 23.1.0-51-g448bba7\n if describe_name.count(\"-\") < 2:\n # the action's commit matches a tag exactly, install exact version from PyPI\n req = f\"black{extra_deps}=={describe_name}\"\n else:\n # the action's commit does not match any tag, install from the local git repo\n req = f\".{extra_deps}\"\nprint(f\"Installing {req}...\", flush=True)\npip_proc = run(\n [str(ENV_BIN / \"python\"), \"-m\", \"pip\", \"install\", req],\n stdout=PIPE,\n stderr=STDOUT,\n encoding=\"utf-8\",\n cwd=ACTION_PATH,\n)\nif pip_proc.returncode:\n print(pip_proc.stdout)\n print(\"::error::Failed to install Black.\", flush=True)\n sys.exit(pip_proc.returncode)\n\n\nbase_cmd = [str(ENV_BIN / \"black\")]\nif BLACK_ARGS:\n # TODO: remove after a while since this is deprecated in favour of SRC + OPTIONS.\n proc = run([*base_cmd, *shlex.split(BLACK_ARGS)])\nelse:\n proc = run([*base_cmd, *shlex.split(OPTIONS), *shlex.split(SRC)])\n\nsys.exit(proc.returncode)\n", "path": "action/main.py"}]}
| 923 | 393 |
gh_patches_debug_27942
|
rasdani/github-patches
|
git_diff
|
pyjanitor-devs__pyjanitor-461
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[DOC] API Documentation for Biology functions is not standardized
# Brief Description of Fix
<!-- Please describe the fix in terms of a "before" and "after". In other words, what's not so good about the current docs
page, and what you would like to see it become.
Example starter wording is provided. -->
Currently, the docs do not have a standard docstring format for functions.
### Example of inconsistent docstring
> (no `Returns`, no `Functional usage example`, no `Method chaining example`)
<img width="690" alt="incorrect_func_doc" src="https://user-images.githubusercontent.com/24984410/61177779-6936e100-a5a2-11e9-8332-0c02bad7b5bf.png">
### Example of a correct/desired docstring
<img width="716" alt="correct_func_doc" src="https://user-images.githubusercontent.com/24984410/61177795-c5016a00-a5a2-11e9-8cd9-415f440d95c2.png">
I would like to propose a change, such that now the docs contain a **standardized** docstring suite. All functions should contain (at a minimum) the following:
- `Parameters`
- ` Returns`
- `Functional usage example`
- `Method chaining example`
**NOTE**: This can be done for all functions within the `janitor` directory. For ease of review, this will focus on the `biology.py` file and move to other files/functions as time permits.
# Relevant Context
<!-- Please put here, in bullet points, links to the relevant docs page. A few starting template points are available
to get you started. -->
- [Link to documentation page](https://pyjanitor.readthedocs.io/reference/biology.html)
- [Link to exact file to be edited](https://github.com/loganthomas/pyjanitor/blob/dev/janitor/biology.py)
[DOC] API Documentation for Biology functions is not standardized
# Brief Description of Fix
<!-- Please describe the fix in terms of a "before" and "after". In other words, what's not so good about the current docs
page, and what you would like to see it become.
Example starter wording is provided. -->
Currently, the docs do not have a standard docstring format for functions.
### Example of inconsistent docstring
> (no `Returns`, no `Functional usage example`, no `Method chaining example`)
<img width="690" alt="incorrect_func_doc" src="https://user-images.githubusercontent.com/24984410/61177779-6936e100-a5a2-11e9-8332-0c02bad7b5bf.png">
### Example of a correct/desired docstring
<img width="716" alt="correct_func_doc" src="https://user-images.githubusercontent.com/24984410/61177795-c5016a00-a5a2-11e9-8cd9-415f440d95c2.png">
I would like to propose a change, such that now the docs contain a **standardized** docstring suite. All functions should contain (at a minimum) the following:
- `Parameters`
- ` Returns`
- `Functional usage example`
- `Method chaining example`
**NOTE**: This can be done for all functions within the `janitor` directory. For ease of review, this will focus on the `biology.py` file and move to other files/functions as time permits.
# Relevant Context
<!-- Please put here, in bullet points, links to the relevant docs page. A few starting template points are available
to get you started. -->
- [Link to documentation page](https://pyjanitor.readthedocs.io/reference/biology.html)
- [Link to exact file to be edited](https://github.com/loganthomas/pyjanitor/blob/dev/janitor/biology.py)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `janitor/biology.py`
Content:
```
1 """
2 Biology and bioinformatics-oriented data cleaning functions.
3 """
4
5 import pandas as pd
6 import pandas_flavor as pf
7
8 from .utils import deprecated_alias, import_message
9
10 try:
11 from Bio import SeqIO
12 except ImportError:
13 import_message(
14 "biology", "biopython", "conda install -c conda-forge biopython"
15 )
16
17
18 @pf.register_dataframe_method
19 @deprecated_alias(col_name="column_name")
20 def join_fasta(
21 df: pd.DataFrame, filename: str, id_col: str, column_name
22 ) -> pd.DataFrame:
23 """
24 Convenience method to join in a FASTA file as a column.
25
26 This allows us to add the string sequence of a FASTA file as a new column
27 of data in the dataframe.
28
29 This method only attaches the string representation of the SeqRecord.Seq
30 object from Biopython. Does not attach the full SeqRecord. Alphabet is
31 also not stored, under the assumption that the data scientist has domain
32 knowledge of what kind of sequence is being read in (nucleotide vs. amino
33 acid.)
34
35 This method mutates the original DataFrame.
36
37 For more advanced functions, please use phylopandas.
38
39 :param df: A pandas DataFrame.
40 :param filename: Path to the FASTA file.
41 :param id_col: The column in the DataFrame that houses sequence IDs.
42 :param column_name: The name of the new column.
43 """
44 seqrecords = {
45 x.id: x.seq.__str__() for x in SeqIO.parse(filename, "fasta")
46 }
47 seq_col = [seqrecords[i] for i in df[id_col]]
48 df[column_name] = seq_col
49 return df
50
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/janitor/biology.py b/janitor/biology.py
--- a/janitor/biology.py
+++ b/janitor/biology.py
@@ -18,7 +18,7 @@
@pf.register_dataframe_method
@deprecated_alias(col_name="column_name")
def join_fasta(
- df: pd.DataFrame, filename: str, id_col: str, column_name
+ df: pd.DataFrame, filename: str, id_col: str, column_name: str
) -> pd.DataFrame:
"""
Convenience method to join in a FASTA file as a column.
@@ -36,10 +36,37 @@
For more advanced functions, please use phylopandas.
+ Functional usage example:
+
+ .. code-block:: python
+
+ import janitor.biology
+
+ df = janitor.biology.join_fasta(
+ df=df,
+ filename='fasta_file.fasta',
+ id_col='sequence_accession',
+ column_name='sequence',
+ )
+
+ Method chaining example:
+
+ .. code-block:: python
+
+ import pandas as pd
+ import janitor.biology
+
+ df = pd.DataFrame(...).join_fasta(
+ filename='fasta_file.fasta',
+ id_col='sequence_accession',
+ column_name='sequence',
+ )
+
:param df: A pandas DataFrame.
:param filename: Path to the FASTA file.
:param id_col: The column in the DataFrame that houses sequence IDs.
:param column_name: The name of the new column.
+ :returns: A pandas DataFrame with new FASTA string sequence column.
"""
seqrecords = {
x.id: x.seq.__str__() for x in SeqIO.parse(filename, "fasta")
|
{"golden_diff": "diff --git a/janitor/biology.py b/janitor/biology.py\n--- a/janitor/biology.py\n+++ b/janitor/biology.py\n@@ -18,7 +18,7 @@\n @pf.register_dataframe_method\n @deprecated_alias(col_name=\"column_name\")\n def join_fasta(\n- df: pd.DataFrame, filename: str, id_col: str, column_name\n+ df: pd.DataFrame, filename: str, id_col: str, column_name: str\n ) -> pd.DataFrame:\n \"\"\"\n Convenience method to join in a FASTA file as a column.\n@@ -36,10 +36,37 @@\n \n For more advanced functions, please use phylopandas.\n \n+ Functional usage example:\n+\n+ .. code-block:: python\n+\n+ import janitor.biology\n+\n+ df = janitor.biology.join_fasta(\n+ df=df,\n+ filename='fasta_file.fasta',\n+ id_col='sequence_accession',\n+ column_name='sequence',\n+ )\n+\n+ Method chaining example:\n+\n+ .. code-block:: python\n+\n+ import pandas as pd\n+ import janitor.biology\n+\n+ df = pd.DataFrame(...).join_fasta(\n+ filename='fasta_file.fasta',\n+ id_col='sequence_accession',\n+ column_name='sequence',\n+ )\n+\n :param df: A pandas DataFrame.\n :param filename: Path to the FASTA file.\n :param id_col: The column in the DataFrame that houses sequence IDs.\n :param column_name: The name of the new column.\n+ :returns: A pandas DataFrame with new FASTA string sequence column.\n \"\"\"\n seqrecords = {\n x.id: x.seq.__str__() for x in SeqIO.parse(filename, \"fasta\")\n", "issue": "[DOC] API Documentation for Biology functions is not standardized\n# Brief Description of Fix\r\n\r\n<!-- Please describe the fix in terms of a \"before\" and \"after\". In other words, what's not so good about the current docs\r\npage, and what you would like to see it become. \r\n\r\nExample starter wording is provided. -->\r\n\r\nCurrently, the docs do not have a standard docstring format for functions.\r\n### Example of inconsistent docstring\r\n> (no `Returns`, no `Functional usage example`, no `Method chaining example`)\r\n<img width=\"690\" alt=\"incorrect_func_doc\" src=\"https://user-images.githubusercontent.com/24984410/61177779-6936e100-a5a2-11e9-8332-0c02bad7b5bf.png\">\r\n\r\n### Example of a correct/desired docstring\r\n<img width=\"716\" alt=\"correct_func_doc\" src=\"https://user-images.githubusercontent.com/24984410/61177795-c5016a00-a5a2-11e9-8cd9-415f440d95c2.png\">\r\n\r\nI would like to propose a change, such that now the docs contain a **standardized** docstring suite. All functions should contain (at a minimum) the following:\r\n - `Parameters`\r\n - ` Returns`\r\n - `Functional usage example`\r\n - `Method chaining example`\r\n\r\n**NOTE**: This can be done for all functions within the `janitor` directory. For ease of review, this will focus on the `biology.py` file and move to other files/functions as time permits.\r\n\r\n# Relevant Context\r\n\r\n<!-- Please put here, in bullet points, links to the relevant docs page. A few starting template points are available\r\nto get you started. -->\r\n\r\n- [Link to documentation page](https://pyjanitor.readthedocs.io/reference/biology.html)\r\n- [Link to exact file to be edited](https://github.com/loganthomas/pyjanitor/blob/dev/janitor/biology.py)\r\n\n[DOC] API Documentation for Biology functions is not standardized\n# Brief Description of Fix\r\n\r\n<!-- Please describe the fix in terms of a \"before\" and \"after\". In other words, what's not so good about the current docs\r\npage, and what you would like to see it become. \r\n\r\nExample starter wording is provided. -->\r\n\r\nCurrently, the docs do not have a standard docstring format for functions.\r\n### Example of inconsistent docstring\r\n> (no `Returns`, no `Functional usage example`, no `Method chaining example`)\r\n<img width=\"690\" alt=\"incorrect_func_doc\" src=\"https://user-images.githubusercontent.com/24984410/61177779-6936e100-a5a2-11e9-8332-0c02bad7b5bf.png\">\r\n\r\n### Example of a correct/desired docstring\r\n<img width=\"716\" alt=\"correct_func_doc\" src=\"https://user-images.githubusercontent.com/24984410/61177795-c5016a00-a5a2-11e9-8cd9-415f440d95c2.png\">\r\n\r\nI would like to propose a change, such that now the docs contain a **standardized** docstring suite. All functions should contain (at a minimum) the following:\r\n - `Parameters`\r\n - ` Returns`\r\n - `Functional usage example`\r\n - `Method chaining example`\r\n\r\n**NOTE**: This can be done for all functions within the `janitor` directory. For ease of review, this will focus on the `biology.py` file and move to other files/functions as time permits.\r\n\r\n# Relevant Context\r\n\r\n<!-- Please put here, in bullet points, links to the relevant docs page. A few starting template points are available\r\nto get you started. -->\r\n\r\n- [Link to documentation page](https://pyjanitor.readthedocs.io/reference/biology.html)\r\n- [Link to exact file to be edited](https://github.com/loganthomas/pyjanitor/blob/dev/janitor/biology.py)\r\n\n", "before_files": [{"content": "\"\"\"\nBiology and bioinformatics-oriented data cleaning functions.\n\"\"\"\n\nimport pandas as pd\nimport pandas_flavor as pf\n\nfrom .utils import deprecated_alias, import_message\n\ntry:\n from Bio import SeqIO\nexcept ImportError:\n import_message(\n \"biology\", \"biopython\", \"conda install -c conda-forge biopython\"\n )\n\n\[email protected]_dataframe_method\n@deprecated_alias(col_name=\"column_name\")\ndef join_fasta(\n df: pd.DataFrame, filename: str, id_col: str, column_name\n) -> pd.DataFrame:\n \"\"\"\n Convenience method to join in a FASTA file as a column.\n\n This allows us to add the string sequence of a FASTA file as a new column\n of data in the dataframe.\n\n This method only attaches the string representation of the SeqRecord.Seq\n object from Biopython. Does not attach the full SeqRecord. Alphabet is\n also not stored, under the assumption that the data scientist has domain\n knowledge of what kind of sequence is being read in (nucleotide vs. amino\n acid.)\n\n This method mutates the original DataFrame.\n\n For more advanced functions, please use phylopandas.\n\n :param df: A pandas DataFrame.\n :param filename: Path to the FASTA file.\n :param id_col: The column in the DataFrame that houses sequence IDs.\n :param column_name: The name of the new column.\n \"\"\"\n seqrecords = {\n x.id: x.seq.__str__() for x in SeqIO.parse(filename, \"fasta\")\n }\n seq_col = [seqrecords[i] for i in df[id_col]]\n df[column_name] = seq_col\n return df\n", "path": "janitor/biology.py"}], "after_files": [{"content": "\"\"\"\nBiology and bioinformatics-oriented data cleaning functions.\n\"\"\"\n\nimport pandas as pd\nimport pandas_flavor as pf\n\nfrom .utils import deprecated_alias, import_message\n\ntry:\n from Bio import SeqIO\nexcept ImportError:\n import_message(\n \"biology\", \"biopython\", \"conda install -c conda-forge biopython\"\n )\n\n\[email protected]_dataframe_method\n@deprecated_alias(col_name=\"column_name\")\ndef join_fasta(\n df: pd.DataFrame, filename: str, id_col: str, column_name: str\n) -> pd.DataFrame:\n \"\"\"\n Convenience method to join in a FASTA file as a column.\n\n This allows us to add the string sequence of a FASTA file as a new column\n of data in the dataframe.\n\n This method only attaches the string representation of the SeqRecord.Seq\n object from Biopython. Does not attach the full SeqRecord. Alphabet is\n also not stored, under the assumption that the data scientist has domain\n knowledge of what kind of sequence is being read in (nucleotide vs. amino\n acid.)\n\n This method mutates the original DataFrame.\n\n For more advanced functions, please use phylopandas.\n\n Functional usage example:\n\n .. code-block:: python\n\n import janitor.biology\n\n df = janitor.biology.join_fasta(\n df=df,\n filename='fasta_file.fasta',\n id_col='sequence_accession',\n column_name='sequence',\n )\n\n Method chaining example:\n\n .. code-block:: python\n\n import pandas as pd\n import janitor.biology\n\n df = pd.DataFrame(...).join_fasta(\n filename='fasta_file.fasta',\n id_col='sequence_accession',\n column_name='sequence',\n )\n\n :param df: A pandas DataFrame.\n :param filename: Path to the FASTA file.\n :param id_col: The column in the DataFrame that houses sequence IDs.\n :param column_name: The name of the new column.\n :returns: A pandas DataFrame with new FASTA string sequence column.\n \"\"\"\n seqrecords = {\n x.id: x.seq.__str__() for x in SeqIO.parse(filename, \"fasta\")\n }\n seq_col = [seqrecords[i] for i in df[id_col]]\n df[column_name] = seq_col\n return df\n", "path": "janitor/biology.py"}]}
| 1,632 | 392 |
gh_patches_debug_13931
|
rasdani/github-patches
|
git_diff
|
sopel-irc__sopel-987
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
KeyError trying to reload a system module name
`KeyError: 'time' (file "/opt/rh/python33/root/usr/lib/python3.3/site-packages/sopel/modules/reload.py", line 62, in f_reload)` should be a sane error message
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sopel/modules/reload.py`
Content:
```
1 # coding=utf-8
2 """
3 reload.py - Sopel Module Reloader Module
4 Copyright 2008, Sean B. Palmer, inamidst.com
5 Licensed under the Eiffel Forum License 2.
6
7 http://sopel.chat
8 """
9 from __future__ import unicode_literals, absolute_import, print_function, division
10
11 import collections
12 import sys
13 import time
14 from sopel.tools import iteritems
15 import sopel.loader
16 import sopel.module
17 import subprocess
18
19
20 @sopel.module.nickname_commands("reload")
21 @sopel.module.priority("low")
22 @sopel.module.thread(False)
23 def f_reload(bot, trigger):
24 """Reloads a module, for use by admins only."""
25 if not trigger.admin:
26 return
27
28 name = trigger.group(2)
29 if name == bot.config.core.owner:
30 return bot.reply('What?')
31
32 if not name or name == '*' or name.upper() == 'ALL THE THINGS':
33 bot._callables = {
34 'high': collections.defaultdict(list),
35 'medium': collections.defaultdict(list),
36 'low': collections.defaultdict(list)
37 }
38 bot.command_groups = collections.defaultdict(list)
39 bot.setup()
40 return bot.reply('done')
41
42 if name not in sys.modules:
43 return bot.reply('%s: not loaded, try the `load` command' % name)
44
45 old_module = sys.modules[name]
46
47 old_callables = {}
48 for obj_name, obj in iteritems(vars(old_module)):
49 bot.unregister(obj)
50
51 # Also remove all references to sopel callables from top level of the
52 # module, so that they will not get loaded again if reloading the
53 # module does not override them.
54 for obj_name in old_callables.keys():
55 delattr(old_module, obj_name)
56
57 # Also delete the setup function
58 if hasattr(old_module, "setup"):
59 delattr(old_module, "setup")
60
61 modules = sopel.loader.enumerate_modules(bot.config)
62 path, type_ = modules[name]
63 load_module(bot, name, path, type_)
64
65
66 def load_module(bot, name, path, type_):
67 module, mtime = sopel.loader.load_module(name, path, type_)
68 relevant_parts = sopel.loader.clean_module(module, bot.config)
69
70 bot.register(*relevant_parts)
71
72 # TODO sys.modules[name] = module
73 if hasattr(module, 'setup'):
74 module.setup(bot)
75
76 modified = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(mtime))
77
78 bot.reply('%r (version: %s)' % (module, modified))
79
80
81 @sopel.module.nickname_commands('update')
82 def f_update(bot, trigger):
83 if not trigger.admin:
84 return
85
86 """Pulls the latest versions of all modules from Git"""
87 proc = subprocess.Popen('/usr/bin/git pull',
88 stdout=subprocess.PIPE,
89 stderr=subprocess.PIPE, shell=True)
90 bot.reply(proc.communicate()[0])
91
92 f_reload(bot, trigger)
93
94
95 @sopel.module.nickname_commands("load")
96 @sopel.module.priority("low")
97 @sopel.module.thread(False)
98 def f_load(bot, trigger):
99 """Loads a module, for use by admins only."""
100 if not trigger.admin:
101 return
102
103 name = trigger.group(2)
104 path = ''
105 if name == bot.config.core.owner:
106 return bot.reply('What?')
107
108 if name in sys.modules:
109 return bot.reply('Module already loaded, use reload')
110
111 mods = sopel.loader.enumerate_modules(bot.config)
112 if name not in mods:
113 return bot.reply('Module %s not found' % name)
114 path, type_ = mods[name]
115 load_module(bot, name, path, type_)
116
117
118 # Catch PM based messages
119 @sopel.module.commands("reload")
120 @sopel.module.priority("low")
121 @sopel.module.thread(False)
122 def pm_f_reload(bot, trigger):
123 """Wrapper for allowing delivery of .reload command via PM"""
124 if trigger.is_privmsg:
125 f_reload(bot, trigger)
126
127
128 @sopel.module.commands('update')
129 def pm_f_update(bot, trigger):
130 """Wrapper for allowing delivery of .update command via PM"""
131 if trigger.is_privmsg:
132 f_update(bot, trigger)
133
134
135 @sopel.module.commands("load")
136 @sopel.module.priority("low")
137 @sopel.module.thread(False)
138 def pm_f_load(bot, trigger):
139 """Wrapper for allowing delivery of .load command via PM"""
140 if trigger.is_privmsg:
141 f_load(bot, trigger)
142
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/sopel/modules/reload.py b/sopel/modules/reload.py
--- a/sopel/modules/reload.py
+++ b/sopel/modules/reload.py
@@ -40,7 +40,7 @@
return bot.reply('done')
if name not in sys.modules:
- return bot.reply('%s: not loaded, try the `load` command' % name)
+ return bot.reply('"%s" not loaded, try the `load` command' % name)
old_module = sys.modules[name]
@@ -59,6 +59,8 @@
delattr(old_module, "setup")
modules = sopel.loader.enumerate_modules(bot.config)
+ if name not in modules:
+ return bot.reply('"%s" not loaded, try the `load` command' % name)
path, type_ = modules[name]
load_module(bot, name, path, type_)
|
{"golden_diff": "diff --git a/sopel/modules/reload.py b/sopel/modules/reload.py\n--- a/sopel/modules/reload.py\n+++ b/sopel/modules/reload.py\n@@ -40,7 +40,7 @@\n return bot.reply('done')\n \n if name not in sys.modules:\n- return bot.reply('%s: not loaded, try the `load` command' % name)\n+ return bot.reply('\"%s\" not loaded, try the `load` command' % name)\n \n old_module = sys.modules[name]\n \n@@ -59,6 +59,8 @@\n delattr(old_module, \"setup\")\n \n modules = sopel.loader.enumerate_modules(bot.config)\n+ if name not in modules:\n+ return bot.reply('\"%s\" not loaded, try the `load` command' % name)\n path, type_ = modules[name]\n load_module(bot, name, path, type_)\n", "issue": "KeyError trying to reload a system module name\n`KeyError: 'time' (file \"/opt/rh/python33/root/usr/lib/python3.3/site-packages/sopel/modules/reload.py\", line 62, in f_reload)` should be a sane error message\n\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"\nreload.py - Sopel Module Reloader Module\nCopyright 2008, Sean B. Palmer, inamidst.com\nLicensed under the Eiffel Forum License 2.\n\nhttp://sopel.chat\n\"\"\"\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport collections\nimport sys\nimport time\nfrom sopel.tools import iteritems\nimport sopel.loader\nimport sopel.module\nimport subprocess\n\n\[email protected]_commands(\"reload\")\[email protected](\"low\")\[email protected](False)\ndef f_reload(bot, trigger):\n \"\"\"Reloads a module, for use by admins only.\"\"\"\n if not trigger.admin:\n return\n\n name = trigger.group(2)\n if name == bot.config.core.owner:\n return bot.reply('What?')\n\n if not name or name == '*' or name.upper() == 'ALL THE THINGS':\n bot._callables = {\n 'high': collections.defaultdict(list),\n 'medium': collections.defaultdict(list),\n 'low': collections.defaultdict(list)\n }\n bot.command_groups = collections.defaultdict(list)\n bot.setup()\n return bot.reply('done')\n\n if name not in sys.modules:\n return bot.reply('%s: not loaded, try the `load` command' % name)\n\n old_module = sys.modules[name]\n\n old_callables = {}\n for obj_name, obj in iteritems(vars(old_module)):\n bot.unregister(obj)\n\n # Also remove all references to sopel callables from top level of the\n # module, so that they will not get loaded again if reloading the\n # module does not override them.\n for obj_name in old_callables.keys():\n delattr(old_module, obj_name)\n\n # Also delete the setup function\n if hasattr(old_module, \"setup\"):\n delattr(old_module, \"setup\")\n\n modules = sopel.loader.enumerate_modules(bot.config)\n path, type_ = modules[name]\n load_module(bot, name, path, type_)\n\n\ndef load_module(bot, name, path, type_):\n module, mtime = sopel.loader.load_module(name, path, type_)\n relevant_parts = sopel.loader.clean_module(module, bot.config)\n\n bot.register(*relevant_parts)\n\n # TODO sys.modules[name] = module\n if hasattr(module, 'setup'):\n module.setup(bot)\n\n modified = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(mtime))\n\n bot.reply('%r (version: %s)' % (module, modified))\n\n\[email protected]_commands('update')\ndef f_update(bot, trigger):\n if not trigger.admin:\n return\n\n \"\"\"Pulls the latest versions of all modules from Git\"\"\"\n proc = subprocess.Popen('/usr/bin/git pull',\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, shell=True)\n bot.reply(proc.communicate()[0])\n\n f_reload(bot, trigger)\n\n\[email protected]_commands(\"load\")\[email protected](\"low\")\[email protected](False)\ndef f_load(bot, trigger):\n \"\"\"Loads a module, for use by admins only.\"\"\"\n if not trigger.admin:\n return\n\n name = trigger.group(2)\n path = ''\n if name == bot.config.core.owner:\n return bot.reply('What?')\n\n if name in sys.modules:\n return bot.reply('Module already loaded, use reload')\n\n mods = sopel.loader.enumerate_modules(bot.config)\n if name not in mods:\n return bot.reply('Module %s not found' % name)\n path, type_ = mods[name]\n load_module(bot, name, path, type_)\n\n\n# Catch PM based messages\[email protected](\"reload\")\[email protected](\"low\")\[email protected](False)\ndef pm_f_reload(bot, trigger):\n \"\"\"Wrapper for allowing delivery of .reload command via PM\"\"\"\n if trigger.is_privmsg:\n f_reload(bot, trigger)\n\n\[email protected]('update')\ndef pm_f_update(bot, trigger):\n \"\"\"Wrapper for allowing delivery of .update command via PM\"\"\"\n if trigger.is_privmsg:\n f_update(bot, trigger)\n\n\[email protected](\"load\")\[email protected](\"low\")\[email protected](False)\ndef pm_f_load(bot, trigger):\n \"\"\"Wrapper for allowing delivery of .load command via PM\"\"\"\n if trigger.is_privmsg:\n f_load(bot, trigger)\n", "path": "sopel/modules/reload.py"}], "after_files": [{"content": "# coding=utf-8\n\"\"\"\nreload.py - Sopel Module Reloader Module\nCopyright 2008, Sean B. Palmer, inamidst.com\nLicensed under the Eiffel Forum License 2.\n\nhttp://sopel.chat\n\"\"\"\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport collections\nimport sys\nimport time\nfrom sopel.tools import iteritems\nimport sopel.loader\nimport sopel.module\nimport subprocess\n\n\[email protected]_commands(\"reload\")\[email protected](\"low\")\[email protected](False)\ndef f_reload(bot, trigger):\n \"\"\"Reloads a module, for use by admins only.\"\"\"\n if not trigger.admin:\n return\n\n name = trigger.group(2)\n if name == bot.config.core.owner:\n return bot.reply('What?')\n\n if not name or name == '*' or name.upper() == 'ALL THE THINGS':\n bot._callables = {\n 'high': collections.defaultdict(list),\n 'medium': collections.defaultdict(list),\n 'low': collections.defaultdict(list)\n }\n bot.command_groups = collections.defaultdict(list)\n bot.setup()\n return bot.reply('done')\n\n if name not in sys.modules:\n return bot.reply('\"%s\" not loaded, try the `load` command' % name)\n\n old_module = sys.modules[name]\n\n old_callables = {}\n for obj_name, obj in iteritems(vars(old_module)):\n bot.unregister(obj)\n\n # Also remove all references to sopel callables from top level of the\n # module, so that they will not get loaded again if reloading the\n # module does not override them.\n for obj_name in old_callables.keys():\n delattr(old_module, obj_name)\n\n # Also delete the setup function\n if hasattr(old_module, \"setup\"):\n delattr(old_module, \"setup\")\n\n modules = sopel.loader.enumerate_modules(bot.config)\n if name not in modules:\n return bot.reply('\"%s\" not loaded, try the `load` command' % name)\n path, type_ = modules[name]\n load_module(bot, name, path, type_)\n\n\ndef load_module(bot, name, path, type_):\n module, mtime = sopel.loader.load_module(name, path, type_)\n relevant_parts = sopel.loader.clean_module(module, bot.config)\n\n bot.register(*relevant_parts)\n\n # TODO sys.modules[name] = module\n if hasattr(module, 'setup'):\n module.setup(bot)\n\n modified = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(mtime))\n\n bot.reply('%r (version: %s)' % (module, modified))\n\n\[email protected]_commands('update')\ndef f_update(bot, trigger):\n if not trigger.admin:\n return\n\n \"\"\"Pulls the latest versions of all modules from Git\"\"\"\n proc = subprocess.Popen('/usr/bin/git pull',\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, shell=True)\n bot.reply(proc.communicate()[0])\n\n f_reload(bot, trigger)\n\n\[email protected]_commands(\"load\")\[email protected](\"low\")\[email protected](False)\ndef f_load(bot, trigger):\n \"\"\"Loads a module, for use by admins only.\"\"\"\n if not trigger.admin:\n return\n\n name = trigger.group(2)\n path = ''\n if name == bot.config.core.owner:\n return bot.reply('What?')\n\n if name in sys.modules:\n return bot.reply('Module already loaded, use reload')\n\n mods = sopel.loader.enumerate_modules(bot.config)\n if name not in mods:\n return bot.reply('Module %s not found' % name)\n path, type_ = mods[name]\n load_module(bot, name, path, type_)\n\n\n# Catch PM based messages\[email protected](\"reload\")\[email protected](\"low\")\[email protected](False)\ndef pm_f_reload(bot, trigger):\n \"\"\"Wrapper for allowing delivery of .reload command via PM\"\"\"\n if trigger.is_privmsg:\n f_reload(bot, trigger)\n\n\[email protected]('update')\ndef pm_f_update(bot, trigger):\n \"\"\"Wrapper for allowing delivery of .update command via PM\"\"\"\n if trigger.is_privmsg:\n f_update(bot, trigger)\n\n\[email protected](\"load\")\[email protected](\"low\")\[email protected](False)\ndef pm_f_load(bot, trigger):\n \"\"\"Wrapper for allowing delivery of .load command via PM\"\"\"\n if trigger.is_privmsg:\n f_load(bot, trigger)\n", "path": "sopel/modules/reload.py"}]}
| 1,628 | 205 |
gh_patches_debug_23976
|
rasdani/github-patches
|
git_diff
|
Mailu__Mailu-1653
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove setup stack "replicas" option
In preparation for 1.8-rc release, and according to #1582 , remove the "replicas" option from setup step 5 in the stack deployment option. Also add an additional notice that users should review their configuration by hand, as well as a big warning about "replicas prone to data-corruption" to the "experimental" box at the top of setup.
(note that replicas is just *currently* not supported.)
Note that if you know what you’re doing, replicas can be easily configured by hand.
Relates: #1222 #1224 #1637
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup/server.py`
Content:
```
1 import flask
2 import flask_bootstrap
3 import redis
4 import json
5 import os
6 import jinja2
7 import uuid
8 import string
9 import random
10 import ipaddress
11 import hashlib
12 import time
13
14
15 version = os.getenv("this_version", "master")
16 static_url_path = "/" + version + "/static"
17 app = flask.Flask(__name__, static_url_path=static_url_path)
18 flask_bootstrap.Bootstrap(app)
19 db = redis.StrictRedis(host='redis', port=6379, db=0)
20
21
22 def render_flavor(flavor, template, data):
23 return flask.render_template(
24 os.path.join(flavor, template),
25 **data
26 )
27
28
29 @app.add_template_global
30 def secret(length=16):
31 charset = string.ascii_uppercase + string.digits
32 return ''.join(
33 random.SystemRandom().choice(charset)
34 for _ in range(length)
35 )
36
37 #Original copied from https://github.com/andrewlkho/ulagen
38 def random_ipv6_subnet():
39 eui64 = uuid.getnode() >> 24 << 48 | 0xfffe000000 | uuid.getnode() & 0xffffff
40 eui64_canon = "-".join([format(eui64, "02X")[i:i+2] for i in range(0, 18, 2)])
41
42 h = hashlib.sha1()
43 h.update((eui64_canon + str(time.time() - time.mktime((1900, 1, 1, 0, 0, 0, 0, 1, -1)))).encode('utf-8'))
44 globalid = h.hexdigest()[0:10]
45
46 prefix = ":".join(("fd" + globalid[0:2], globalid[2:6], globalid[6:10]))
47 return prefix
48
49 def build_app(path):
50
51 app.jinja_env.trim_blocks = True
52 app.jinja_env.lstrip_blocks = True
53
54 @app.context_processor
55 def app_context():
56 return dict(versions=os.getenv("VERSIONS","master").split(','))
57
58 prefix_bp = flask.Blueprint(version, __name__)
59 prefix_bp.jinja_loader = jinja2.ChoiceLoader([
60 jinja2.FileSystemLoader(os.path.join(path, "templates")),
61 jinja2.FileSystemLoader(os.path.join(path, "flavors"))
62 ])
63
64 root_bp = flask.Blueprint("root", __name__)
65 root_bp.jinja_loader = jinja2.ChoiceLoader([
66 jinja2.FileSystemLoader(os.path.join(path, "templates")),
67 jinja2.FileSystemLoader(os.path.join(path, "flavors"))
68 ])
69
70 @prefix_bp.context_processor
71 @root_bp.context_processor
72 def bp_context(version=version):
73 return dict(version=version)
74
75 @prefix_bp.route("/")
76 @root_bp.route("/")
77 def wizard():
78 return flask.render_template('wizard.html')
79
80 @prefix_bp.route("/submit_flavor", methods=["POST"])
81 @root_bp.route("/submit_flavor", methods=["POST"])
82 def submit_flavor():
83 data = flask.request.form.copy()
84 subnet6 = random_ipv6_subnet()
85 steps = sorted(os.listdir(os.path.join(path, "templates", "steps", data["flavor"])))
86 return flask.render_template('wizard.html', flavor=data["flavor"], steps=steps, subnet6=subnet6)
87
88 @prefix_bp.route("/submit", methods=["POST"])
89 @root_bp.route("/submit", methods=["POST"])
90 def submit():
91 data = flask.request.form.copy()
92 data['uid'] = str(uuid.uuid4())
93 try:
94 data['dns'] = str(ipaddress.IPv4Network(data['subnet'], strict=False)[-2])
95 except ValueError as err:
96 return "Error while generating files: " + str(err)
97 db.set(data['uid'], json.dumps(data))
98 return flask.redirect(flask.url_for('.setup', uid=data['uid']))
99
100 @prefix_bp.route("/setup/<uid>", methods=["GET"])
101 @root_bp.route("/setup/<uid>", methods=["GET"])
102 def setup(uid):
103 data = json.loads(db.get(uid))
104 flavor = data.get("flavor", "compose")
105 rendered = render_flavor(flavor, "setup.html", data)
106 return flask.render_template("setup.html", contents=rendered)
107
108 @prefix_bp.route("/file/<uid>/<filepath>", methods=["GET"])
109 @root_bp.route("/file/<uid>/<filepath>", methods=["GET"])
110 def file(uid, filepath):
111 data = json.loads(db.get(uid))
112 flavor = data.get("flavor", "compose")
113 return flask.Response(
114 render_flavor(flavor, filepath, data),
115 mimetype="application/text"
116 )
117
118 app.register_blueprint(prefix_bp, url_prefix="/{}".format(version))
119 app.register_blueprint(root_bp)
120
121
122 if __name__ == "__main__":
123 build_app("/tmp/mailutest")
124 app.run(debug=True)
125
```
Path: `docs/conf.py`
Content:
```
1 #!/usr/bin/env python3
2 # -*- coding: utf-8 -*-
3 #
4
5 import os
6
7 extensions = ['sphinx.ext.imgmath', 'sphinx.ext.viewcode']
8 templates_path = ['_templates']
9 source_suffix = '.rst'
10 master_doc = 'index'
11 project = 'Mailu'
12 copyright = '2018, Mailu authors'
13 author = 'Mailu authors'
14 version = release = os.environ.get('VERSION', 'master')
15 language = None
16 exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'Dockerfile', 'docker-compose.yml']
17 pygments_style = 'sphinx'
18 todo_include_todos = False
19 html_theme = 'sphinx_rtd_theme'
20 html_title = 'Mailu, Docker based mail server'
21 html_static_path = []
22 htmlhelp_basename = 'Mailudoc'
23
24 # Custom sidebar templates, must be a dictionary that maps document names
25 # to template names.
26 html_sidebars = {
27 '**': [
28 'relations.html',
29 'searchbox.html',
30 ]
31 }
32
33 # Theme options
34 html_context = {
35 'display_github': True,
36 'github_user': 'mailu',
37 'github_repo': 'mailu',
38 'github_version': version,
39 'stable_version': '1.7',
40 'versions': [
41 ('1.5', '/1.5/'),
42 ('1.6', '/1.6/'),
43 ('1.7', '/1.7/'),
44 ('master', '/master/')
45 ],
46 'conf_py_path': '/docs/'
47 }
48
```
Path: `core/admin/mailu/configuration.py`
Content:
```
1 import os
2
3 from socrate import system
4
5 DEFAULT_CONFIG = {
6 # Specific to the admin UI
7 'DOCKER_SOCKET': 'unix:///var/run/docker.sock',
8 'BABEL_DEFAULT_LOCALE': 'en',
9 'BABEL_DEFAULT_TIMEZONE': 'UTC',
10 'BOOTSTRAP_SERVE_LOCAL': True,
11 'RATELIMIT_STORAGE_URL': '',
12 'QUOTA_STORAGE_URL': '',
13 'DEBUG': False,
14 'DOMAIN_REGISTRATION': False,
15 'TEMPLATES_AUTO_RELOAD': True,
16 # Database settings
17 'DB_FLAVOR': None,
18 'DB_USER': 'mailu',
19 'DB_PW': None,
20 'DB_HOST': 'database',
21 'DB_NAME': 'mailu',
22 'SQLITE_DATABASE_FILE':'data/main.db',
23 'SQLALCHEMY_DATABASE_URI': 'sqlite:////data/main.db',
24 'SQLALCHEMY_TRACK_MODIFICATIONS': False,
25 # Statistics management
26 'INSTANCE_ID_PATH': '/data/instance',
27 'STATS_ENDPOINT': '0.{}.stats.mailu.io',
28 # Common configuration variables
29 'SECRET_KEY': 'changeMe',
30 'DOMAIN': 'mailu.io',
31 'HOSTNAMES': 'mail.mailu.io,alternative.mailu.io,yetanother.mailu.io',
32 'POSTMASTER': 'postmaster',
33 'TLS_FLAVOR': 'cert',
34 'AUTH_RATELIMIT': '10/minute;1000/hour',
35 'AUTH_RATELIMIT_SUBNET': True,
36 'DISABLE_STATISTICS': False,
37 # Mail settings
38 'DMARC_RUA': None,
39 'DMARC_RUF': None,
40 'WELCOME': False,
41 'WELCOME_SUBJECT': 'Dummy welcome topic',
42 'WELCOME_BODY': 'Dummy welcome body',
43 'DKIM_SELECTOR': 'dkim',
44 'DKIM_PATH': '/dkim/{domain}.{selector}.key',
45 'DEFAULT_QUOTA': 1000000000,
46 # Web settings
47 'SITENAME': 'Mailu',
48 'WEBSITE': 'https://mailu.io',
49 'WEB_ADMIN': '/admin',
50 'WEB_WEBMAIL': '/webmail',
51 'WEBMAIL': 'none',
52 'RECAPTCHA_PUBLIC_KEY': '',
53 'RECAPTCHA_PRIVATE_KEY': '',
54 # Advanced settings
55 'PASSWORD_SCHEME': 'PBKDF2',
56 'LOG_LEVEL': 'WARNING',
57 # Host settings
58 'HOST_IMAP': 'imap',
59 'HOST_LMTP': 'imap:2525',
60 'HOST_POP3': 'imap',
61 'HOST_SMTP': 'smtp',
62 'HOST_AUTHSMTP': 'smtp',
63 'HOST_ADMIN': 'admin',
64 'WEBMAIL': 'none',
65 'HOST_WEBMAIL': 'webmail',
66 'HOST_WEBDAV': 'webdav:5232',
67 'HOST_REDIS': 'redis',
68 'HOST_FRONT': 'front',
69 'SUBNET': '192.168.203.0/24',
70 'SUBNET6': None,
71 'POD_ADDRESS_RANGE': None
72 }
73
74 class ConfigManager(dict):
75 """ Naive configuration manager that uses environment only
76 """
77
78 DB_TEMPLATES = {
79 'sqlite': 'sqlite:////{SQLITE_DATABASE_FILE}',
80 'postgresql': 'postgresql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}',
81 'mysql': 'mysql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}'
82 }
83
84 def __init__(self):
85 self.config = dict()
86
87 def get_host_address(self, name):
88 # if MYSERVICE_ADDRESS is defined, use this
89 if '{}_ADDRESS'.format(name) in os.environ:
90 return os.environ.get('{}_ADDRESS'.format(name))
91 # otherwise use the host name and resolve it
92 return system.resolve_address(self.config['HOST_{}'.format(name)])
93
94 def resolve_hosts(self):
95 self.config["IMAP_ADDRESS"] = self.get_host_address("IMAP")
96 self.config["POP3_ADDRESS"] = self.get_host_address("POP3")
97 self.config["AUTHSMTP_ADDRESS"] = self.get_host_address("AUTHSMTP")
98 self.config["SMTP_ADDRESS"] = self.get_host_address("SMTP")
99 self.config["REDIS_ADDRESS"] = self.get_host_address("REDIS")
100 if self.config["WEBMAIL"] != "none":
101 self.config["WEBMAIL_ADDRESS"] = self.get_host_address("WEBMAIL")
102
103 def __coerce_value(self, value):
104 if isinstance(value, str) and value.lower() in ('true','yes'):
105 return True
106 elif isinstance(value, str) and value.lower() in ('false', 'no'):
107 return False
108 return value
109
110 def init_app(self, app):
111 self.config.update(app.config)
112 # get environment variables
113 self.config.update({
114 key: self.__coerce_value(os.environ.get(key, value))
115 for key, value in DEFAULT_CONFIG.items()
116 })
117 self.resolve_hosts()
118
119 # automatically set the sqlalchemy string
120 if self.config['DB_FLAVOR']:
121 template = self.DB_TEMPLATES[self.config['DB_FLAVOR']]
122 self.config['SQLALCHEMY_DATABASE_URI'] = template.format(**self.config)
123
124 self.config['RATELIMIT_STORAGE_URL'] = 'redis://{0}/2'.format(self.config['REDIS_ADDRESS'])
125 self.config['QUOTA_STORAGE_URL'] = 'redis://{0}/1'.format(self.config['REDIS_ADDRESS'])
126 # update the app config itself
127 app.config = self
128
129 def setdefault(self, key, value):
130 if key not in self.config:
131 self.config[key] = value
132 return self.config[key]
133
134 def get(self, *args):
135 return self.config.get(*args)
136
137 def keys(self):
138 return self.config.keys()
139
140 def __getitem__(self, key):
141 return self.config.get(key)
142
143 def __setitem__(self, key, value):
144 self.config[key] = value
145
146 def __contains__(self, key):
147 return key in self.config
148
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/core/admin/mailu/configuration.py b/core/admin/mailu/configuration.py
--- a/core/admin/mailu/configuration.py
+++ b/core/admin/mailu/configuration.py
@@ -24,7 +24,7 @@
'SQLALCHEMY_TRACK_MODIFICATIONS': False,
# Statistics management
'INSTANCE_ID_PATH': '/data/instance',
- 'STATS_ENDPOINT': '0.{}.stats.mailu.io',
+ 'STATS_ENDPOINT': '18.{}.stats.mailu.io',
# Common configuration variables
'SECRET_KEY': 'changeMe',
'DOMAIN': 'mailu.io',
diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -41,6 +41,7 @@
('1.5', '/1.5/'),
('1.6', '/1.6/'),
('1.7', '/1.7/'),
+ ('1.8', '/1.8/'),
('master', '/master/')
],
'conf_py_path': '/docs/'
diff --git a/setup/server.py b/setup/server.py
--- a/setup/server.py
+++ b/setup/server.py
@@ -53,7 +53,10 @@
@app.context_processor
def app_context():
- return dict(versions=os.getenv("VERSIONS","master").split(','))
+ return dict(
+ versions=os.getenv("VERSIONS","master").split(','),
+ stable_version = os.getenv("stable_version", "master")
+ )
prefix_bp = flask.Blueprint(version, __name__)
prefix_bp.jinja_loader = jinja2.ChoiceLoader([
|
{"golden_diff": "diff --git a/core/admin/mailu/configuration.py b/core/admin/mailu/configuration.py\n--- a/core/admin/mailu/configuration.py\n+++ b/core/admin/mailu/configuration.py\n@@ -24,7 +24,7 @@\n 'SQLALCHEMY_TRACK_MODIFICATIONS': False,\n # Statistics management\n 'INSTANCE_ID_PATH': '/data/instance',\n- 'STATS_ENDPOINT': '0.{}.stats.mailu.io',\n+ 'STATS_ENDPOINT': '18.{}.stats.mailu.io',\n # Common configuration variables\n 'SECRET_KEY': 'changeMe',\n 'DOMAIN': 'mailu.io',\ndiff --git a/docs/conf.py b/docs/conf.py\n--- a/docs/conf.py\n+++ b/docs/conf.py\n@@ -41,6 +41,7 @@\n ('1.5', '/1.5/'),\n ('1.6', '/1.6/'),\n ('1.7', '/1.7/'),\n+ ('1.8', '/1.8/'),\n ('master', '/master/')\n ],\n 'conf_py_path': '/docs/'\ndiff --git a/setup/server.py b/setup/server.py\n--- a/setup/server.py\n+++ b/setup/server.py\n@@ -53,7 +53,10 @@\n \n @app.context_processor\n def app_context():\n- return dict(versions=os.getenv(\"VERSIONS\",\"master\").split(','))\n+ return dict(\n+ versions=os.getenv(\"VERSIONS\",\"master\").split(','), \n+ stable_version = os.getenv(\"stable_version\", \"master\")\n+ )\n \n prefix_bp = flask.Blueprint(version, __name__)\n prefix_bp.jinja_loader = jinja2.ChoiceLoader([\n", "issue": "Remove setup stack \"replicas\" option\nIn preparation for 1.8-rc release, and according to #1582 , remove the \"replicas\" option from setup step 5 in the stack deployment option. Also add an additional notice that users should review their configuration by hand, as well as a big warning about \"replicas prone to data-corruption\" to the \"experimental\" box at the top of setup.\r\n(note that replicas is just *currently* not supported.)\r\n\r\nNote that if you know what you\u2019re doing, replicas can be easily configured by hand.\r\n\r\nRelates: #1222 #1224 #1637\n", "before_files": [{"content": "import flask\nimport flask_bootstrap\nimport redis\nimport json\nimport os\nimport jinja2\nimport uuid\nimport string\nimport random\nimport ipaddress\nimport hashlib\nimport time\n\n\nversion = os.getenv(\"this_version\", \"master\")\nstatic_url_path = \"/\" + version + \"/static\"\napp = flask.Flask(__name__, static_url_path=static_url_path)\nflask_bootstrap.Bootstrap(app)\ndb = redis.StrictRedis(host='redis', port=6379, db=0)\n\n\ndef render_flavor(flavor, template, data):\n return flask.render_template(\n os.path.join(flavor, template),\n **data\n )\n\n\[email protected]_template_global\ndef secret(length=16):\n charset = string.ascii_uppercase + string.digits\n return ''.join(\n random.SystemRandom().choice(charset)\n for _ in range(length)\n )\n\n#Original copied from https://github.com/andrewlkho/ulagen\ndef random_ipv6_subnet():\n eui64 = uuid.getnode() >> 24 << 48 | 0xfffe000000 | uuid.getnode() & 0xffffff\n eui64_canon = \"-\".join([format(eui64, \"02X\")[i:i+2] for i in range(0, 18, 2)])\n\n h = hashlib.sha1()\n h.update((eui64_canon + str(time.time() - time.mktime((1900, 1, 1, 0, 0, 0, 0, 1, -1)))).encode('utf-8'))\n globalid = h.hexdigest()[0:10]\n\n prefix = \":\".join((\"fd\" + globalid[0:2], globalid[2:6], globalid[6:10]))\n return prefix\n\ndef build_app(path):\n\n app.jinja_env.trim_blocks = True\n app.jinja_env.lstrip_blocks = True\n\n @app.context_processor\n def app_context():\n return dict(versions=os.getenv(\"VERSIONS\",\"master\").split(','))\n\n prefix_bp = flask.Blueprint(version, __name__)\n prefix_bp.jinja_loader = jinja2.ChoiceLoader([\n jinja2.FileSystemLoader(os.path.join(path, \"templates\")),\n jinja2.FileSystemLoader(os.path.join(path, \"flavors\"))\n ])\n\n root_bp = flask.Blueprint(\"root\", __name__)\n root_bp.jinja_loader = jinja2.ChoiceLoader([\n jinja2.FileSystemLoader(os.path.join(path, \"templates\")),\n jinja2.FileSystemLoader(os.path.join(path, \"flavors\"))\n ])\n\n @prefix_bp.context_processor\n @root_bp.context_processor\n def bp_context(version=version):\n return dict(version=version)\n\n @prefix_bp.route(\"/\")\n @root_bp.route(\"/\")\n def wizard():\n return flask.render_template('wizard.html')\n\n @prefix_bp.route(\"/submit_flavor\", methods=[\"POST\"])\n @root_bp.route(\"/submit_flavor\", methods=[\"POST\"])\n def submit_flavor():\n data = flask.request.form.copy()\n subnet6 = random_ipv6_subnet()\n steps = sorted(os.listdir(os.path.join(path, \"templates\", \"steps\", data[\"flavor\"])))\n return flask.render_template('wizard.html', flavor=data[\"flavor\"], steps=steps, subnet6=subnet6)\n\n @prefix_bp.route(\"/submit\", methods=[\"POST\"])\n @root_bp.route(\"/submit\", methods=[\"POST\"])\n def submit():\n data = flask.request.form.copy()\n data['uid'] = str(uuid.uuid4())\n try:\n data['dns'] = str(ipaddress.IPv4Network(data['subnet'], strict=False)[-2])\n except ValueError as err:\n return \"Error while generating files: \" + str(err)\n db.set(data['uid'], json.dumps(data))\n return flask.redirect(flask.url_for('.setup', uid=data['uid']))\n\n @prefix_bp.route(\"/setup/<uid>\", methods=[\"GET\"])\n @root_bp.route(\"/setup/<uid>\", methods=[\"GET\"])\n def setup(uid):\n data = json.loads(db.get(uid))\n flavor = data.get(\"flavor\", \"compose\")\n rendered = render_flavor(flavor, \"setup.html\", data)\n return flask.render_template(\"setup.html\", contents=rendered)\n\n @prefix_bp.route(\"/file/<uid>/<filepath>\", methods=[\"GET\"])\n @root_bp.route(\"/file/<uid>/<filepath>\", methods=[\"GET\"])\n def file(uid, filepath):\n data = json.loads(db.get(uid))\n flavor = data.get(\"flavor\", \"compose\")\n return flask.Response(\n render_flavor(flavor, filepath, data),\n mimetype=\"application/text\"\n )\n\n app.register_blueprint(prefix_bp, url_prefix=\"/{}\".format(version))\n app.register_blueprint(root_bp)\n\n\nif __name__ == \"__main__\":\n build_app(\"/tmp/mailutest\")\n app.run(debug=True)\n", "path": "setup/server.py"}, {"content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n#\n\nimport os\n\nextensions = ['sphinx.ext.imgmath', 'sphinx.ext.viewcode']\ntemplates_path = ['_templates']\nsource_suffix = '.rst'\nmaster_doc = 'index'\nproject = 'Mailu'\ncopyright = '2018, Mailu authors'\nauthor = 'Mailu authors'\nversion = release = os.environ.get('VERSION', 'master')\nlanguage = None\nexclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'Dockerfile', 'docker-compose.yml']\npygments_style = 'sphinx'\ntodo_include_todos = False\nhtml_theme = 'sphinx_rtd_theme'\nhtml_title = 'Mailu, Docker based mail server'\nhtml_static_path = []\nhtmlhelp_basename = 'Mailudoc'\n\n# Custom sidebar templates, must be a dictionary that maps document names\n# to template names.\nhtml_sidebars = {\n '**': [\n 'relations.html', \n 'searchbox.html',\n ]\n}\n\n# Theme options\nhtml_context = {\n 'display_github': True,\n 'github_user': 'mailu',\n 'github_repo': 'mailu',\n 'github_version': version,\n 'stable_version': '1.7',\n 'versions': [\n ('1.5', '/1.5/'),\n ('1.6', '/1.6/'),\n ('1.7', '/1.7/'),\n ('master', '/master/')\n ],\n 'conf_py_path': '/docs/'\n}\n", "path": "docs/conf.py"}, {"content": "import os\n\nfrom socrate import system\n\nDEFAULT_CONFIG = {\n # Specific to the admin UI\n 'DOCKER_SOCKET': 'unix:///var/run/docker.sock',\n 'BABEL_DEFAULT_LOCALE': 'en',\n 'BABEL_DEFAULT_TIMEZONE': 'UTC',\n 'BOOTSTRAP_SERVE_LOCAL': True,\n 'RATELIMIT_STORAGE_URL': '',\n 'QUOTA_STORAGE_URL': '',\n 'DEBUG': False,\n 'DOMAIN_REGISTRATION': False,\n 'TEMPLATES_AUTO_RELOAD': True,\n # Database settings\n 'DB_FLAVOR': None,\n 'DB_USER': 'mailu',\n 'DB_PW': None,\n 'DB_HOST': 'database',\n 'DB_NAME': 'mailu',\n 'SQLITE_DATABASE_FILE':'data/main.db',\n 'SQLALCHEMY_DATABASE_URI': 'sqlite:////data/main.db',\n 'SQLALCHEMY_TRACK_MODIFICATIONS': False,\n # Statistics management\n 'INSTANCE_ID_PATH': '/data/instance',\n 'STATS_ENDPOINT': '0.{}.stats.mailu.io',\n # Common configuration variables\n 'SECRET_KEY': 'changeMe',\n 'DOMAIN': 'mailu.io',\n 'HOSTNAMES': 'mail.mailu.io,alternative.mailu.io,yetanother.mailu.io',\n 'POSTMASTER': 'postmaster',\n 'TLS_FLAVOR': 'cert',\n 'AUTH_RATELIMIT': '10/minute;1000/hour',\n 'AUTH_RATELIMIT_SUBNET': True,\n 'DISABLE_STATISTICS': False,\n # Mail settings\n 'DMARC_RUA': None,\n 'DMARC_RUF': None,\n 'WELCOME': False,\n 'WELCOME_SUBJECT': 'Dummy welcome topic',\n 'WELCOME_BODY': 'Dummy welcome body',\n 'DKIM_SELECTOR': 'dkim',\n 'DKIM_PATH': '/dkim/{domain}.{selector}.key',\n 'DEFAULT_QUOTA': 1000000000,\n # Web settings\n 'SITENAME': 'Mailu',\n 'WEBSITE': 'https://mailu.io',\n 'WEB_ADMIN': '/admin',\n 'WEB_WEBMAIL': '/webmail',\n 'WEBMAIL': 'none',\n 'RECAPTCHA_PUBLIC_KEY': '',\n 'RECAPTCHA_PRIVATE_KEY': '',\n # Advanced settings\n 'PASSWORD_SCHEME': 'PBKDF2',\n 'LOG_LEVEL': 'WARNING',\n # Host settings\n 'HOST_IMAP': 'imap',\n 'HOST_LMTP': 'imap:2525',\n 'HOST_POP3': 'imap',\n 'HOST_SMTP': 'smtp',\n 'HOST_AUTHSMTP': 'smtp',\n 'HOST_ADMIN': 'admin',\n 'WEBMAIL': 'none',\n 'HOST_WEBMAIL': 'webmail',\n 'HOST_WEBDAV': 'webdav:5232',\n 'HOST_REDIS': 'redis',\n 'HOST_FRONT': 'front',\n 'SUBNET': '192.168.203.0/24',\n 'SUBNET6': None,\n 'POD_ADDRESS_RANGE': None\n}\n\nclass ConfigManager(dict):\n \"\"\" Naive configuration manager that uses environment only\n \"\"\"\n\n DB_TEMPLATES = {\n 'sqlite': 'sqlite:////{SQLITE_DATABASE_FILE}',\n 'postgresql': 'postgresql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}',\n 'mysql': 'mysql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}'\n }\n\n def __init__(self):\n self.config = dict()\n\n def get_host_address(self, name):\n # if MYSERVICE_ADDRESS is defined, use this\n if '{}_ADDRESS'.format(name) in os.environ:\n return os.environ.get('{}_ADDRESS'.format(name))\n # otherwise use the host name and resolve it\n return system.resolve_address(self.config['HOST_{}'.format(name)])\n\n def resolve_hosts(self):\n self.config[\"IMAP_ADDRESS\"] = self.get_host_address(\"IMAP\")\n self.config[\"POP3_ADDRESS\"] = self.get_host_address(\"POP3\")\n self.config[\"AUTHSMTP_ADDRESS\"] = self.get_host_address(\"AUTHSMTP\")\n self.config[\"SMTP_ADDRESS\"] = self.get_host_address(\"SMTP\")\n self.config[\"REDIS_ADDRESS\"] = self.get_host_address(\"REDIS\")\n if self.config[\"WEBMAIL\"] != \"none\":\n self.config[\"WEBMAIL_ADDRESS\"] = self.get_host_address(\"WEBMAIL\")\n\n def __coerce_value(self, value):\n if isinstance(value, str) and value.lower() in ('true','yes'):\n return True\n elif isinstance(value, str) and value.lower() in ('false', 'no'):\n return False\n return value\n\n def init_app(self, app):\n self.config.update(app.config)\n # get environment variables\n self.config.update({\n key: self.__coerce_value(os.environ.get(key, value))\n for key, value in DEFAULT_CONFIG.items()\n })\n self.resolve_hosts()\n\n # automatically set the sqlalchemy string\n if self.config['DB_FLAVOR']:\n template = self.DB_TEMPLATES[self.config['DB_FLAVOR']]\n self.config['SQLALCHEMY_DATABASE_URI'] = template.format(**self.config)\n\n self.config['RATELIMIT_STORAGE_URL'] = 'redis://{0}/2'.format(self.config['REDIS_ADDRESS'])\n self.config['QUOTA_STORAGE_URL'] = 'redis://{0}/1'.format(self.config['REDIS_ADDRESS'])\n # update the app config itself\n app.config = self\n\n def setdefault(self, key, value):\n if key not in self.config:\n self.config[key] = value\n return self.config[key]\n\n def get(self, *args):\n return self.config.get(*args)\n\n def keys(self):\n return self.config.keys()\n\n def __getitem__(self, key):\n return self.config.get(key)\n\n def __setitem__(self, key, value):\n self.config[key] = value\n\n def __contains__(self, key):\n return key in self.config\n", "path": "core/admin/mailu/configuration.py"}], "after_files": [{"content": "import flask\nimport flask_bootstrap\nimport redis\nimport json\nimport os\nimport jinja2\nimport uuid\nimport string\nimport random\nimport ipaddress\nimport hashlib\nimport time\n\n\nversion = os.getenv(\"this_version\", \"master\")\nstatic_url_path = \"/\" + version + \"/static\"\napp = flask.Flask(__name__, static_url_path=static_url_path)\nflask_bootstrap.Bootstrap(app)\ndb = redis.StrictRedis(host='redis', port=6379, db=0)\n\n\ndef render_flavor(flavor, template, data):\n return flask.render_template(\n os.path.join(flavor, template),\n **data\n )\n\n\[email protected]_template_global\ndef secret(length=16):\n charset = string.ascii_uppercase + string.digits\n return ''.join(\n random.SystemRandom().choice(charset)\n for _ in range(length)\n )\n\n#Original copied from https://github.com/andrewlkho/ulagen\ndef random_ipv6_subnet():\n eui64 = uuid.getnode() >> 24 << 48 | 0xfffe000000 | uuid.getnode() & 0xffffff\n eui64_canon = \"-\".join([format(eui64, \"02X\")[i:i+2] for i in range(0, 18, 2)])\n\n h = hashlib.sha1()\n h.update((eui64_canon + str(time.time() - time.mktime((1900, 1, 1, 0, 0, 0, 0, 1, -1)))).encode('utf-8'))\n globalid = h.hexdigest()[0:10]\n\n prefix = \":\".join((\"fd\" + globalid[0:2], globalid[2:6], globalid[6:10]))\n return prefix\n\ndef build_app(path):\n\n app.jinja_env.trim_blocks = True\n app.jinja_env.lstrip_blocks = True\n\n @app.context_processor\n def app_context():\n return dict(\n versions=os.getenv(\"VERSIONS\",\"master\").split(','), \n stable_version = os.getenv(\"stable_version\", \"master\")\n )\n\n prefix_bp = flask.Blueprint(version, __name__)\n prefix_bp.jinja_loader = jinja2.ChoiceLoader([\n jinja2.FileSystemLoader(os.path.join(path, \"templates\")),\n jinja2.FileSystemLoader(os.path.join(path, \"flavors\"))\n ])\n\n root_bp = flask.Blueprint(\"root\", __name__)\n root_bp.jinja_loader = jinja2.ChoiceLoader([\n jinja2.FileSystemLoader(os.path.join(path, \"templates\")),\n jinja2.FileSystemLoader(os.path.join(path, \"flavors\"))\n ])\n\n @prefix_bp.context_processor\n @root_bp.context_processor\n def bp_context(version=version):\n return dict(version=version)\n\n @prefix_bp.route(\"/\")\n @root_bp.route(\"/\")\n def wizard():\n return flask.render_template('wizard.html')\n\n @prefix_bp.route(\"/submit_flavor\", methods=[\"POST\"])\n @root_bp.route(\"/submit_flavor\", methods=[\"POST\"])\n def submit_flavor():\n data = flask.request.form.copy()\n subnet6 = random_ipv6_subnet()\n steps = sorted(os.listdir(os.path.join(path, \"templates\", \"steps\", data[\"flavor\"])))\n return flask.render_template('wizard.html', flavor=data[\"flavor\"], steps=steps, subnet6=subnet6)\n\n @prefix_bp.route(\"/submit\", methods=[\"POST\"])\n @root_bp.route(\"/submit\", methods=[\"POST\"])\n def submit():\n data = flask.request.form.copy()\n data['uid'] = str(uuid.uuid4())\n try:\n data['dns'] = str(ipaddress.IPv4Network(data['subnet'], strict=False)[-2])\n except ValueError as err:\n return \"Error while generating files: \" + str(err)\n db.set(data['uid'], json.dumps(data))\n return flask.redirect(flask.url_for('.setup', uid=data['uid']))\n\n @prefix_bp.route(\"/setup/<uid>\", methods=[\"GET\"])\n @root_bp.route(\"/setup/<uid>\", methods=[\"GET\"])\n def setup(uid):\n data = json.loads(db.get(uid))\n flavor = data.get(\"flavor\", \"compose\")\n rendered = render_flavor(flavor, \"setup.html\", data)\n return flask.render_template(\"setup.html\", contents=rendered)\n\n @prefix_bp.route(\"/file/<uid>/<filepath>\", methods=[\"GET\"])\n @root_bp.route(\"/file/<uid>/<filepath>\", methods=[\"GET\"])\n def file(uid, filepath):\n data = json.loads(db.get(uid))\n flavor = data.get(\"flavor\", \"compose\")\n return flask.Response(\n render_flavor(flavor, filepath, data),\n mimetype=\"application/text\"\n )\n\n app.register_blueprint(prefix_bp, url_prefix=\"/{}\".format(version))\n app.register_blueprint(root_bp)\n\n\nif __name__ == \"__main__\":\n build_app(\"/tmp/mailutest\")\n app.run(debug=True)\n", "path": "setup/server.py"}, {"content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n#\n\nimport os\n\nextensions = ['sphinx.ext.imgmath', 'sphinx.ext.viewcode']\ntemplates_path = ['_templates']\nsource_suffix = '.rst'\nmaster_doc = 'index'\nproject = 'Mailu'\ncopyright = '2018, Mailu authors'\nauthor = 'Mailu authors'\nversion = release = os.environ.get('VERSION', 'master')\nlanguage = None\nexclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'Dockerfile', 'docker-compose.yml']\npygments_style = 'sphinx'\ntodo_include_todos = False\nhtml_theme = 'sphinx_rtd_theme'\nhtml_title = 'Mailu, Docker based mail server'\nhtml_static_path = []\nhtmlhelp_basename = 'Mailudoc'\n\n# Custom sidebar templates, must be a dictionary that maps document names\n# to template names.\nhtml_sidebars = {\n '**': [\n 'relations.html', \n 'searchbox.html',\n ]\n}\n\n# Theme options\nhtml_context = {\n 'display_github': True,\n 'github_user': 'mailu',\n 'github_repo': 'mailu',\n 'github_version': version,\n 'stable_version': '1.7',\n 'versions': [\n ('1.5', '/1.5/'),\n ('1.6', '/1.6/'),\n ('1.7', '/1.7/'),\n ('1.8', '/1.8/'),\n ('master', '/master/')\n ],\n 'conf_py_path': '/docs/'\n}\n", "path": "docs/conf.py"}, {"content": "import os\n\nfrom socrate import system\n\nDEFAULT_CONFIG = {\n # Specific to the admin UI\n 'DOCKER_SOCKET': 'unix:///var/run/docker.sock',\n 'BABEL_DEFAULT_LOCALE': 'en',\n 'BABEL_DEFAULT_TIMEZONE': 'UTC',\n 'BOOTSTRAP_SERVE_LOCAL': True,\n 'RATELIMIT_STORAGE_URL': '',\n 'QUOTA_STORAGE_URL': '',\n 'DEBUG': False,\n 'DOMAIN_REGISTRATION': False,\n 'TEMPLATES_AUTO_RELOAD': True,\n # Database settings\n 'DB_FLAVOR': None,\n 'DB_USER': 'mailu',\n 'DB_PW': None,\n 'DB_HOST': 'database',\n 'DB_NAME': 'mailu',\n 'SQLITE_DATABASE_FILE':'data/main.db',\n 'SQLALCHEMY_DATABASE_URI': 'sqlite:////data/main.db',\n 'SQLALCHEMY_TRACK_MODIFICATIONS': False,\n # Statistics management\n 'INSTANCE_ID_PATH': '/data/instance',\n 'STATS_ENDPOINT': '18.{}.stats.mailu.io',\n # Common configuration variables\n 'SECRET_KEY': 'changeMe',\n 'DOMAIN': 'mailu.io',\n 'HOSTNAMES': 'mail.mailu.io,alternative.mailu.io,yetanother.mailu.io',\n 'POSTMASTER': 'postmaster',\n 'TLS_FLAVOR': 'cert',\n 'AUTH_RATELIMIT': '10/minute;1000/hour',\n 'AUTH_RATELIMIT_SUBNET': True,\n 'DISABLE_STATISTICS': False,\n # Mail settings\n 'DMARC_RUA': None,\n 'DMARC_RUF': None,\n 'WELCOME': False,\n 'WELCOME_SUBJECT': 'Dummy welcome topic',\n 'WELCOME_BODY': 'Dummy welcome body',\n 'DKIM_SELECTOR': 'dkim',\n 'DKIM_PATH': '/dkim/{domain}.{selector}.key',\n 'DEFAULT_QUOTA': 1000000000,\n # Web settings\n 'SITENAME': 'Mailu',\n 'WEBSITE': 'https://mailu.io',\n 'WEB_ADMIN': '/admin',\n 'WEB_WEBMAIL': '/webmail',\n 'WEBMAIL': 'none',\n 'RECAPTCHA_PUBLIC_KEY': '',\n 'RECAPTCHA_PRIVATE_KEY': '',\n # Advanced settings\n 'PASSWORD_SCHEME': 'PBKDF2',\n 'LOG_LEVEL': 'WARNING',\n # Host settings\n 'HOST_IMAP': 'imap',\n 'HOST_LMTP': 'imap:2525',\n 'HOST_POP3': 'imap',\n 'HOST_SMTP': 'smtp',\n 'HOST_AUTHSMTP': 'smtp',\n 'HOST_ADMIN': 'admin',\n 'WEBMAIL': 'none',\n 'HOST_WEBMAIL': 'webmail',\n 'HOST_WEBDAV': 'webdav:5232',\n 'HOST_REDIS': 'redis',\n 'HOST_FRONT': 'front',\n 'SUBNET': '192.168.203.0/24',\n 'SUBNET6': None,\n 'POD_ADDRESS_RANGE': None\n}\n\nclass ConfigManager(dict):\n \"\"\" Naive configuration manager that uses environment only\n \"\"\"\n\n DB_TEMPLATES = {\n 'sqlite': 'sqlite:////{SQLITE_DATABASE_FILE}',\n 'postgresql': 'postgresql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}',\n 'mysql': 'mysql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}'\n }\n\n def __init__(self):\n self.config = dict()\n\n def get_host_address(self, name):\n # if MYSERVICE_ADDRESS is defined, use this\n if '{}_ADDRESS'.format(name) in os.environ:\n return os.environ.get('{}_ADDRESS'.format(name))\n # otherwise use the host name and resolve it\n return system.resolve_address(self.config['HOST_{}'.format(name)])\n\n def resolve_hosts(self):\n self.config[\"IMAP_ADDRESS\"] = self.get_host_address(\"IMAP\")\n self.config[\"POP3_ADDRESS\"] = self.get_host_address(\"POP3\")\n self.config[\"AUTHSMTP_ADDRESS\"] = self.get_host_address(\"AUTHSMTP\")\n self.config[\"SMTP_ADDRESS\"] = self.get_host_address(\"SMTP\")\n self.config[\"REDIS_ADDRESS\"] = self.get_host_address(\"REDIS\")\n if self.config[\"WEBMAIL\"] != \"none\":\n self.config[\"WEBMAIL_ADDRESS\"] = self.get_host_address(\"WEBMAIL\")\n\n def __coerce_value(self, value):\n if isinstance(value, str) and value.lower() in ('true','yes'):\n return True\n elif isinstance(value, str) and value.lower() in ('false', 'no'):\n return False\n return value\n\n def init_app(self, app):\n self.config.update(app.config)\n # get environment variables\n self.config.update({\n key: self.__coerce_value(os.environ.get(key, value))\n for key, value in DEFAULT_CONFIG.items()\n })\n self.resolve_hosts()\n\n # automatically set the sqlalchemy string\n if self.config['DB_FLAVOR']:\n template = self.DB_TEMPLATES[self.config['DB_FLAVOR']]\n self.config['SQLALCHEMY_DATABASE_URI'] = template.format(**self.config)\n\n self.config['RATELIMIT_STORAGE_URL'] = 'redis://{0}/2'.format(self.config['REDIS_ADDRESS'])\n self.config['QUOTA_STORAGE_URL'] = 'redis://{0}/1'.format(self.config['REDIS_ADDRESS'])\n # update the app config itself\n app.config = self\n\n def setdefault(self, key, value):\n if key not in self.config:\n self.config[key] = value\n return self.config[key]\n\n def get(self, *args):\n return self.config.get(*args)\n\n def keys(self):\n return self.config.keys()\n\n def __getitem__(self, key):\n return self.config.get(key)\n\n def __setitem__(self, key, value):\n self.config[key] = value\n\n def __contains__(self, key):\n return key in self.config\n", "path": "core/admin/mailu/configuration.py"}]}
| 3,828 | 364 |
gh_patches_debug_32380
|
rasdani/github-patches
|
git_diff
|
spyder-ide__spyder-15905
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Pylint package not found with the Syder 5.0.4 Mac installer
<!--- *** BEFORE SUBMITTING: PASTE CLIPBOARD HERE TO COMPLETE YOUR REPORT *** ---!>
Step-1 Python file (.py) already opened in the Editor pane
Step-2 Cliked on "Source" on the Menu bar
Step-3 Slected "Run Code Analysis"
Error Message: "/Application/Spyder.app/Contents/MacOS/
python: No Module named
plynt.__main__; 'plynt' is a package and cannot be directly executed "
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `installers/macOS/setup.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright © Spyder Project Contributors
4 # Licensed under the terms of the MIT License
5 # (see spyder/__init__.py for details)
6
7 """
8 Create a stand-alone macOS app using py2app
9
10 To be used like this:
11 $ python setup.py
12 """
13
14 import os
15 import sys
16 from logging import getLogger, StreamHandler, Formatter
17 from setuptools import setup
18
19 # Setup logger
20 fmt = Formatter('%(asctime)s [%(levelname)s] [%(name)s] -> %(message)s')
21 h = StreamHandler()
22 h.setFormatter(fmt)
23 logger = getLogger('spyder-macOS')
24 logger.addHandler(h)
25 logger.setLevel('INFO')
26
27 # Define paths
28 HERE = os.path.abspath(__file__)
29 THISDIR = os.path.dirname(HERE)
30 SPYREPO = os.path.realpath(os.path.join(THISDIR, '..', '..'))
31 ICONFILE = os.path.join(SPYREPO, 'img_src', 'spyder.icns')
32 SPYLINK = os.path.join(THISDIR, 'spyder')
33
34 sys.path.append(SPYREPO)
35
36 # Python version
37 PYVER = [sys.version_info.major, sys.version_info.minor,
38 sys.version_info.micro]
39
40
41 def make_app_bundle(dist_dir, make_lite=False):
42 """
43 Make macOS application bundle.
44
45 Parameters
46 ----------
47 dist_dir : str
48 Directory in which to put the application bundle.
49 make_lite : bool, optional
50 Whether to create the application bundle with minimal packages.
51 The default is False.
52
53 NOTES
54 -----
55 py2app includes all packages in Spyder.app/Contents/Resources/lib/
56 python<ver>.zip, but some packages have issues when placed there.
57 The following packages are included in py2app's PACKAGES option so that
58 they will be placed in Spyder.app/Contents/Resources/lib/python<ver>
59 instead.
60
61 alabaster :
62 Error message: [Errno 20] Not a directory: '<path>/Resources/lib/
63 python38.zip/alabaster'
64 astroid :
65 ImportError: cannot import name 'context' from 'astroid'
66 (<path>/Resources/lib/python38.zip/astroid/__init__.pyc)
67 blib2to3 :
68 File "<frozen zipimport>", line 177, in get_data
69 KeyError: 'blib2to3/Users/rclary/Library/Caches/black/20.8b1/
70 Grammar3.8.6.final.0.pickle'
71 docutils :
72 [Errno 20] Not a directory: '<path>/Resources/lib/python39.zip/
73 docutils/writers/latex2e/docutils.sty'
74 ipython :
75 [IPKernelApp] WARNING | Could not copy README_STARTUP to startup dir.
76 Source file
77 <path>/Resources/lib/python38.zip/IPython/core/profile/README_STARTUP
78 does not exist
79 jedi :
80 jedi.api.environment.InvalidPythonEnvironment: Could not get version
81 information for '<path>/Contents/MacOS/python': InternalError("The
82 subprocess <path>/Contents/MacOS/python has crashed (EOFError('Ran out
83 of input'), stderr=).")
84 jinja2 :
85 No module named 'jinja2.ext'
86 keyring :
87 ModuleNotFoundError: No module named 'keyring.backends.<mod>'
88 pandas :
89 From Variable explorer: KeyError('pandas._libs.interval')
90 parso :
91 jedi.api.environment.InvalidPythonEnvironment: Could not get version
92 information for '/Users/rclary/opt/miniconda3/envs/c2w_37/bin/python':
93 InternalError("The subprocess /Users/rclary/opt/miniconda3/envs/c2w_37/
94 bin/python has crashed (EOFError('Ran out of input'), stderr=).")
95 PIL :
96 Library not loaded: @loader_path/.dylibs/libjpeg.9.dylib
97 Note: only applicable to not-Lite build
98 pygments :
99 ModuleNotFoundError: No module named 'pygments.formatters.latex'
100 pylsp :
101 <path>/Contents/MacOS/python: No module named pylsp
102 Note: still occurs in alias mode
103 pylsp_black :
104 Mandatory: python-pyls-black >=1.0.0 : None (NOK)
105 pyls_spyder :
106 Mandatory: pyls_spyder >=0.1.1 : None (NOK)
107 qtawesome :
108 NotADirectoryError: [Errno 20] Not a directory: '<path>/Resourses/lib/
109 python38.zip/qtawesome/fonts/fontawesome4.7-webfont.ttf'
110 setuptools :
111 Mandatory: setuptools >=49.6.0 : None (NOK)
112 sphinx :
113 No module named 'sphinx.builders.changes'
114 spyder :
115 NotADirectoryError: [Errno 20] Not a directory: '<path>/Resources/lib/
116 python38.zip/spyder/app/mac_stylesheet.qss'
117 spyder_kernels :
118 No module named spyder_kernels.console.__main__
119 textdistance :
120 NotADirectoryError: [Errno 20] Not a directory: '<path>/Resources/lib/
121 python39.zip/textdistance/libraries.json'
122 """
123 import shutil
124 import pkg_resources
125
126 from spyder import __version__ as SPYVER
127 from spyder.config.utils import EDIT_FILETYPES, _get_extensions
128 from spyder.config.base import MAC_APP_NAME
129
130 # Patch py2app for IPython help()
131 py2app_file = pkg_resources.pkgutil.get_loader('py2app').get_filename()
132 site_file = os.path.join(os.path.dirname(py2app_file), 'apptemplate',
133 'lib', 'site.py')
134 logger.info('Patching %s...', site_file)
135 with open(site_file, 'a+') as f:
136 f.seek(0)
137 content = f.read()
138 if 'builtins.help = _sitebuiltins._Helper()' not in content:
139 f.write('\nimport builtins'
140 '\nimport _sitebuiltins'
141 '\nbuiltins.help = _sitebuiltins._Helper()\n')
142
143 build_type = 'lite' if make_lite else 'full'
144 logger.info('Creating %s app bundle...', build_type)
145
146 PACKAGES = ['alabaster', 'astroid', 'docutils', 'blib2to3', 'IPython',
147 'jedi', 'jinja2', 'keyring', 'parso', 'pygments', 'pylsp',
148 'pylsp_black', 'pyls_spyder', 'qtawesome', 'setuptools',
149 'sphinx', 'spyder', 'spyder_kernels', 'textdistance',
150 ]
151 INCLUDES = ['_sitebuiltins', # required for IPython help()
152 # required for sphinx
153 'sphinxcontrib.applehelp', 'sphinxcontrib.devhelp',
154 'sphinxcontrib.htmlhelp', 'sphinxcontrib.jsmath',
155 'sphinxcontrib.qthelp', 'sphinxcontrib.serializinghtml']
156 EXCLUDES = []
157 EXCLUDE_EGG = ['py2app']
158
159 if make_lite:
160 EXCLUDES.extend([
161 'numpy', 'scipy', 'pandas', 'matplotlib', 'cython', 'sympy', 'PIL'
162 ])
163 EXCLUDE_EGG.extend(['pillow'])
164 else:
165 INCLUDES.extend([
166 'numpy', 'scipy', 'pandas', 'matplotlib', 'cython', 'sympy'
167 ])
168 PACKAGES.extend(['pandas', 'PIL'])
169
170 EXCLUDE_EGG.extend(EXCLUDES)
171 EDIT_EXT = [ext[1:] for ext in _get_extensions(EDIT_FILETYPES)]
172
173 # Get rtree dylibs
174 rtree_loc = pkg_resources.get_distribution('rtree').module_path
175 rtree_dylibs = os.scandir(os.path.join(rtree_loc, 'rtree', 'lib'))
176 FRAMEWORKS = [lib.path for lib in rtree_dylibs]
177
178 OPTIONS = {
179 'optimize': 0,
180 'packages': PACKAGES,
181 'includes': INCLUDES,
182 'excludes': EXCLUDES,
183 'iconfile': ICONFILE,
184 'dist_dir': dist_dir,
185 'frameworks': FRAMEWORKS,
186 'plist': {
187 'CFBundleDocumentTypes': [{'CFBundleTypeExtensions': EDIT_EXT,
188 'CFBundleTypeName': 'Text File',
189 'CFBundleTypeRole': 'Editor'}],
190 'CFBundleIdentifier': 'org.spyder-ide',
191 'CFBundleShortVersionString': SPYVER,
192 'NSRequiresAquaSystemAppearance': False # Darkmode support
193 }
194 }
195
196 # Copy main application script
197 app_script_name = MAC_APP_NAME.replace('.app', '.py')
198 app_script_path = os.path.join(SPYREPO, 'scripts', app_script_name)
199 shutil.copy2(os.path.join(SPYREPO, 'scripts', 'spyder'), app_script_path)
200
201 # Build the application
202 try:
203 os.symlink(os.path.join(SPYREPO, 'spyder'), SPYLINK)
204 setup(app=[app_script_path], options={'py2app': OPTIONS})
205 finally:
206 os.remove(app_script_path)
207 os.remove(SPYLINK)
208
209 # Copy egg info from site-packages: fixes several pkg_resources issues
210 dest_dir = os.path.join(dist_dir, MAC_APP_NAME, 'Contents', 'Resources',
211 'lib', f'python{PYVER[0]}.{PYVER[1]}')
212 pkg_resources.working_set.add_entry(SPYREPO)
213 for dist in pkg_resources.working_set:
214 if (dist.egg_info is None or dist.key.startswith('pyobjc')
215 or dist.key in EXCLUDE_EGG):
216 logger.info(f'Skipping egg {dist.key}')
217 continue
218 egg = os.path.basename(dist.egg_info)
219 dest = os.path.join(dest_dir, egg)
220 shutil.copytree(dist.egg_info, dest)
221 logger.info(f'Copied {egg}')
222
223 logger.info('App bundle complete.')
224
225 return
226
227
228 def make_disk_image(dist_dir, make_lite=False):
229 """
230 Make macOS disk image containing Spyder.app application bundle.
231
232 Parameters
233 ----------
234 dist_dir : str
235 Directory in which to put the disk image.
236 make_lite : bool, optional
237 Whether to append the disk image file and volume name with 'Lite'.
238 The default is False.
239
240 """
241 logger.info('Creating disk image...')
242
243 from dmgbuild import build_dmg
244 from dmgbuild.core import DMGError
245 from spyder import __version__ as SPYVER
246 from spyder.config.base import MAC_APP_NAME
247
248 volume_name = '{}-{} Py-{}.{}.{}'.format(MAC_APP_NAME[:-4], SPYVER, *PYVER)
249 dmgfile = os.path.join(dist_dir, 'Spyder')
250 if make_lite:
251 volume_name += ' Lite'
252 dmgfile += '-Lite'
253 dmgfile += '.dmg'
254
255 settings_file = os.path.join(THISDIR, 'dmg_settings.py')
256 settings = {
257 'files': [os.path.join(dist_dir, MAC_APP_NAME)],
258 'badge_icon': ICONFILE,
259 'icon_locations': {MAC_APP_NAME: (140, 120),
260 'Applications': (500, 120)}
261 }
262
263 try:
264 build_dmg(dmgfile, volume_name, settings_file=settings_file,
265 settings=settings, detach_retries=30)
266 logger.info('Building disk image complete.')
267 except DMGError as exc:
268 if exc.args[0] == 'Unable to detach device cleanly':
269 # don't raise this error since the dmg is forced to detach
270 logger.warning(exc.args[0])
271 else:
272 raise exc
273
274 return
275
276
277 if __name__ == '__main__':
278 import argparse
279 parser = argparse.ArgumentParser()
280 parser.add_argument('-n', '--no-app', dest='make_app',
281 action='store_false', default=True,
282 help='Do not create application bundle')
283 parser.add_argument('-l', '--lite', dest='make_lite', action='store_true',
284 default=False,
285 help='Build with minimal internal packages')
286 parser.add_argument('-i', '--dmg', dest='make_dmg', action='store_true',
287 default=False, help='Create disk image')
288 parser.add_argument('-d', '--dist-dir', dest='dist_dir', default='dist',
289 help='Distribution directory; passed to py2app')
290
291 args, rem = parser.parse_known_args()
292
293 # Groom sys.argv for py2app
294 sys.argv = sys.argv[:1] + ['py2app'] + rem
295
296 dist_dir = os.path.abspath(args.dist_dir)
297
298 if args.make_app:
299 make_app_bundle(dist_dir, make_lite=args.make_lite)
300 else:
301 logger.info('Skipping app bundle.')
302
303 if args.make_dmg:
304 make_disk_image(dist_dir, make_lite=args.make_lite)
305 else:
306 logger.info('Skipping disk image.')
307
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/installers/macOS/setup.py b/installers/macOS/setup.py
--- a/installers/macOS/setup.py
+++ b/installers/macOS/setup.py
@@ -71,7 +71,7 @@
docutils :
[Errno 20] Not a directory: '<path>/Resources/lib/python39.zip/
docutils/writers/latex2e/docutils.sty'
- ipython :
+ IPython :
[IPKernelApp] WARNING | Could not copy README_STARTUP to startup dir.
Source file
<path>/Resources/lib/python38.zip/IPython/core/profile/README_STARTUP
@@ -97,6 +97,8 @@
Note: only applicable to not-Lite build
pygments :
ModuleNotFoundError: No module named 'pygments.formatters.latex'
+ pylint :
+ <path>/Contents/MacOS/python: No module named pylint.__main__
pylsp :
<path>/Contents/MacOS/python: No module named pylsp
Note: still occurs in alias mode
@@ -143,10 +145,11 @@
build_type = 'lite' if make_lite else 'full'
logger.info('Creating %s app bundle...', build_type)
- PACKAGES = ['alabaster', 'astroid', 'docutils', 'blib2to3', 'IPython',
- 'jedi', 'jinja2', 'keyring', 'parso', 'pygments', 'pylsp',
- 'pylsp_black', 'pyls_spyder', 'qtawesome', 'setuptools',
- 'sphinx', 'spyder', 'spyder_kernels', 'textdistance',
+ PACKAGES = ['alabaster', 'astroid', 'blib2to3', 'docutils', 'IPython',
+ 'jedi', 'jinja2', 'keyring', 'parso', 'pygments', 'pylint',
+ 'pylsp', 'pylsp_black', 'pyls_spyder', 'qtawesome',
+ 'setuptools', 'sphinx', 'spyder', 'spyder_kernels',
+ 'textdistance',
]
INCLUDES = ['_sitebuiltins', # required for IPython help()
# required for sphinx
|
{"golden_diff": "diff --git a/installers/macOS/setup.py b/installers/macOS/setup.py\n--- a/installers/macOS/setup.py\n+++ b/installers/macOS/setup.py\n@@ -71,7 +71,7 @@\n docutils :\n [Errno 20] Not a directory: '<path>/Resources/lib/python39.zip/\n docutils/writers/latex2e/docutils.sty'\n- ipython :\n+ IPython :\n [IPKernelApp] WARNING | Could not copy README_STARTUP to startup dir.\n Source file\n <path>/Resources/lib/python38.zip/IPython/core/profile/README_STARTUP\n@@ -97,6 +97,8 @@\n Note: only applicable to not-Lite build\n pygments :\n ModuleNotFoundError: No module named 'pygments.formatters.latex'\n+ pylint :\n+ <path>/Contents/MacOS/python: No module named pylint.__main__\n pylsp :\n <path>/Contents/MacOS/python: No module named pylsp\n Note: still occurs in alias mode\n@@ -143,10 +145,11 @@\n build_type = 'lite' if make_lite else 'full'\n logger.info('Creating %s app bundle...', build_type)\n \n- PACKAGES = ['alabaster', 'astroid', 'docutils', 'blib2to3', 'IPython',\n- 'jedi', 'jinja2', 'keyring', 'parso', 'pygments', 'pylsp',\n- 'pylsp_black', 'pyls_spyder', 'qtawesome', 'setuptools',\n- 'sphinx', 'spyder', 'spyder_kernels', 'textdistance',\n+ PACKAGES = ['alabaster', 'astroid', 'blib2to3', 'docutils', 'IPython',\n+ 'jedi', 'jinja2', 'keyring', 'parso', 'pygments', 'pylint',\n+ 'pylsp', 'pylsp_black', 'pyls_spyder', 'qtawesome',\n+ 'setuptools', 'sphinx', 'spyder', 'spyder_kernels',\n+ 'textdistance',\n ]\n INCLUDES = ['_sitebuiltins', # required for IPython help()\n # required for sphinx\n", "issue": "Pylint package not found with the Syder 5.0.4 Mac installer\n \r\n<!--- *** BEFORE SUBMITTING: PASTE CLIPBOARD HERE TO COMPLETE YOUR REPORT *** ---!>\r\nStep-1 Python file (.py) already opened in the Editor pane\r\nStep-2 Cliked on \"Source\" on the Menu bar \r\nStep-3 Slected \"Run Code Analysis\"\r\n\r\nError Message: \"/Application/Spyder.app/Contents/MacOS/ \r\npython: No Module named\r\nplynt.__main__; 'plynt' is a package and cannot be directly executed \"\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright \u00a9 Spyder Project Contributors\n# Licensed under the terms of the MIT License\n# (see spyder/__init__.py for details)\n\n\"\"\"\nCreate a stand-alone macOS app using py2app\n\nTo be used like this:\n$ python setup.py\n\"\"\"\n\nimport os\nimport sys\nfrom logging import getLogger, StreamHandler, Formatter\nfrom setuptools import setup\n\n# Setup logger\nfmt = Formatter('%(asctime)s [%(levelname)s] [%(name)s] -> %(message)s')\nh = StreamHandler()\nh.setFormatter(fmt)\nlogger = getLogger('spyder-macOS')\nlogger.addHandler(h)\nlogger.setLevel('INFO')\n\n# Define paths\nHERE = os.path.abspath(__file__)\nTHISDIR = os.path.dirname(HERE)\nSPYREPO = os.path.realpath(os.path.join(THISDIR, '..', '..'))\nICONFILE = os.path.join(SPYREPO, 'img_src', 'spyder.icns')\nSPYLINK = os.path.join(THISDIR, 'spyder')\n\nsys.path.append(SPYREPO)\n\n# Python version\nPYVER = [sys.version_info.major, sys.version_info.minor,\n sys.version_info.micro]\n\n\ndef make_app_bundle(dist_dir, make_lite=False):\n \"\"\"\n Make macOS application bundle.\n\n Parameters\n ----------\n dist_dir : str\n Directory in which to put the application bundle.\n make_lite : bool, optional\n Whether to create the application bundle with minimal packages.\n The default is False.\n\n NOTES\n -----\n py2app includes all packages in Spyder.app/Contents/Resources/lib/\n python<ver>.zip, but some packages have issues when placed there.\n The following packages are included in py2app's PACKAGES option so that\n they will be placed in Spyder.app/Contents/Resources/lib/python<ver>\n instead.\n\n alabaster :\n Error message: [Errno 20] Not a directory: '<path>/Resources/lib/\n python38.zip/alabaster'\n astroid :\n ImportError: cannot import name 'context' from 'astroid'\n (<path>/Resources/lib/python38.zip/astroid/__init__.pyc)\n blib2to3 :\n File \"<frozen zipimport>\", line 177, in get_data\n KeyError: 'blib2to3/Users/rclary/Library/Caches/black/20.8b1/\n Grammar3.8.6.final.0.pickle'\n docutils :\n [Errno 20] Not a directory: '<path>/Resources/lib/python39.zip/\n docutils/writers/latex2e/docutils.sty'\n ipython :\n [IPKernelApp] WARNING | Could not copy README_STARTUP to startup dir.\n Source file\n <path>/Resources/lib/python38.zip/IPython/core/profile/README_STARTUP\n does not exist\n jedi :\n jedi.api.environment.InvalidPythonEnvironment: Could not get version\n information for '<path>/Contents/MacOS/python': InternalError(\"The\n subprocess <path>/Contents/MacOS/python has crashed (EOFError('Ran out\n of input'), stderr=).\")\n jinja2 :\n No module named 'jinja2.ext'\n keyring :\n ModuleNotFoundError: No module named 'keyring.backends.<mod>'\n pandas :\n From Variable explorer: KeyError('pandas._libs.interval')\n parso :\n jedi.api.environment.InvalidPythonEnvironment: Could not get version\n information for '/Users/rclary/opt/miniconda3/envs/c2w_37/bin/python':\n InternalError(\"The subprocess /Users/rclary/opt/miniconda3/envs/c2w_37/\n bin/python has crashed (EOFError('Ran out of input'), stderr=).\")\n PIL :\n Library not loaded: @loader_path/.dylibs/libjpeg.9.dylib\n Note: only applicable to not-Lite build\n pygments :\n ModuleNotFoundError: No module named 'pygments.formatters.latex'\n pylsp :\n <path>/Contents/MacOS/python: No module named pylsp\n Note: still occurs in alias mode\n pylsp_black :\n Mandatory: python-pyls-black >=1.0.0 : None (NOK)\n pyls_spyder :\n Mandatory: pyls_spyder >=0.1.1 : None (NOK)\n qtawesome :\n NotADirectoryError: [Errno 20] Not a directory: '<path>/Resourses/lib/\n python38.zip/qtawesome/fonts/fontawesome4.7-webfont.ttf'\n setuptools :\n Mandatory: setuptools >=49.6.0 : None (NOK)\n sphinx :\n No module named 'sphinx.builders.changes'\n spyder :\n NotADirectoryError: [Errno 20] Not a directory: '<path>/Resources/lib/\n python38.zip/spyder/app/mac_stylesheet.qss'\n spyder_kernels :\n No module named spyder_kernels.console.__main__\n textdistance :\n NotADirectoryError: [Errno 20] Not a directory: '<path>/Resources/lib/\n python39.zip/textdistance/libraries.json'\n \"\"\"\n import shutil\n import pkg_resources\n\n from spyder import __version__ as SPYVER\n from spyder.config.utils import EDIT_FILETYPES, _get_extensions\n from spyder.config.base import MAC_APP_NAME\n\n # Patch py2app for IPython help()\n py2app_file = pkg_resources.pkgutil.get_loader('py2app').get_filename()\n site_file = os.path.join(os.path.dirname(py2app_file), 'apptemplate',\n 'lib', 'site.py')\n logger.info('Patching %s...', site_file)\n with open(site_file, 'a+') as f:\n f.seek(0)\n content = f.read()\n if 'builtins.help = _sitebuiltins._Helper()' not in content:\n f.write('\\nimport builtins'\n '\\nimport _sitebuiltins'\n '\\nbuiltins.help = _sitebuiltins._Helper()\\n')\n\n build_type = 'lite' if make_lite else 'full'\n logger.info('Creating %s app bundle...', build_type)\n\n PACKAGES = ['alabaster', 'astroid', 'docutils', 'blib2to3', 'IPython',\n 'jedi', 'jinja2', 'keyring', 'parso', 'pygments', 'pylsp',\n 'pylsp_black', 'pyls_spyder', 'qtawesome', 'setuptools',\n 'sphinx', 'spyder', 'spyder_kernels', 'textdistance',\n ]\n INCLUDES = ['_sitebuiltins', # required for IPython help()\n # required for sphinx\n 'sphinxcontrib.applehelp', 'sphinxcontrib.devhelp',\n 'sphinxcontrib.htmlhelp', 'sphinxcontrib.jsmath',\n 'sphinxcontrib.qthelp', 'sphinxcontrib.serializinghtml']\n EXCLUDES = []\n EXCLUDE_EGG = ['py2app']\n\n if make_lite:\n EXCLUDES.extend([\n 'numpy', 'scipy', 'pandas', 'matplotlib', 'cython', 'sympy', 'PIL'\n ])\n EXCLUDE_EGG.extend(['pillow'])\n else:\n INCLUDES.extend([\n 'numpy', 'scipy', 'pandas', 'matplotlib', 'cython', 'sympy'\n ])\n PACKAGES.extend(['pandas', 'PIL'])\n\n EXCLUDE_EGG.extend(EXCLUDES)\n EDIT_EXT = [ext[1:] for ext in _get_extensions(EDIT_FILETYPES)]\n\n # Get rtree dylibs\n rtree_loc = pkg_resources.get_distribution('rtree').module_path\n rtree_dylibs = os.scandir(os.path.join(rtree_loc, 'rtree', 'lib'))\n FRAMEWORKS = [lib.path for lib in rtree_dylibs]\n\n OPTIONS = {\n 'optimize': 0,\n 'packages': PACKAGES,\n 'includes': INCLUDES,\n 'excludes': EXCLUDES,\n 'iconfile': ICONFILE,\n 'dist_dir': dist_dir,\n 'frameworks': FRAMEWORKS,\n 'plist': {\n 'CFBundleDocumentTypes': [{'CFBundleTypeExtensions': EDIT_EXT,\n 'CFBundleTypeName': 'Text File',\n 'CFBundleTypeRole': 'Editor'}],\n 'CFBundleIdentifier': 'org.spyder-ide',\n 'CFBundleShortVersionString': SPYVER,\n 'NSRequiresAquaSystemAppearance': False # Darkmode support\n }\n }\n\n # Copy main application script\n app_script_name = MAC_APP_NAME.replace('.app', '.py')\n app_script_path = os.path.join(SPYREPO, 'scripts', app_script_name)\n shutil.copy2(os.path.join(SPYREPO, 'scripts', 'spyder'), app_script_path)\n\n # Build the application\n try:\n os.symlink(os.path.join(SPYREPO, 'spyder'), SPYLINK)\n setup(app=[app_script_path], options={'py2app': OPTIONS})\n finally:\n os.remove(app_script_path)\n os.remove(SPYLINK)\n\n # Copy egg info from site-packages: fixes several pkg_resources issues\n dest_dir = os.path.join(dist_dir, MAC_APP_NAME, 'Contents', 'Resources',\n 'lib', f'python{PYVER[0]}.{PYVER[1]}')\n pkg_resources.working_set.add_entry(SPYREPO)\n for dist in pkg_resources.working_set:\n if (dist.egg_info is None or dist.key.startswith('pyobjc')\n or dist.key in EXCLUDE_EGG):\n logger.info(f'Skipping egg {dist.key}')\n continue\n egg = os.path.basename(dist.egg_info)\n dest = os.path.join(dest_dir, egg)\n shutil.copytree(dist.egg_info, dest)\n logger.info(f'Copied {egg}')\n\n logger.info('App bundle complete.')\n\n return\n\n\ndef make_disk_image(dist_dir, make_lite=False):\n \"\"\"\n Make macOS disk image containing Spyder.app application bundle.\n\n Parameters\n ----------\n dist_dir : str\n Directory in which to put the disk image.\n make_lite : bool, optional\n Whether to append the disk image file and volume name with 'Lite'.\n The default is False.\n\n \"\"\"\n logger.info('Creating disk image...')\n\n from dmgbuild import build_dmg\n from dmgbuild.core import DMGError\n from spyder import __version__ as SPYVER\n from spyder.config.base import MAC_APP_NAME\n\n volume_name = '{}-{} Py-{}.{}.{}'.format(MAC_APP_NAME[:-4], SPYVER, *PYVER)\n dmgfile = os.path.join(dist_dir, 'Spyder')\n if make_lite:\n volume_name += ' Lite'\n dmgfile += '-Lite'\n dmgfile += '.dmg'\n\n settings_file = os.path.join(THISDIR, 'dmg_settings.py')\n settings = {\n 'files': [os.path.join(dist_dir, MAC_APP_NAME)],\n 'badge_icon': ICONFILE,\n 'icon_locations': {MAC_APP_NAME: (140, 120),\n 'Applications': (500, 120)}\n }\n\n try:\n build_dmg(dmgfile, volume_name, settings_file=settings_file,\n settings=settings, detach_retries=30)\n logger.info('Building disk image complete.')\n except DMGError as exc:\n if exc.args[0] == 'Unable to detach device cleanly':\n # don't raise this error since the dmg is forced to detach\n logger.warning(exc.args[0])\n else:\n raise exc\n\n return\n\n\nif __name__ == '__main__':\n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument('-n', '--no-app', dest='make_app',\n action='store_false', default=True,\n help='Do not create application bundle')\n parser.add_argument('-l', '--lite', dest='make_lite', action='store_true',\n default=False,\n help='Build with minimal internal packages')\n parser.add_argument('-i', '--dmg', dest='make_dmg', action='store_true',\n default=False, help='Create disk image')\n parser.add_argument('-d', '--dist-dir', dest='dist_dir', default='dist',\n help='Distribution directory; passed to py2app')\n\n args, rem = parser.parse_known_args()\n\n # Groom sys.argv for py2app\n sys.argv = sys.argv[:1] + ['py2app'] + rem\n\n dist_dir = os.path.abspath(args.dist_dir)\n\n if args.make_app:\n make_app_bundle(dist_dir, make_lite=args.make_lite)\n else:\n logger.info('Skipping app bundle.')\n\n if args.make_dmg:\n make_disk_image(dist_dir, make_lite=args.make_lite)\n else:\n logger.info('Skipping disk image.')\n", "path": "installers/macOS/setup.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright \u00a9 Spyder Project Contributors\n# Licensed under the terms of the MIT License\n# (see spyder/__init__.py for details)\n\n\"\"\"\nCreate a stand-alone macOS app using py2app\n\nTo be used like this:\n$ python setup.py\n\"\"\"\n\nimport os\nimport sys\nfrom logging import getLogger, StreamHandler, Formatter\nfrom setuptools import setup\n\n# Setup logger\nfmt = Formatter('%(asctime)s [%(levelname)s] [%(name)s] -> %(message)s')\nh = StreamHandler()\nh.setFormatter(fmt)\nlogger = getLogger('spyder-macOS')\nlogger.addHandler(h)\nlogger.setLevel('INFO')\n\n# Define paths\nHERE = os.path.abspath(__file__)\nTHISDIR = os.path.dirname(HERE)\nSPYREPO = os.path.realpath(os.path.join(THISDIR, '..', '..'))\nICONFILE = os.path.join(SPYREPO, 'img_src', 'spyder.icns')\nSPYLINK = os.path.join(THISDIR, 'spyder')\n\nsys.path.append(SPYREPO)\n\n# Python version\nPYVER = [sys.version_info.major, sys.version_info.minor,\n sys.version_info.micro]\n\n\ndef make_app_bundle(dist_dir, make_lite=False):\n \"\"\"\n Make macOS application bundle.\n\n Parameters\n ----------\n dist_dir : str\n Directory in which to put the application bundle.\n make_lite : bool, optional\n Whether to create the application bundle with minimal packages.\n The default is False.\n\n NOTES\n -----\n py2app includes all packages in Spyder.app/Contents/Resources/lib/\n python<ver>.zip, but some packages have issues when placed there.\n The following packages are included in py2app's PACKAGES option so that\n they will be placed in Spyder.app/Contents/Resources/lib/python<ver>\n instead.\n\n alabaster :\n Error message: [Errno 20] Not a directory: '<path>/Resources/lib/\n python38.zip/alabaster'\n astroid :\n ImportError: cannot import name 'context' from 'astroid'\n (<path>/Resources/lib/python38.zip/astroid/__init__.pyc)\n blib2to3 :\n File \"<frozen zipimport>\", line 177, in get_data\n KeyError: 'blib2to3/Users/rclary/Library/Caches/black/20.8b1/\n Grammar3.8.6.final.0.pickle'\n docutils :\n [Errno 20] Not a directory: '<path>/Resources/lib/python39.zip/\n docutils/writers/latex2e/docutils.sty'\n IPython :\n [IPKernelApp] WARNING | Could not copy README_STARTUP to startup dir.\n Source file\n <path>/Resources/lib/python38.zip/IPython/core/profile/README_STARTUP\n does not exist\n jedi :\n jedi.api.environment.InvalidPythonEnvironment: Could not get version\n information for '<path>/Contents/MacOS/python': InternalError(\"The\n subprocess <path>/Contents/MacOS/python has crashed (EOFError('Ran out\n of input'), stderr=).\")\n jinja2 :\n No module named 'jinja2.ext'\n keyring :\n ModuleNotFoundError: No module named 'keyring.backends.<mod>'\n pandas :\n From Variable explorer: KeyError('pandas._libs.interval')\n parso :\n jedi.api.environment.InvalidPythonEnvironment: Could not get version\n information for '/Users/rclary/opt/miniconda3/envs/c2w_37/bin/python':\n InternalError(\"The subprocess /Users/rclary/opt/miniconda3/envs/c2w_37/\n bin/python has crashed (EOFError('Ran out of input'), stderr=).\")\n PIL :\n Library not loaded: @loader_path/.dylibs/libjpeg.9.dylib\n Note: only applicable to not-Lite build\n pygments :\n ModuleNotFoundError: No module named 'pygments.formatters.latex'\n pylint :\n <path>/Contents/MacOS/python: No module named pylint.__main__\n pylsp :\n <path>/Contents/MacOS/python: No module named pylsp\n Note: still occurs in alias mode\n pylsp_black :\n Mandatory: python-pyls-black >=1.0.0 : None (NOK)\n pyls_spyder :\n Mandatory: pyls_spyder >=0.1.1 : None (NOK)\n qtawesome :\n NotADirectoryError: [Errno 20] Not a directory: '<path>/Resourses/lib/\n python38.zip/qtawesome/fonts/fontawesome4.7-webfont.ttf'\n setuptools :\n Mandatory: setuptools >=49.6.0 : None (NOK)\n sphinx :\n No module named 'sphinx.builders.changes'\n spyder :\n NotADirectoryError: [Errno 20] Not a directory: '<path>/Resources/lib/\n python38.zip/spyder/app/mac_stylesheet.qss'\n spyder_kernels :\n No module named spyder_kernels.console.__main__\n textdistance :\n NotADirectoryError: [Errno 20] Not a directory: '<path>/Resources/lib/\n python39.zip/textdistance/libraries.json'\n \"\"\"\n import shutil\n import pkg_resources\n\n from spyder import __version__ as SPYVER\n from spyder.config.utils import EDIT_FILETYPES, _get_extensions\n from spyder.config.base import MAC_APP_NAME\n\n # Patch py2app for IPython help()\n py2app_file = pkg_resources.pkgutil.get_loader('py2app').get_filename()\n site_file = os.path.join(os.path.dirname(py2app_file), 'apptemplate',\n 'lib', 'site.py')\n logger.info('Patching %s...', site_file)\n with open(site_file, 'a+') as f:\n f.seek(0)\n content = f.read()\n if 'builtins.help = _sitebuiltins._Helper()' not in content:\n f.write('\\nimport builtins'\n '\\nimport _sitebuiltins'\n '\\nbuiltins.help = _sitebuiltins._Helper()\\n')\n\n build_type = 'lite' if make_lite else 'full'\n logger.info('Creating %s app bundle...', build_type)\n\n PACKAGES = ['alabaster', 'astroid', 'blib2to3', 'docutils', 'IPython',\n 'jedi', 'jinja2', 'keyring', 'parso', 'pygments', 'pylint',\n 'pylsp', 'pylsp_black', 'pyls_spyder', 'qtawesome',\n 'setuptools', 'sphinx', 'spyder', 'spyder_kernels',\n 'textdistance',\n ]\n INCLUDES = ['_sitebuiltins', # required for IPython help()\n # required for sphinx\n 'sphinxcontrib.applehelp', 'sphinxcontrib.devhelp',\n 'sphinxcontrib.htmlhelp', 'sphinxcontrib.jsmath',\n 'sphinxcontrib.qthelp', 'sphinxcontrib.serializinghtml']\n EXCLUDES = []\n EXCLUDE_EGG = ['py2app']\n\n if make_lite:\n EXCLUDES.extend([\n 'numpy', 'scipy', 'pandas', 'matplotlib', 'cython', 'sympy', 'PIL'\n ])\n EXCLUDE_EGG.extend(['pillow'])\n else:\n INCLUDES.extend([\n 'numpy', 'scipy', 'pandas', 'matplotlib', 'cython', 'sympy'\n ])\n PACKAGES.extend(['pandas', 'PIL'])\n\n EXCLUDE_EGG.extend(EXCLUDES)\n EDIT_EXT = [ext[1:] for ext in _get_extensions(EDIT_FILETYPES)]\n\n # Get rtree dylibs\n rtree_loc = pkg_resources.get_distribution('rtree').module_path\n rtree_dylibs = os.scandir(os.path.join(rtree_loc, 'rtree', 'lib'))\n FRAMEWORKS = [lib.path for lib in rtree_dylibs]\n\n OPTIONS = {\n 'optimize': 0,\n 'packages': PACKAGES,\n 'includes': INCLUDES,\n 'excludes': EXCLUDES,\n 'iconfile': ICONFILE,\n 'dist_dir': dist_dir,\n 'frameworks': FRAMEWORKS,\n 'plist': {\n 'CFBundleDocumentTypes': [{'CFBundleTypeExtensions': EDIT_EXT,\n 'CFBundleTypeName': 'Text File',\n 'CFBundleTypeRole': 'Editor'}],\n 'CFBundleIdentifier': 'org.spyder-ide',\n 'CFBundleShortVersionString': SPYVER,\n 'NSRequiresAquaSystemAppearance': False # Darkmode support\n }\n }\n\n # Copy main application script\n app_script_name = MAC_APP_NAME.replace('.app', '.py')\n app_script_path = os.path.join(SPYREPO, 'scripts', app_script_name)\n shutil.copy2(os.path.join(SPYREPO, 'scripts', 'spyder'), app_script_path)\n\n # Build the application\n try:\n os.symlink(os.path.join(SPYREPO, 'spyder'), SPYLINK)\n setup(app=[app_script_path], options={'py2app': OPTIONS})\n finally:\n os.remove(app_script_path)\n os.remove(SPYLINK)\n\n # Copy egg info from site-packages: fixes several pkg_resources issues\n dest_dir = os.path.join(dist_dir, MAC_APP_NAME, 'Contents', 'Resources',\n 'lib', f'python{PYVER[0]}.{PYVER[1]}')\n pkg_resources.working_set.add_entry(SPYREPO)\n for dist in pkg_resources.working_set:\n if (dist.egg_info is None or dist.key.startswith('pyobjc')\n or dist.key in EXCLUDE_EGG):\n logger.info(f'Skipping egg {dist.key}')\n continue\n egg = os.path.basename(dist.egg_info)\n dest = os.path.join(dest_dir, egg)\n shutil.copytree(dist.egg_info, dest)\n logger.info(f'Copied {egg}')\n\n logger.info('App bundle complete.')\n\n return\n\n\ndef make_disk_image(dist_dir, make_lite=False):\n \"\"\"\n Make macOS disk image containing Spyder.app application bundle.\n\n Parameters\n ----------\n dist_dir : str\n Directory in which to put the disk image.\n make_lite : bool, optional\n Whether to append the disk image file and volume name with 'Lite'.\n The default is False.\n\n \"\"\"\n logger.info('Creating disk image...')\n\n from dmgbuild import build_dmg\n from dmgbuild.core import DMGError\n from spyder import __version__ as SPYVER\n from spyder.config.base import MAC_APP_NAME\n\n volume_name = '{}-{} Py-{}.{}.{}'.format(MAC_APP_NAME[:-4], SPYVER, *PYVER)\n dmgfile = os.path.join(dist_dir, 'Spyder')\n if make_lite:\n volume_name += ' Lite'\n dmgfile += '-Lite'\n dmgfile += '.dmg'\n\n settings_file = os.path.join(THISDIR, 'dmg_settings.py')\n settings = {\n 'files': [os.path.join(dist_dir, MAC_APP_NAME)],\n 'badge_icon': ICONFILE,\n 'icon_locations': {MAC_APP_NAME: (140, 120),\n 'Applications': (500, 120)}\n }\n\n try:\n build_dmg(dmgfile, volume_name, settings_file=settings_file,\n settings=settings, detach_retries=30)\n logger.info('Building disk image complete.')\n except DMGError as exc:\n if exc.args[0] == 'Unable to detach device cleanly':\n # don't raise this error since the dmg is forced to detach\n logger.warning(exc.args[0])\n else:\n raise exc\n\n return\n\n\nif __name__ == '__main__':\n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument('-n', '--no-app', dest='make_app',\n action='store_false', default=True,\n help='Do not create application bundle')\n parser.add_argument('-l', '--lite', dest='make_lite', action='store_true',\n default=False,\n help='Build with minimal internal packages')\n parser.add_argument('-i', '--dmg', dest='make_dmg', action='store_true',\n default=False, help='Create disk image')\n parser.add_argument('-d', '--dist-dir', dest='dist_dir', default='dist',\n help='Distribution directory; passed to py2app')\n\n args, rem = parser.parse_known_args()\n\n # Groom sys.argv for py2app\n sys.argv = sys.argv[:1] + ['py2app'] + rem\n\n dist_dir = os.path.abspath(args.dist_dir)\n\n if args.make_app:\n make_app_bundle(dist_dir, make_lite=args.make_lite)\n else:\n logger.info('Skipping app bundle.')\n\n if args.make_dmg:\n make_disk_image(dist_dir, make_lite=args.make_lite)\n else:\n logger.info('Skipping disk image.')\n", "path": "installers/macOS/setup.py"}]}
| 4,035 | 509 |
gh_patches_debug_24806
|
rasdani/github-patches
|
git_diff
|
lutris__lutris-825
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Close Lutris after CLI rungame/rungameid
When using `lutris lutris:rungame/%`, the Lutris window stays open after the game has quit. Would be great to:
1. Not have Lutris pop up at all
2. Quit Lutris after it was run
Originally reported in https://github.com/RobLoach/lutris-kodi-addon/issues/14 by @solbero . Another solution might be a `--no-gui` option or something.
## Testing
1. Open up your terminal
2. Run `lutris lutris:rungame/tuxracer`
3. Lutris window pops up and runs the game
4. When you quit the game, Lutris stays open.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lutris/gui/application.py`
Content:
```
1 # application.py
2 #
3 # Copyright (C) 2016 Patrick Griffis <[email protected]>
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU General Public License as published by
7 # the Free Software Foundation, either version 3 of the License, or
8 # (at your option) any later version.
9 #
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU General Public License for more details.
14 #
15 # You should have received a copy of the GNU General Public License
16 # along with this program. If not, see <http://www.gnu.org/licenses/>.
17
18 import json
19 import logging
20 import os
21 import signal
22 import gettext
23 from gettext import gettext as _
24
25 import gi
26 gi.require_version('Gdk', '3.0')
27 gi.require_version('Gtk', '3.0')
28 from gi.repository import Gio, GLib, Gtk
29
30 from lutris import pga
31 from lutris.config import check_config
32 from lutris.platforms import update_platforms
33 from lutris.gui.dialogs import ErrorDialog, InstallOrPlayDialog
34 from lutris.migrations import migrate
35 from lutris.thread import exec_in_thread
36 from lutris.util import datapath
37 from lutris.util.log import logger
38 from lutris.util.resources import parse_installer_url
39 from lutris.services.steam import (AppManifest, get_appmanifests,
40 get_steamapps_paths)
41
42 from .lutriswindow import LutrisWindow
43
44
45 class Application(Gtk.Application):
46 def __init__(self):
47
48 Gtk.Application.__init__(self, application_id='net.lutris.Lutris',
49 flags=Gio.ApplicationFlags.HANDLES_COMMAND_LINE)
50
51 gettext.bindtextdomain("lutris", "/usr/share/locale")
52 gettext.textdomain("lutris")
53
54 check_config()
55 migrate()
56 update_platforms()
57
58 GLib.set_application_name(_('Lutris'))
59 self.window = None
60 self.css_provider = Gtk.CssProvider.new()
61
62 try:
63 self.css_provider.load_from_path(os.path.join(datapath.get(), 'ui', 'lutris.css'))
64 except GLib.Error as e:
65 logger.exception(e)
66
67 if hasattr(self, 'add_main_option'):
68 self.add_arguments()
69 else:
70 ErrorDialog("Your Linux distribution is too old, Lutris won't function properly")
71
72 def add_arguments(self):
73 self.add_main_option('debug',
74 ord('d'),
75 GLib.OptionFlags.NONE,
76 GLib.OptionArg.NONE,
77 _('Show debug messages'),
78 None)
79 self.add_main_option('install',
80 ord('i'),
81 GLib.OptionFlags.NONE,
82 GLib.OptionArg.STRING,
83 _('Install a game from a yml file'),
84 None)
85 self.add_main_option('exec',
86 ord('e'),
87 GLib.OptionFlags.NONE,
88 GLib.OptionArg.STRING,
89 _('Execute a program with the lutris runtime'),
90 None)
91 self.add_main_option('list-games',
92 ord('l'),
93 GLib.OptionFlags.NONE,
94 GLib.OptionArg.NONE,
95 _('List all games in database'),
96 None)
97 self.add_main_option('installed',
98 ord('o'),
99 GLib.OptionFlags.NONE,
100 GLib.OptionArg.NONE,
101 _('Only list installed games'),
102 None)
103 self.add_main_option('list-steam-games',
104 ord('s'),
105 GLib.OptionFlags.NONE,
106 GLib.OptionArg.NONE,
107 _('List available Steam games'),
108 None)
109 self.add_main_option('list-steam-folders',
110 0,
111 GLib.OptionFlags.NONE,
112 GLib.OptionArg.NONE,
113 _('List all known Steam library folders'),
114 None)
115 self.add_main_option('json',
116 ord('j'),
117 GLib.OptionFlags.NONE,
118 GLib.OptionArg.NONE,
119 _('Display the list of games in JSON format'),
120 None)
121 self.add_main_option('reinstall',
122 0,
123 GLib.OptionFlags.NONE,
124 GLib.OptionArg.NONE,
125 _('Reinstall game'),
126 None)
127 self.add_main_option(GLib.OPTION_REMAINING,
128 0,
129 GLib.OptionFlags.NONE,
130 GLib.OptionArg.STRING_ARRAY,
131 _('uri to open'),
132 'URI')
133
134 def set_connect_state(self, connected):
135 # We fiddle with the menu directly which is rather ugly
136 menu = self.get_menubar().get_item_link(0, 'submenu').get_item_link(0, 'section')
137 menu.remove(0) # Assert that it is the very first item
138 if connected:
139 item = Gio.MenuItem.new('Disconnect', 'win.disconnect')
140 else:
141 item = Gio.MenuItem.new('Connect', 'win.connect')
142 menu.prepend_item(item)
143
144 def do_startup(self):
145 Gtk.Application.do_startup(self)
146 signal.signal(signal.SIGINT, signal.SIG_DFL)
147
148 action = Gio.SimpleAction.new('quit')
149 action.connect('activate', lambda *x: self.quit())
150 self.add_action(action)
151 self.add_accelerator('<Primary>q', 'app.quit')
152
153 builder = Gtk.Builder.new_from_file(
154 os.path.join(datapath.get(), 'ui', 'menus-traditional.ui')
155 )
156 menubar = builder.get_object('menubar')
157 self.set_menubar(menubar)
158
159 def do_activate(self):
160 if not self.window:
161 self.window = LutrisWindow(application=self)
162 screen = self.window.props.screen
163 Gtk.StyleContext.add_provider_for_screen(
164 screen,
165 self.css_provider,
166 Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION
167 )
168 self.window.present()
169
170 @staticmethod
171 def _print(command_line, string):
172 # Workaround broken pygobject bindings
173 command_line.do_print_literal(command_line, string + '\n')
174
175 def do_command_line(self, command_line):
176 options = command_line.get_options_dict()
177
178 # Set up logger
179 if options.contains('debug'):
180 logger.setLevel(logging.DEBUG)
181
182 # Text only commands
183
184 # List game
185 if options.contains('list-games'):
186 game_list = pga.get_games()
187 if options.contains('installed'):
188 game_list = [game for game in game_list if game['installed']]
189 if options.contains('json'):
190 self.print_game_json(command_line, game_list)
191 else:
192 self.print_game_list(command_line, game_list)
193 return 0
194 # List Steam games
195 elif options.contains('list-steam-games'):
196 self.print_steam_list(command_line)
197 return 0
198 # List Steam folders
199 elif options.contains('list-steam-folders'):
200 self.print_steam_folders(command_line)
201 return 0
202
203 # Execute command in Lutris context
204 elif options.contains('exec'):
205 command = options.lookup_value('exec').get_string()
206 self.execute_command(command)
207 return 0
208
209 try:
210 url = options.lookup_value(GLib.OPTION_REMAINING)
211 installer_info = self.get_lutris_action(url)
212 except ValueError:
213 self._print(command_line, '%s is not a valid URI' % url.get_strv())
214 return 1
215 game_slug = installer_info['game_slug']
216 action = installer_info['action']
217 revision = installer_info['revision']
218
219 installer_file = None
220 if options.contains('install'):
221 installer_file = options.lookup_value('install').get_string()
222 installer_file = os.path.abspath(installer_file)
223 action = 'install'
224 if not os.path.isfile(installer_file):
225 self._print(command_line, "No such file: %s" % installer_file)
226 return 1
227
228 # Graphical commands
229 self.activate()
230
231 db_game = None
232 if game_slug:
233 if action == 'rungameid':
234 # Force db_game to use game id
235 db_game = pga.get_game_by_field(game_slug, 'id')
236 elif action == 'rungame':
237 # Force db_game to use game slug
238 db_game = pga.get_game_by_field(game_slug, 'slug')
239 elif action == 'install':
240 # Installers can use game or installer slugs
241 db_game = (pga.get_game_by_field(game_slug, 'slug') or
242 pga.get_game_by_field(game_slug, 'installer_slug'))
243
244 else:
245 # Dazed and confused, try anything that might works
246 db_game = (pga.get_game_by_field(game_slug, 'id') or
247 pga.get_game_by_field(game_slug, 'slug') or
248 pga.get_game_by_field(game_slug, 'installer_slug'))
249
250 if not action:
251 if db_game and db_game['installed']:
252 # Game found but no action provided, ask what to do
253 dlg = InstallOrPlayDialog(db_game['name'])
254 if not dlg.action_confirmed:
255 action = None
256 if dlg.action == 'play':
257 action = 'rungame'
258 elif dlg.action == 'install':
259 action = 'install'
260 elif game_slug or installer_file:
261 # No game found, default to install if a game_slug or
262 # installer_file is provided
263 action = 'install'
264
265 if action == 'install':
266 self.window.on_install_clicked(game_slug=game_slug,
267 installer_file=installer_file,
268 revision=revision)
269 elif action in ('rungame', 'rungameid'):
270 logger.info("Launching %s" % db_game['name'])
271 self.window.on_game_run(game_id=db_game['id'])
272
273 return 0
274
275 def get_lutris_action(self, url):
276 installer_info = {
277 'game_slug': None,
278 'revision': None,
279 'action': None
280 }
281
282 if url:
283 url = url.get_strv()
284
285 if url and len(url):
286 url = url[0] # TODO: Support multiple
287 installer_info = parse_installer_url(url)
288 if installer_info is False:
289 raise ValueError
290 return installer_info
291
292 def print_game_list(self, command_line, game_list):
293 for game in game_list:
294 self._print(
295 command_line,
296 "{:4} | {:<40} | {:<40} | {:<15} | {:<64}".format(
297 game['id'],
298 game['name'][:40],
299 game['slug'][:40],
300 game['runner'] or '-',
301 game['directory'] or '-'
302 )
303 )
304
305 def print_game_json(self, command_line, game_list):
306 games = [
307 {
308 'id': game['id'],
309 'slug': game['slug'],
310 'name': game['name'],
311 'runner': game['runner'],
312 'directory': game['directory']
313 }
314 for game in game_list
315 ]
316 self._print(command_line, json.dumps(games, indent=2))
317
318 def print_steam_list(self, command_line):
319 steamapps_paths = get_steamapps_paths()
320 for platform in ('linux', 'windows'):
321 for path in steamapps_paths[platform]:
322 appmanifest_files = get_appmanifests(path)
323 for appmanifest_file in appmanifest_files:
324 appmanifest = AppManifest(os.path.join(path, appmanifest_file))
325 self._print(
326 command_line,
327 " {:8} | {:<60} | {:10} | {}".format(
328 appmanifest.steamid,
329 appmanifest.name or '-',
330 platform,
331 ", ".join(appmanifest.states)
332 )
333 )
334
335 def execute_command(self, command):
336 """
337 Execute an arbitrary command in a Lutris context
338 with the runtime enabled and monitored by LutrisThread
339 """
340 logger.info("Running command '{}'".format(command))
341 thread = exec_in_thread(command)
342 try:
343 GLib.MainLoop().run()
344 except KeyboardInterrupt:
345 thread.stop()
346
347 def print_steam_folders(self, command_line):
348 steamapps_paths = get_steamapps_paths()
349 for platform in ('linux', 'windows'):
350 for path in steamapps_paths[platform]:
351 self._print(command_line, path)
352
353 def do_shutdown(self):
354 logger.info("Shutting down Lutris")
355 Gtk.Application.do_shutdown(self)
356 if self.window:
357 self.window.destroy()
358
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lutris/gui/application.py b/lutris/gui/application.py
--- a/lutris/gui/application.py
+++ b/lutris/gui/application.py
@@ -267,11 +267,32 @@
installer_file=installer_file,
revision=revision)
elif action in ('rungame', 'rungameid'):
+ if not db_game or not db_game['id']:
+ logger.info("No game found in library, shutting down")
+ self.do_shutdown()
+ return 0
+
logger.info("Launching %s" % db_game['name'])
- self.window.on_game_run(game_id=db_game['id'])
+
+ # If game is installed, run it without showing the GUI
+ # Also set a timer to shut down lutris when game ends
+ if db_game['installed']:
+ self.window.hide()
+ self.window.on_game_run(game_id=db_game['id'])
+ GLib.timeout_add(300, self.refresh_status)
+ # If game is not installed, show the GUI
+ else:
+ self.window.on_game_run(game_id=db_game['id'])
+
return 0
+ def refresh_status(self):
+ if self.window.running_game.state == self.window.running_game.STATE_STOPPED:
+ self.do_shutdown()
+ return False
+ return True
+
def get_lutris_action(self, url):
installer_info = {
'game_slug': None,
|
{"golden_diff": "diff --git a/lutris/gui/application.py b/lutris/gui/application.py\n--- a/lutris/gui/application.py\n+++ b/lutris/gui/application.py\n@@ -267,11 +267,32 @@\n installer_file=installer_file,\n revision=revision)\n elif action in ('rungame', 'rungameid'):\n+ if not db_game or not db_game['id']:\n+ logger.info(\"No game found in library, shutting down\")\n+ self.do_shutdown()\n+ return 0\n+ \n logger.info(\"Launching %s\" % db_game['name'])\n- self.window.on_game_run(game_id=db_game['id'])\n+\n+ # If game is installed, run it without showing the GUI\n+ # Also set a timer to shut down lutris when game ends\n+ if db_game['installed']:\n+ self.window.hide()\n+ self.window.on_game_run(game_id=db_game['id'])\n+ GLib.timeout_add(300, self.refresh_status)\n+ # If game is not installed, show the GUI\n+ else:\n+ self.window.on_game_run(game_id=db_game['id'])\n+\n \n return 0\n \n+ def refresh_status(self):\n+ if self.window.running_game.state == self.window.running_game.STATE_STOPPED:\n+ self.do_shutdown()\n+ return False\n+ return True\n+\n def get_lutris_action(self, url):\n installer_info = {\n 'game_slug': None,\n", "issue": "Close Lutris after CLI rungame/rungameid\nWhen using `lutris lutris:rungame/%`, the Lutris window stays open after the game has quit. Would be great to:\r\n\r\n1. Not have Lutris pop up at all\r\n2. Quit Lutris after it was run\r\n\r\nOriginally reported in https://github.com/RobLoach/lutris-kodi-addon/issues/14 by @solbero . Another solution might be a `--no-gui` option or something.\r\n\r\n## Testing\r\n\r\n1. Open up your terminal\r\n2. Run `lutris lutris:rungame/tuxracer`\r\n3. Lutris window pops up and runs the game\r\n4. When you quit the game, Lutris stays open.\n", "before_files": [{"content": "# application.py\n#\n# Copyright (C) 2016 Patrick Griffis <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n\nimport json\nimport logging\nimport os\nimport signal\nimport gettext\nfrom gettext import gettext as _\n\nimport gi\ngi.require_version('Gdk', '3.0')\ngi.require_version('Gtk', '3.0')\nfrom gi.repository import Gio, GLib, Gtk\n\nfrom lutris import pga\nfrom lutris.config import check_config\nfrom lutris.platforms import update_platforms\nfrom lutris.gui.dialogs import ErrorDialog, InstallOrPlayDialog\nfrom lutris.migrations import migrate\nfrom lutris.thread import exec_in_thread\nfrom lutris.util import datapath\nfrom lutris.util.log import logger\nfrom lutris.util.resources import parse_installer_url\nfrom lutris.services.steam import (AppManifest, get_appmanifests,\n get_steamapps_paths)\n\nfrom .lutriswindow import LutrisWindow\n\n\nclass Application(Gtk.Application):\n def __init__(self):\n\n Gtk.Application.__init__(self, application_id='net.lutris.Lutris',\n flags=Gio.ApplicationFlags.HANDLES_COMMAND_LINE)\n\n gettext.bindtextdomain(\"lutris\", \"/usr/share/locale\")\n gettext.textdomain(\"lutris\")\n\n check_config()\n migrate()\n update_platforms()\n\n GLib.set_application_name(_('Lutris'))\n self.window = None\n self.css_provider = Gtk.CssProvider.new()\n\n try:\n self.css_provider.load_from_path(os.path.join(datapath.get(), 'ui', 'lutris.css'))\n except GLib.Error as e:\n logger.exception(e)\n\n if hasattr(self, 'add_main_option'):\n self.add_arguments()\n else:\n ErrorDialog(\"Your Linux distribution is too old, Lutris won't function properly\")\n\n def add_arguments(self):\n self.add_main_option('debug',\n ord('d'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('Show debug messages'),\n None)\n self.add_main_option('install',\n ord('i'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.STRING,\n _('Install a game from a yml file'),\n None)\n self.add_main_option('exec',\n ord('e'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.STRING,\n _('Execute a program with the lutris runtime'),\n None)\n self.add_main_option('list-games',\n ord('l'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('List all games in database'),\n None)\n self.add_main_option('installed',\n ord('o'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('Only list installed games'),\n None)\n self.add_main_option('list-steam-games',\n ord('s'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('List available Steam games'),\n None)\n self.add_main_option('list-steam-folders',\n 0,\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('List all known Steam library folders'),\n None)\n self.add_main_option('json',\n ord('j'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('Display the list of games in JSON format'),\n None)\n self.add_main_option('reinstall',\n 0,\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('Reinstall game'),\n None)\n self.add_main_option(GLib.OPTION_REMAINING,\n 0,\n GLib.OptionFlags.NONE,\n GLib.OptionArg.STRING_ARRAY,\n _('uri to open'),\n 'URI')\n\n def set_connect_state(self, connected):\n # We fiddle with the menu directly which is rather ugly\n menu = self.get_menubar().get_item_link(0, 'submenu').get_item_link(0, 'section')\n menu.remove(0) # Assert that it is the very first item\n if connected:\n item = Gio.MenuItem.new('Disconnect', 'win.disconnect')\n else:\n item = Gio.MenuItem.new('Connect', 'win.connect')\n menu.prepend_item(item)\n\n def do_startup(self):\n Gtk.Application.do_startup(self)\n signal.signal(signal.SIGINT, signal.SIG_DFL)\n\n action = Gio.SimpleAction.new('quit')\n action.connect('activate', lambda *x: self.quit())\n self.add_action(action)\n self.add_accelerator('<Primary>q', 'app.quit')\n\n builder = Gtk.Builder.new_from_file(\n os.path.join(datapath.get(), 'ui', 'menus-traditional.ui')\n )\n menubar = builder.get_object('menubar')\n self.set_menubar(menubar)\n\n def do_activate(self):\n if not self.window:\n self.window = LutrisWindow(application=self)\n screen = self.window.props.screen\n Gtk.StyleContext.add_provider_for_screen(\n screen,\n self.css_provider,\n Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION\n )\n self.window.present()\n\n @staticmethod\n def _print(command_line, string):\n # Workaround broken pygobject bindings\n command_line.do_print_literal(command_line, string + '\\n')\n\n def do_command_line(self, command_line):\n options = command_line.get_options_dict()\n\n # Set up logger\n if options.contains('debug'):\n logger.setLevel(logging.DEBUG)\n\n # Text only commands\n\n # List game\n if options.contains('list-games'):\n game_list = pga.get_games()\n if options.contains('installed'):\n game_list = [game for game in game_list if game['installed']]\n if options.contains('json'):\n self.print_game_json(command_line, game_list)\n else:\n self.print_game_list(command_line, game_list)\n return 0\n # List Steam games\n elif options.contains('list-steam-games'):\n self.print_steam_list(command_line)\n return 0\n # List Steam folders\n elif options.contains('list-steam-folders'):\n self.print_steam_folders(command_line)\n return 0\n\n # Execute command in Lutris context\n elif options.contains('exec'):\n command = options.lookup_value('exec').get_string()\n self.execute_command(command)\n return 0\n\n try:\n url = options.lookup_value(GLib.OPTION_REMAINING)\n installer_info = self.get_lutris_action(url)\n except ValueError:\n self._print(command_line, '%s is not a valid URI' % url.get_strv())\n return 1\n game_slug = installer_info['game_slug']\n action = installer_info['action']\n revision = installer_info['revision']\n\n installer_file = None\n if options.contains('install'):\n installer_file = options.lookup_value('install').get_string()\n installer_file = os.path.abspath(installer_file)\n action = 'install'\n if not os.path.isfile(installer_file):\n self._print(command_line, \"No such file: %s\" % installer_file)\n return 1\n\n # Graphical commands\n self.activate()\n\n db_game = None\n if game_slug:\n if action == 'rungameid':\n # Force db_game to use game id\n db_game = pga.get_game_by_field(game_slug, 'id')\n elif action == 'rungame':\n # Force db_game to use game slug\n db_game = pga.get_game_by_field(game_slug, 'slug')\n elif action == 'install':\n # Installers can use game or installer slugs\n db_game = (pga.get_game_by_field(game_slug, 'slug') or\n pga.get_game_by_field(game_slug, 'installer_slug'))\n\n else:\n # Dazed and confused, try anything that might works\n db_game = (pga.get_game_by_field(game_slug, 'id') or\n pga.get_game_by_field(game_slug, 'slug') or\n pga.get_game_by_field(game_slug, 'installer_slug'))\n\n if not action:\n if db_game and db_game['installed']:\n # Game found but no action provided, ask what to do\n dlg = InstallOrPlayDialog(db_game['name'])\n if not dlg.action_confirmed:\n action = None\n if dlg.action == 'play':\n action = 'rungame'\n elif dlg.action == 'install':\n action = 'install'\n elif game_slug or installer_file:\n # No game found, default to install if a game_slug or\n # installer_file is provided\n action = 'install'\n\n if action == 'install':\n self.window.on_install_clicked(game_slug=game_slug,\n installer_file=installer_file,\n revision=revision)\n elif action in ('rungame', 'rungameid'):\n logger.info(\"Launching %s\" % db_game['name'])\n self.window.on_game_run(game_id=db_game['id'])\n\n return 0\n\n def get_lutris_action(self, url):\n installer_info = {\n 'game_slug': None,\n 'revision': None,\n 'action': None\n }\n\n if url:\n url = url.get_strv()\n\n if url and len(url):\n url = url[0] # TODO: Support multiple\n installer_info = parse_installer_url(url)\n if installer_info is False:\n raise ValueError\n return installer_info\n\n def print_game_list(self, command_line, game_list):\n for game in game_list:\n self._print(\n command_line,\n \"{:4} | {:<40} | {:<40} | {:<15} | {:<64}\".format(\n game['id'],\n game['name'][:40],\n game['slug'][:40],\n game['runner'] or '-',\n game['directory'] or '-'\n )\n )\n\n def print_game_json(self, command_line, game_list):\n games = [\n {\n 'id': game['id'],\n 'slug': game['slug'],\n 'name': game['name'],\n 'runner': game['runner'],\n 'directory': game['directory']\n }\n for game in game_list\n ]\n self._print(command_line, json.dumps(games, indent=2))\n\n def print_steam_list(self, command_line):\n steamapps_paths = get_steamapps_paths()\n for platform in ('linux', 'windows'):\n for path in steamapps_paths[platform]:\n appmanifest_files = get_appmanifests(path)\n for appmanifest_file in appmanifest_files:\n appmanifest = AppManifest(os.path.join(path, appmanifest_file))\n self._print(\n command_line,\n \" {:8} | {:<60} | {:10} | {}\".format(\n appmanifest.steamid,\n appmanifest.name or '-',\n platform,\n \", \".join(appmanifest.states)\n )\n )\n\n def execute_command(self, command):\n \"\"\"\n Execute an arbitrary command in a Lutris context\n with the runtime enabled and monitored by LutrisThread\n \"\"\"\n logger.info(\"Running command '{}'\".format(command))\n thread = exec_in_thread(command)\n try:\n GLib.MainLoop().run()\n except KeyboardInterrupt:\n thread.stop()\n\n def print_steam_folders(self, command_line):\n steamapps_paths = get_steamapps_paths()\n for platform in ('linux', 'windows'):\n for path in steamapps_paths[platform]:\n self._print(command_line, path)\n\n def do_shutdown(self):\n logger.info(\"Shutting down Lutris\")\n Gtk.Application.do_shutdown(self)\n if self.window:\n self.window.destroy()\n", "path": "lutris/gui/application.py"}], "after_files": [{"content": "# application.py\n#\n# Copyright (C) 2016 Patrick Griffis <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n\nimport json\nimport logging\nimport os\nimport signal\nimport gettext\nfrom gettext import gettext as _\n\nimport gi\ngi.require_version('Gdk', '3.0')\ngi.require_version('Gtk', '3.0')\nfrom gi.repository import Gio, GLib, Gtk\n\nfrom lutris import pga\nfrom lutris.config import check_config\nfrom lutris.platforms import update_platforms\nfrom lutris.gui.dialogs import ErrorDialog, InstallOrPlayDialog\nfrom lutris.migrations import migrate\nfrom lutris.thread import exec_in_thread\nfrom lutris.util import datapath\nfrom lutris.util.log import logger\nfrom lutris.util.resources import parse_installer_url\nfrom lutris.services.steam import (AppManifest, get_appmanifests,\n get_steamapps_paths)\n\nfrom .lutriswindow import LutrisWindow\n\n\nclass Application(Gtk.Application):\n def __init__(self):\n\n Gtk.Application.__init__(self, application_id='net.lutris.Lutris',\n flags=Gio.ApplicationFlags.HANDLES_COMMAND_LINE)\n\n gettext.bindtextdomain(\"lutris\", \"/usr/share/locale\")\n gettext.textdomain(\"lutris\")\n\n check_config()\n migrate()\n update_platforms()\n\n GLib.set_application_name(_('Lutris'))\n self.window = None\n self.css_provider = Gtk.CssProvider.new()\n\n try:\n self.css_provider.load_from_path(os.path.join(datapath.get(), 'ui', 'lutris.css'))\n except GLib.Error as e:\n logger.exception(e)\n\n if hasattr(self, 'add_main_option'):\n self.add_arguments()\n else:\n ErrorDialog(\"Your Linux distribution is too old, Lutris won't function properly\")\n\n def add_arguments(self):\n self.add_main_option('debug',\n ord('d'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('Show debug messages'),\n None)\n self.add_main_option('install',\n ord('i'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.STRING,\n _('Install a game from a yml file'),\n None)\n self.add_main_option('exec',\n ord('e'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.STRING,\n _('Execute a program with the lutris runtime'),\n None)\n self.add_main_option('list-games',\n ord('l'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('List all games in database'),\n None)\n self.add_main_option('installed',\n ord('o'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('Only list installed games'),\n None)\n self.add_main_option('list-steam-games',\n ord('s'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('List available Steam games'),\n None)\n self.add_main_option('list-steam-folders',\n 0,\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('List all known Steam library folders'),\n None)\n self.add_main_option('json',\n ord('j'),\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('Display the list of games in JSON format'),\n None)\n self.add_main_option('reinstall',\n 0,\n GLib.OptionFlags.NONE,\n GLib.OptionArg.NONE,\n _('Reinstall game'),\n None)\n self.add_main_option(GLib.OPTION_REMAINING,\n 0,\n GLib.OptionFlags.NONE,\n GLib.OptionArg.STRING_ARRAY,\n _('uri to open'),\n 'URI')\n\n def set_connect_state(self, connected):\n # We fiddle with the menu directly which is rather ugly\n menu = self.get_menubar().get_item_link(0, 'submenu').get_item_link(0, 'section')\n menu.remove(0) # Assert that it is the very first item\n if connected:\n item = Gio.MenuItem.new('Disconnect', 'win.disconnect')\n else:\n item = Gio.MenuItem.new('Connect', 'win.connect')\n menu.prepend_item(item)\n\n def do_startup(self):\n Gtk.Application.do_startup(self)\n signal.signal(signal.SIGINT, signal.SIG_DFL)\n\n action = Gio.SimpleAction.new('quit')\n action.connect('activate', lambda *x: self.quit())\n self.add_action(action)\n self.add_accelerator('<Primary>q', 'app.quit')\n\n builder = Gtk.Builder.new_from_file(\n os.path.join(datapath.get(), 'ui', 'menus-traditional.ui')\n )\n menubar = builder.get_object('menubar')\n self.set_menubar(menubar)\n\n def do_activate(self):\n if not self.window:\n self.window = LutrisWindow(application=self)\n screen = self.window.props.screen\n Gtk.StyleContext.add_provider_for_screen(\n screen,\n self.css_provider,\n Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION\n )\n self.window.present()\n\n @staticmethod\n def _print(command_line, string):\n # Workaround broken pygobject bindings\n command_line.do_print_literal(command_line, string + '\\n')\n\n def do_command_line(self, command_line):\n options = command_line.get_options_dict()\n\n # Set up logger\n if options.contains('debug'):\n logger.setLevel(logging.DEBUG)\n\n # Text only commands\n\n # List game\n if options.contains('list-games'):\n game_list = pga.get_games()\n if options.contains('installed'):\n game_list = [game for game in game_list if game['installed']]\n if options.contains('json'):\n self.print_game_json(command_line, game_list)\n else:\n self.print_game_list(command_line, game_list)\n return 0\n # List Steam games\n elif options.contains('list-steam-games'):\n self.print_steam_list(command_line)\n return 0\n # List Steam folders\n elif options.contains('list-steam-folders'):\n self.print_steam_folders(command_line)\n return 0\n\n # Execute command in Lutris context\n elif options.contains('exec'):\n command = options.lookup_value('exec').get_string()\n self.execute_command(command)\n return 0\n\n try:\n url = options.lookup_value(GLib.OPTION_REMAINING)\n installer_info = self.get_lutris_action(url)\n except ValueError:\n self._print(command_line, '%s is not a valid URI' % url.get_strv())\n return 1\n game_slug = installer_info['game_slug']\n action = installer_info['action']\n revision = installer_info['revision']\n\n installer_file = None\n if options.contains('install'):\n installer_file = options.lookup_value('install').get_string()\n installer_file = os.path.abspath(installer_file)\n action = 'install'\n if not os.path.isfile(installer_file):\n self._print(command_line, \"No such file: %s\" % installer_file)\n return 1\n\n # Graphical commands\n self.activate()\n\n db_game = None\n if game_slug:\n if action == 'rungameid':\n # Force db_game to use game id\n db_game = pga.get_game_by_field(game_slug, 'id')\n elif action == 'rungame':\n # Force db_game to use game slug\n db_game = pga.get_game_by_field(game_slug, 'slug')\n elif action == 'install':\n # Installers can use game or installer slugs\n db_game = (pga.get_game_by_field(game_slug, 'slug') or\n pga.get_game_by_field(game_slug, 'installer_slug'))\n\n else:\n # Dazed and confused, try anything that might works\n db_game = (pga.get_game_by_field(game_slug, 'id') or\n pga.get_game_by_field(game_slug, 'slug') or\n pga.get_game_by_field(game_slug, 'installer_slug'))\n\n if not action:\n if db_game and db_game['installed']:\n # Game found but no action provided, ask what to do\n dlg = InstallOrPlayDialog(db_game['name'])\n if not dlg.action_confirmed:\n action = None\n if dlg.action == 'play':\n action = 'rungame'\n elif dlg.action == 'install':\n action = 'install'\n elif game_slug or installer_file:\n # No game found, default to install if a game_slug or\n # installer_file is provided\n action = 'install'\n\n if action == 'install':\n self.window.on_install_clicked(game_slug=game_slug,\n installer_file=installer_file,\n revision=revision)\n elif action in ('rungame', 'rungameid'):\n if not db_game or not db_game['id']:\n logger.info(\"No game found in library, shutting down\")\n self.do_shutdown()\n return 0\n \n logger.info(\"Launching %s\" % db_game['name'])\n\n # If game is installed, run it without showing the GUI\n # Also set a timer to shut down lutris when game ends\n if db_game['installed']:\n self.window.hide()\n self.window.on_game_run(game_id=db_game['id'])\n GLib.timeout_add(300, self.refresh_status)\n # If game is not installed, show the GUI\n else:\n self.window.on_game_run(game_id=db_game['id'])\n\n\n return 0\n\n def refresh_status(self):\n if self.window.running_game.state == self.window.running_game.STATE_STOPPED:\n self.do_shutdown()\n return False\n return True\n\n def get_lutris_action(self, url):\n installer_info = {\n 'game_slug': None,\n 'revision': None,\n 'action': None\n }\n\n if url:\n url = url.get_strv()\n\n if url and len(url):\n url = url[0] # TODO: Support multiple\n installer_info = parse_installer_url(url)\n if installer_info is False:\n raise ValueError\n return installer_info\n\n def print_game_list(self, command_line, game_list):\n for game in game_list:\n self._print(\n command_line,\n \"{:4} | {:<40} | {:<40} | {:<15} | {:<64}\".format(\n game['id'],\n game['name'][:40],\n game['slug'][:40],\n game['runner'] or '-',\n game['directory'] or '-'\n )\n )\n\n def print_game_json(self, command_line, game_list):\n games = [\n {\n 'id': game['id'],\n 'slug': game['slug'],\n 'name': game['name'],\n 'runner': game['runner'],\n 'directory': game['directory']\n }\n for game in game_list\n ]\n self._print(command_line, json.dumps(games, indent=2))\n\n def print_steam_list(self, command_line):\n steamapps_paths = get_steamapps_paths()\n for platform in ('linux', 'windows'):\n for path in steamapps_paths[platform]:\n appmanifest_files = get_appmanifests(path)\n for appmanifest_file in appmanifest_files:\n appmanifest = AppManifest(os.path.join(path, appmanifest_file))\n self._print(\n command_line,\n \" {:8} | {:<60} | {:10} | {}\".format(\n appmanifest.steamid,\n appmanifest.name or '-',\n platform,\n \", \".join(appmanifest.states)\n )\n )\n\n def execute_command(self, command):\n \"\"\"\n Execute an arbitrary command in a Lutris context\n with the runtime enabled and monitored by LutrisThread\n \"\"\"\n logger.info(\"Running command '{}'\".format(command))\n thread = exec_in_thread(command)\n try:\n GLib.MainLoop().run()\n except KeyboardInterrupt:\n thread.stop()\n\n def print_steam_folders(self, command_line):\n steamapps_paths = get_steamapps_paths()\n for platform in ('linux', 'windows'):\n for path in steamapps_paths[platform]:\n self._print(command_line, path)\n\n def do_shutdown(self):\n logger.info(\"Shutting down Lutris\")\n Gtk.Application.do_shutdown(self)\n if self.window:\n self.window.destroy()\n", "path": "lutris/gui/application.py"}]}
| 4,054 | 325 |
gh_patches_debug_33930
|
rasdani/github-patches
|
git_diff
|
jazzband__pip-tools-267
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pip-sync has no --find-links option
`pip-compile --find-links=some/dir/ requirements.in` isn't particularly useful if you can't then run `pip-sync --find-links=some/dir/ requirements.txt` to install dependencies from `some/dir/`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `piptools/scripts/sync.py`
Content:
```
1 # coding: utf-8
2 from __future__ import (absolute_import, division, print_function,
3 unicode_literals)
4
5 import sys
6
7 import pip
8
9 # Make sure we're using a reasonably modern version of pip
10 if not tuple(int(digit) for digit in pip.__version__.split('.')[:2]) >= (6, 1):
11 print('pip-compile requires at least version 6.1 of pip ({} found), '
12 'perhaps run `pip install --upgrade pip`?'.format(pip.__version__))
13 sys.exit(4)
14
15 import os # noqa
16 from .. import click # noqa
17 from .. import sync # noqa
18 from ..exceptions import PipToolsError # noqa
19 from ..logging import log # noqa
20 from ..utils import flat_map # noqa
21
22 DEFAULT_REQUIREMENTS_FILE = 'requirements.txt'
23
24
25 @click.command()
26 @click.option('--dry-run', is_flag=True, help="Only show what would happen, don't change anything")
27 @click.option('--force', is_flag=True, help="Proceed even if conflicts are found")
28 @click.argument('src_files', required=False, type=click.Path(exists=True), nargs=-1)
29 def cli(dry_run, force, src_files):
30 if not src_files:
31 if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
32 src_files = (DEFAULT_REQUIREMENTS_FILE,)
33 else:
34 msg = 'No requirement files given and no {} found in the current directory'
35 log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))
36 sys.exit(2)
37
38 if any(src_file.endswith('.in') for src_file in src_files):
39 msg = ('Some input files have the .in extension, which is most likely an error and can '
40 'cause weird behaviour. You probably meant to use the corresponding *.txt file?')
41 if force:
42 log.warning('WARNING: ' + msg)
43 else:
44 log.error('ERROR: ' + msg)
45 sys.exit(2)
46
47 requirements = flat_map(lambda src: pip.req.parse_requirements(src, session=True),
48 src_files)
49
50 try:
51 requirements = sync.merge(requirements, ignore_conflicts=force)
52 except PipToolsError as e:
53 log.error(str(e))
54 sys.exit(2)
55
56 installed_dists = pip.get_installed_distributions()
57 to_install, to_uninstall = sync.diff(requirements, installed_dists)
58
59 sys.exit(sync.sync(to_install, to_uninstall, verbose=True, dry_run=dry_run))
60
```
Path: `piptools/sync.py`
Content:
```
1 import collections
2
3 from . import click
4 import pip
5
6 from .exceptions import IncompatibleRequirements, UnsupportedConstraint
7 from .utils import flat_map
8
9 PACKAGES_TO_IGNORE = [
10 'pip',
11 'pip-tools',
12 'pip-review',
13 'setuptools',
14 'wheel',
15 ]
16
17
18 def dependency_tree(installed_keys, root_key):
19 """
20 Calculate the dependency tree for the package `root_key` and return
21 a collection of all its dependencies. Uses a DFS traversal algorithm.
22
23 `installed_keys` should be a {key: requirement} mapping, e.g.
24 {'django': from_line('django==1.8')}
25 `root_key` should be the key to return the dependency tree for.
26 """
27 dependencies = set()
28 queue = collections.deque()
29
30 if root_key in installed_keys:
31 dep = installed_keys[root_key]
32 queue.append(dep)
33
34 while queue:
35 v = queue.popleft()
36 if v.key in dependencies:
37 continue
38
39 dependencies.add(v.key)
40
41 for dep_specifier in v.requires():
42 dep_name = dep_specifier.key
43 if dep_name in installed_keys:
44 dep = installed_keys[dep_name]
45
46 if dep_specifier.specifier.contains(dep.version):
47 queue.append(dep)
48
49 return dependencies
50
51
52 def get_dists_to_ignore(installed):
53 """
54 Returns a collection of package names to ignore when performing pip-sync,
55 based on the currently installed environment. For example, when pip-tools
56 is installed in the local environment, it should be ignored, including all
57 of its dependencies (e.g. click). When pip-tools is not installed
58 locally, click should also be installed/uninstalled depending on the given
59 requirements.
60 """
61 installed_keys = {r.key: r for r in installed}
62 return list(flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE))
63
64
65 def merge(requirements, ignore_conflicts):
66 by_key = {}
67
68 for ireq in requirements:
69 if ireq.link is not None and not ireq.editable:
70 msg = ('pip-compile does not support URLs as packages, unless they are editable. '
71 'Perhaps add -e option?')
72 raise UnsupportedConstraint(msg, ireq)
73
74 key = ireq.link or ireq.req.key
75
76 if not ignore_conflicts:
77 existing_ireq = by_key.get(key)
78 if existing_ireq:
79 # NOTE: We check equality here since we can assume that the
80 # requirements are all pinned
81 if ireq.specifier != existing_ireq.specifier:
82 raise IncompatibleRequirements(ireq, existing_ireq)
83
84 # TODO: Always pick the largest specifier in case of a conflict
85 by_key[key] = ireq
86
87 return by_key.values()
88
89
90 def diff(compiled_requirements, installed_dists):
91 """
92 Calculate which packages should be installed or uninstalled, given a set
93 of compiled requirements and a list of currently installed modules.
94 """
95 requirements_lut = {r.link or r.req.key: r for r in compiled_requirements}
96
97 satisfied = set() # holds keys
98 to_install = set() # holds keys-and-versions
99 to_uninstall = set() # holds keys
100
101 pkgs_to_ignore = get_dists_to_ignore(installed_dists)
102 for dist in installed_dists:
103 key = dist.key
104 if key not in requirements_lut:
105 to_uninstall.add(dist.key)
106 elif requirements_lut[key].specifier.contains(dist.version):
107 satisfied.add(key)
108
109 for key, requirement in requirements_lut.items():
110 if key not in satisfied:
111 to_install.add(str(requirement.link or requirement.req))
112
113 # Make sure to not uninstall any packages that should be ignored
114 to_uninstall -= set(pkgs_to_ignore)
115
116 return (to_install, to_uninstall)
117
118
119 def sync(to_install, to_uninstall, verbose=False, dry_run=False):
120 """
121 Install and uninstalls the given sets of modules.
122 """
123 if not to_uninstall and not to_install:
124 click.echo("Everything up-to-date")
125
126 pip_flags = []
127 if not verbose:
128 pip_flags.append('-q')
129
130 if to_uninstall:
131 if dry_run:
132 click.echo("Would uninstall:")
133 for pkg in to_uninstall:
134 click.echo(" {}".format(pkg))
135 else:
136 pip.main(["uninstall", '-y'] + pip_flags + [pkg for pkg in to_uninstall])
137
138 if to_install:
139 if dry_run:
140 click.echo("Would install:")
141 for pkg in to_install:
142 click.echo(" {}".format(pkg))
143 else:
144 return pip.main(["install"] + pip_flags + [pkg for pkg in to_install])
145 return 0
146
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/piptools/scripts/sync.py b/piptools/scripts/sync.py
--- a/piptools/scripts/sync.py
+++ b/piptools/scripts/sync.py
@@ -25,8 +25,10 @@
@click.command()
@click.option('--dry-run', is_flag=True, help="Only show what would happen, don't change anything")
@click.option('--force', is_flag=True, help="Proceed even if conflicts are found")
[email protected]('-f', '--find-links', multiple=True, help="Look for archives in this directory or on this HTML page", envvar='PIP_FIND_LINKS') # noqa
[email protected]('--no-index', is_flag=True, help="Ignore package index (only looking at --find-links URLs instead)")
@click.argument('src_files', required=False, type=click.Path(exists=True), nargs=-1)
-def cli(dry_run, force, src_files):
+def cli(dry_run, force, find_links, no_index, src_files):
if not src_files:
if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
src_files = (DEFAULT_REQUIREMENTS_FILE,)
@@ -56,4 +58,11 @@
installed_dists = pip.get_installed_distributions()
to_install, to_uninstall = sync.diff(requirements, installed_dists)
- sys.exit(sync.sync(to_install, to_uninstall, verbose=True, dry_run=dry_run))
+ pip_flags = []
+ for link in find_links or []:
+ pip_flags.extend(['-f', link])
+ if no_index:
+ pip_flags.append('--no-index')
+
+ sys.exit(sync.sync(to_install, to_uninstall, verbose=True, dry_run=dry_run,
+ pip_flags=pip_flags))
diff --git a/piptools/sync.py b/piptools/sync.py
--- a/piptools/sync.py
+++ b/piptools/sync.py
@@ -116,14 +116,14 @@
return (to_install, to_uninstall)
-def sync(to_install, to_uninstall, verbose=False, dry_run=False):
+def sync(to_install, to_uninstall, verbose=False, dry_run=False, pip_flags=[]):
"""
Install and uninstalls the given sets of modules.
"""
if not to_uninstall and not to_install:
click.echo("Everything up-to-date")
- pip_flags = []
+ pip_flags = pip_flags[:]
if not verbose:
pip_flags.append('-q')
|
{"golden_diff": "diff --git a/piptools/scripts/sync.py b/piptools/scripts/sync.py\n--- a/piptools/scripts/sync.py\n+++ b/piptools/scripts/sync.py\n@@ -25,8 +25,10 @@\n @click.command()\n @click.option('--dry-run', is_flag=True, help=\"Only show what would happen, don't change anything\")\n @click.option('--force', is_flag=True, help=\"Proceed even if conflicts are found\")\[email protected]('-f', '--find-links', multiple=True, help=\"Look for archives in this directory or on this HTML page\", envvar='PIP_FIND_LINKS') # noqa\[email protected]('--no-index', is_flag=True, help=\"Ignore package index (only looking at --find-links URLs instead)\")\n @click.argument('src_files', required=False, type=click.Path(exists=True), nargs=-1)\n-def cli(dry_run, force, src_files):\n+def cli(dry_run, force, find_links, no_index, src_files):\n if not src_files:\n if os.path.exists(DEFAULT_REQUIREMENTS_FILE):\n src_files = (DEFAULT_REQUIREMENTS_FILE,)\n@@ -56,4 +58,11 @@\n installed_dists = pip.get_installed_distributions()\n to_install, to_uninstall = sync.diff(requirements, installed_dists)\n \n- sys.exit(sync.sync(to_install, to_uninstall, verbose=True, dry_run=dry_run))\n+ pip_flags = []\n+ for link in find_links or []:\n+ pip_flags.extend(['-f', link])\n+ if no_index:\n+ pip_flags.append('--no-index')\n+\n+ sys.exit(sync.sync(to_install, to_uninstall, verbose=True, dry_run=dry_run,\n+ pip_flags=pip_flags))\ndiff --git a/piptools/sync.py b/piptools/sync.py\n--- a/piptools/sync.py\n+++ b/piptools/sync.py\n@@ -116,14 +116,14 @@\n return (to_install, to_uninstall)\n \n \n-def sync(to_install, to_uninstall, verbose=False, dry_run=False):\n+def sync(to_install, to_uninstall, verbose=False, dry_run=False, pip_flags=[]):\n \"\"\"\n Install and uninstalls the given sets of modules.\n \"\"\"\n if not to_uninstall and not to_install:\n click.echo(\"Everything up-to-date\")\n \n- pip_flags = []\n+ pip_flags = pip_flags[:]\n if not verbose:\n pip_flags.append('-q')\n", "issue": "pip-sync has no --find-links option\n`pip-compile --find-links=some/dir/ requirements.in` isn't particularly useful if you can't then run `pip-sync --find-links=some/dir/ requirements.txt` to install dependencies from `some/dir/`.\n\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import (absolute_import, division, print_function,\n unicode_literals)\n\nimport sys\n\nimport pip\n\n# Make sure we're using a reasonably modern version of pip\nif not tuple(int(digit) for digit in pip.__version__.split('.')[:2]) >= (6, 1):\n print('pip-compile requires at least version 6.1 of pip ({} found), '\n 'perhaps run `pip install --upgrade pip`?'.format(pip.__version__))\n sys.exit(4)\n\nimport os # noqa\nfrom .. import click # noqa\nfrom .. import sync # noqa\nfrom ..exceptions import PipToolsError # noqa\nfrom ..logging import log # noqa\nfrom ..utils import flat_map # noqa\n\nDEFAULT_REQUIREMENTS_FILE = 'requirements.txt'\n\n\[email protected]()\[email protected]('--dry-run', is_flag=True, help=\"Only show what would happen, don't change anything\")\[email protected]('--force', is_flag=True, help=\"Proceed even if conflicts are found\")\[email protected]('src_files', required=False, type=click.Path(exists=True), nargs=-1)\ndef cli(dry_run, force, src_files):\n if not src_files:\n if os.path.exists(DEFAULT_REQUIREMENTS_FILE):\n src_files = (DEFAULT_REQUIREMENTS_FILE,)\n else:\n msg = 'No requirement files given and no {} found in the current directory'\n log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))\n sys.exit(2)\n\n if any(src_file.endswith('.in') for src_file in src_files):\n msg = ('Some input files have the .in extension, which is most likely an error and can '\n 'cause weird behaviour. You probably meant to use the corresponding *.txt file?')\n if force:\n log.warning('WARNING: ' + msg)\n else:\n log.error('ERROR: ' + msg)\n sys.exit(2)\n\n requirements = flat_map(lambda src: pip.req.parse_requirements(src, session=True),\n src_files)\n\n try:\n requirements = sync.merge(requirements, ignore_conflicts=force)\n except PipToolsError as e:\n log.error(str(e))\n sys.exit(2)\n\n installed_dists = pip.get_installed_distributions()\n to_install, to_uninstall = sync.diff(requirements, installed_dists)\n\n sys.exit(sync.sync(to_install, to_uninstall, verbose=True, dry_run=dry_run))\n", "path": "piptools/scripts/sync.py"}, {"content": "import collections\n\nfrom . import click\nimport pip\n\nfrom .exceptions import IncompatibleRequirements, UnsupportedConstraint\nfrom .utils import flat_map\n\nPACKAGES_TO_IGNORE = [\n 'pip',\n 'pip-tools',\n 'pip-review',\n 'setuptools',\n 'wheel',\n]\n\n\ndef dependency_tree(installed_keys, root_key):\n \"\"\"\n Calculate the dependency tree for the package `root_key` and return\n a collection of all its dependencies. Uses a DFS traversal algorithm.\n\n `installed_keys` should be a {key: requirement} mapping, e.g.\n {'django': from_line('django==1.8')}\n `root_key` should be the key to return the dependency tree for.\n \"\"\"\n dependencies = set()\n queue = collections.deque()\n\n if root_key in installed_keys:\n dep = installed_keys[root_key]\n queue.append(dep)\n\n while queue:\n v = queue.popleft()\n if v.key in dependencies:\n continue\n\n dependencies.add(v.key)\n\n for dep_specifier in v.requires():\n dep_name = dep_specifier.key\n if dep_name in installed_keys:\n dep = installed_keys[dep_name]\n\n if dep_specifier.specifier.contains(dep.version):\n queue.append(dep)\n\n return dependencies\n\n\ndef get_dists_to_ignore(installed):\n \"\"\"\n Returns a collection of package names to ignore when performing pip-sync,\n based on the currently installed environment. For example, when pip-tools\n is installed in the local environment, it should be ignored, including all\n of its dependencies (e.g. click). When pip-tools is not installed\n locally, click should also be installed/uninstalled depending on the given\n requirements.\n \"\"\"\n installed_keys = {r.key: r for r in installed}\n return list(flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE))\n\n\ndef merge(requirements, ignore_conflicts):\n by_key = {}\n\n for ireq in requirements:\n if ireq.link is not None and not ireq.editable:\n msg = ('pip-compile does not support URLs as packages, unless they are editable. '\n 'Perhaps add -e option?')\n raise UnsupportedConstraint(msg, ireq)\n\n key = ireq.link or ireq.req.key\n\n if not ignore_conflicts:\n existing_ireq = by_key.get(key)\n if existing_ireq:\n # NOTE: We check equality here since we can assume that the\n # requirements are all pinned\n if ireq.specifier != existing_ireq.specifier:\n raise IncompatibleRequirements(ireq, existing_ireq)\n\n # TODO: Always pick the largest specifier in case of a conflict\n by_key[key] = ireq\n\n return by_key.values()\n\n\ndef diff(compiled_requirements, installed_dists):\n \"\"\"\n Calculate which packages should be installed or uninstalled, given a set\n of compiled requirements and a list of currently installed modules.\n \"\"\"\n requirements_lut = {r.link or r.req.key: r for r in compiled_requirements}\n\n satisfied = set() # holds keys\n to_install = set() # holds keys-and-versions\n to_uninstall = set() # holds keys\n\n pkgs_to_ignore = get_dists_to_ignore(installed_dists)\n for dist in installed_dists:\n key = dist.key\n if key not in requirements_lut:\n to_uninstall.add(dist.key)\n elif requirements_lut[key].specifier.contains(dist.version):\n satisfied.add(key)\n\n for key, requirement in requirements_lut.items():\n if key not in satisfied:\n to_install.add(str(requirement.link or requirement.req))\n\n # Make sure to not uninstall any packages that should be ignored\n to_uninstall -= set(pkgs_to_ignore)\n\n return (to_install, to_uninstall)\n\n\ndef sync(to_install, to_uninstall, verbose=False, dry_run=False):\n \"\"\"\n Install and uninstalls the given sets of modules.\n \"\"\"\n if not to_uninstall and not to_install:\n click.echo(\"Everything up-to-date\")\n\n pip_flags = []\n if not verbose:\n pip_flags.append('-q')\n\n if to_uninstall:\n if dry_run:\n click.echo(\"Would uninstall:\")\n for pkg in to_uninstall:\n click.echo(\" {}\".format(pkg))\n else:\n pip.main([\"uninstall\", '-y'] + pip_flags + [pkg for pkg in to_uninstall])\n\n if to_install:\n if dry_run:\n click.echo(\"Would install:\")\n for pkg in to_install:\n click.echo(\" {}\".format(pkg))\n else:\n return pip.main([\"install\"] + pip_flags + [pkg for pkg in to_install])\n return 0\n", "path": "piptools/sync.py"}], "after_files": [{"content": "# coding: utf-8\nfrom __future__ import (absolute_import, division, print_function,\n unicode_literals)\n\nimport sys\n\nimport pip\n\n# Make sure we're using a reasonably modern version of pip\nif not tuple(int(digit) for digit in pip.__version__.split('.')[:2]) >= (6, 1):\n print('pip-compile requires at least version 6.1 of pip ({} found), '\n 'perhaps run `pip install --upgrade pip`?'.format(pip.__version__))\n sys.exit(4)\n\nimport os # noqa\nfrom .. import click # noqa\nfrom .. import sync # noqa\nfrom ..exceptions import PipToolsError # noqa\nfrom ..logging import log # noqa\nfrom ..utils import flat_map # noqa\n\nDEFAULT_REQUIREMENTS_FILE = 'requirements.txt'\n\n\[email protected]()\[email protected]('--dry-run', is_flag=True, help=\"Only show what would happen, don't change anything\")\[email protected]('--force', is_flag=True, help=\"Proceed even if conflicts are found\")\[email protected]('-f', '--find-links', multiple=True, help=\"Look for archives in this directory or on this HTML page\", envvar='PIP_FIND_LINKS') # noqa\[email protected]('--no-index', is_flag=True, help=\"Ignore package index (only looking at --find-links URLs instead)\")\[email protected]('src_files', required=False, type=click.Path(exists=True), nargs=-1)\ndef cli(dry_run, force, find_links, no_index, src_files):\n if not src_files:\n if os.path.exists(DEFAULT_REQUIREMENTS_FILE):\n src_files = (DEFAULT_REQUIREMENTS_FILE,)\n else:\n msg = 'No requirement files given and no {} found in the current directory'\n log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))\n sys.exit(2)\n\n if any(src_file.endswith('.in') for src_file in src_files):\n msg = ('Some input files have the .in extension, which is most likely an error and can '\n 'cause weird behaviour. You probably meant to use the corresponding *.txt file?')\n if force:\n log.warning('WARNING: ' + msg)\n else:\n log.error('ERROR: ' + msg)\n sys.exit(2)\n\n requirements = flat_map(lambda src: pip.req.parse_requirements(src, session=True),\n src_files)\n\n try:\n requirements = sync.merge(requirements, ignore_conflicts=force)\n except PipToolsError as e:\n log.error(str(e))\n sys.exit(2)\n\n installed_dists = pip.get_installed_distributions()\n to_install, to_uninstall = sync.diff(requirements, installed_dists)\n\n pip_flags = []\n for link in find_links or []:\n pip_flags.extend(['-f', link])\n if no_index:\n pip_flags.append('--no-index')\n\n sys.exit(sync.sync(to_install, to_uninstall, verbose=True, dry_run=dry_run,\n pip_flags=pip_flags))\n", "path": "piptools/scripts/sync.py"}, {"content": "import collections\n\nfrom . import click\nimport pip\n\nfrom .exceptions import IncompatibleRequirements, UnsupportedConstraint\nfrom .utils import flat_map\n\nPACKAGES_TO_IGNORE = [\n 'pip',\n 'pip-tools',\n 'pip-review',\n 'setuptools',\n 'wheel',\n]\n\n\ndef dependency_tree(installed_keys, root_key):\n \"\"\"\n Calculate the dependency tree for the package `root_key` and return\n a collection of all its dependencies. Uses a DFS traversal algorithm.\n\n `installed_keys` should be a {key: requirement} mapping, e.g.\n {'django': from_line('django==1.8')}\n `root_key` should be the key to return the dependency tree for.\n \"\"\"\n dependencies = set()\n queue = collections.deque()\n\n if root_key in installed_keys:\n dep = installed_keys[root_key]\n queue.append(dep)\n\n while queue:\n v = queue.popleft()\n if v.key in dependencies:\n continue\n\n dependencies.add(v.key)\n\n for dep_specifier in v.requires():\n dep_name = dep_specifier.key\n if dep_name in installed_keys:\n dep = installed_keys[dep_name]\n\n if dep_specifier.specifier.contains(dep.version):\n queue.append(dep)\n\n return dependencies\n\n\ndef get_dists_to_ignore(installed):\n \"\"\"\n Returns a collection of package names to ignore when performing pip-sync,\n based on the currently installed environment. For example, when pip-tools\n is installed in the local environment, it should be ignored, including all\n of its dependencies (e.g. click). When pip-tools is not installed\n locally, click should also be installed/uninstalled depending on the given\n requirements.\n \"\"\"\n installed_keys = {r.key: r for r in installed}\n return list(flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE))\n\n\ndef merge(requirements, ignore_conflicts):\n by_key = {}\n\n for ireq in requirements:\n if ireq.link is not None and not ireq.editable:\n msg = ('pip-compile does not support URLs as packages, unless they are editable. '\n 'Perhaps add -e option?')\n raise UnsupportedConstraint(msg, ireq)\n\n key = ireq.link or ireq.req.key\n\n if not ignore_conflicts:\n existing_ireq = by_key.get(key)\n if existing_ireq:\n # NOTE: We check equality here since we can assume that the\n # requirements are all pinned\n if ireq.specifier != existing_ireq.specifier:\n raise IncompatibleRequirements(ireq, existing_ireq)\n\n # TODO: Always pick the largest specifier in case of a conflict\n by_key[key] = ireq\n\n return by_key.values()\n\n\ndef diff(compiled_requirements, installed_dists):\n \"\"\"\n Calculate which packages should be installed or uninstalled, given a set\n of compiled requirements and a list of currently installed modules.\n \"\"\"\n requirements_lut = {r.link or r.req.key: r for r in compiled_requirements}\n\n satisfied = set() # holds keys\n to_install = set() # holds keys-and-versions\n to_uninstall = set() # holds keys\n\n pkgs_to_ignore = get_dists_to_ignore(installed_dists)\n for dist in installed_dists:\n key = dist.key\n if key not in requirements_lut:\n to_uninstall.add(dist.key)\n elif requirements_lut[key].specifier.contains(dist.version):\n satisfied.add(key)\n\n for key, requirement in requirements_lut.items():\n if key not in satisfied:\n to_install.add(str(requirement.link or requirement.req))\n\n # Make sure to not uninstall any packages that should be ignored\n to_uninstall -= set(pkgs_to_ignore)\n\n return (to_install, to_uninstall)\n\n\ndef sync(to_install, to_uninstall, verbose=False, dry_run=False, pip_flags=[]):\n \"\"\"\n Install and uninstalls the given sets of modules.\n \"\"\"\n if not to_uninstall and not to_install:\n click.echo(\"Everything up-to-date\")\n\n pip_flags = pip_flags[:]\n if not verbose:\n pip_flags.append('-q')\n\n if to_uninstall:\n if dry_run:\n click.echo(\"Would uninstall:\")\n for pkg in to_uninstall:\n click.echo(\" {}\".format(pkg))\n else:\n pip.main([\"uninstall\", '-y'] + pip_flags + [pkg for pkg in to_uninstall])\n\n if to_install:\n if dry_run:\n click.echo(\"Would install:\")\n for pkg in to_install:\n click.echo(\" {}\".format(pkg))\n else:\n return pip.main([\"install\"] + pip_flags + [pkg for pkg in to_install])\n return 0\n", "path": "piptools/sync.py"}]}
| 2,339 | 543 |
gh_patches_debug_16207
|
rasdani/github-patches
|
git_diff
|
ckan__ckan-5024
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
RFE: allow Reply-To header on emails
### CKAN Version if known (or site URL)
2.7.3
### Please describe the expected behaviour
We would like to send system emails that come from a real address (so we can catch bounces etc), but which don't reply to a real address when used by humans (ie use a 'no-reply' address as the Reply-To header).
### Please describe the actual behaviour
Only the 'From' address is configurable.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ckan/lib/mailer.py`
Content:
```
1 # encoding: utf-8
2
3 import codecs
4 import os
5 import smtplib
6 import socket
7 import logging
8 import uuid
9 from time import time
10 from email.mime.text import MIMEText
11 from email.header import Header
12 from email import Utils
13
14 from ckan.common import config
15 import ckan.common
16 from six import text_type
17
18 import ckan
19 import ckan.model as model
20 import ckan.lib.helpers as h
21 from ckan.lib.base import render_jinja2
22
23 from ckan.common import _
24
25 log = logging.getLogger(__name__)
26
27
28 class MailerException(Exception):
29 pass
30
31
32 def _mail_recipient(recipient_name, recipient_email,
33 sender_name, sender_url, subject,
34 body, headers=None):
35
36 if not headers:
37 headers = {}
38
39 mail_from = config.get('smtp.mail_from')
40 msg = MIMEText(body.encode('utf-8'), 'plain', 'utf-8')
41 for k, v in headers.items():
42 if k in msg.keys():
43 msg.replace_header(k, v)
44 else:
45 msg.add_header(k, v)
46 subject = Header(subject.encode('utf-8'), 'utf-8')
47 msg['Subject'] = subject
48 msg['From'] = _("%s <%s>") % (sender_name, mail_from)
49 recipient = u"%s <%s>" % (recipient_name, recipient_email)
50 msg['To'] = Header(recipient, 'utf-8')
51 msg['Date'] = Utils.formatdate(time())
52 msg['X-Mailer'] = "CKAN %s" % ckan.__version__
53
54 # Send the email using Python's smtplib.
55 smtp_connection = smtplib.SMTP()
56 if 'smtp.test_server' in config:
57 # If 'smtp.test_server' is configured we assume we're running tests,
58 # and don't use the smtp.server, starttls, user, password etc. options.
59 smtp_server = config['smtp.test_server']
60 smtp_starttls = False
61 smtp_user = None
62 smtp_password = None
63 else:
64 smtp_server = config.get('smtp.server', 'localhost')
65 smtp_starttls = ckan.common.asbool(
66 config.get('smtp.starttls'))
67 smtp_user = config.get('smtp.user')
68 smtp_password = config.get('smtp.password')
69
70 try:
71 smtp_connection.connect(smtp_server)
72 except socket.error as e:
73 log.exception(e)
74 raise MailerException('SMTP server could not be connected to: "%s" %s'
75 % (smtp_server, e))
76 try:
77 # Identify ourselves and prompt the server for supported features.
78 smtp_connection.ehlo()
79
80 # If 'smtp.starttls' is on in CKAN config, try to put the SMTP
81 # connection into TLS mode.
82 if smtp_starttls:
83 if smtp_connection.has_extn('STARTTLS'):
84 smtp_connection.starttls()
85 # Re-identify ourselves over TLS connection.
86 smtp_connection.ehlo()
87 else:
88 raise MailerException("SMTP server does not support STARTTLS")
89
90 # If 'smtp.user' is in CKAN config, try to login to SMTP server.
91 if smtp_user:
92 assert smtp_password, ("If smtp.user is configured then "
93 "smtp.password must be configured as well.")
94 smtp_connection.login(smtp_user, smtp_password)
95
96 smtp_connection.sendmail(mail_from, [recipient_email], msg.as_string())
97 log.info("Sent email to {0}".format(recipient_email))
98
99 except smtplib.SMTPException as e:
100 msg = '%r' % e
101 log.exception(msg)
102 raise MailerException(msg)
103 finally:
104 smtp_connection.quit()
105
106
107 def mail_recipient(recipient_name, recipient_email, subject,
108 body, headers={}):
109 site_title = config.get('ckan.site_title')
110 site_url = config.get('ckan.site_url')
111 return _mail_recipient(recipient_name, recipient_email,
112 site_title, site_url, subject, body,
113 headers=headers)
114
115
116 def mail_user(recipient, subject, body, headers={}):
117 if (recipient.email is None) or not len(recipient.email):
118 raise MailerException(_("No recipient email address available!"))
119 mail_recipient(recipient.display_name, recipient.email, subject,
120 body, headers=headers)
121
122
123 def get_reset_link_body(user):
124 extra_vars = {
125 'reset_link': get_reset_link(user),
126 'site_title': config.get('ckan.site_title'),
127 'site_url': config.get('ckan.site_url'),
128 'user_name': user.name,
129 }
130 # NOTE: This template is translated
131 return render_jinja2('emails/reset_password.txt', extra_vars)
132
133
134 def get_invite_body(user, group_dict=None, role=None):
135 if group_dict:
136 group_type = (_('organization') if group_dict['is_organization']
137 else _('group'))
138
139 extra_vars = {
140 'reset_link': get_reset_link(user),
141 'site_title': config.get('ckan.site_title'),
142 'site_url': config.get('ckan.site_url'),
143 'user_name': user.name,
144 }
145 if role:
146 extra_vars['role_name'] = h.roles_translated().get(role, _(role))
147 if group_dict:
148 extra_vars['group_type'] = group_type
149 extra_vars['group_title'] = group_dict.get('title')
150
151 # NOTE: This template is translated
152 return render_jinja2('emails/invite_user.txt', extra_vars)
153
154
155 def get_reset_link(user):
156 return h.url_for(controller='user',
157 action='perform_reset',
158 id=user.id,
159 key=user.reset_key,
160 qualified=True)
161
162
163 def send_reset_link(user):
164 create_reset_key(user)
165 body = get_reset_link_body(user)
166 extra_vars = {
167 'site_title': config.get('ckan.site_title')
168 }
169 subject = render_jinja2('emails/reset_password_subject.txt', extra_vars)
170
171 # Make sure we only use the first line
172 subject = subject.split('\n')[0]
173
174 mail_user(user, subject, body)
175
176
177 def send_invite(user, group_dict=None, role=None):
178 create_reset_key(user)
179 body = get_invite_body(user, group_dict, role)
180 extra_vars = {
181 'site_title': config.get('ckan.site_title')
182 }
183 subject = render_jinja2('emails/invite_user_subject.txt', extra_vars)
184
185 # Make sure we only use the first line
186 subject = subject.split('\n')[0]
187
188 mail_user(user, subject, body)
189
190
191 def create_reset_key(user):
192 user.reset_key = text_type(make_key())
193 model.repo.commit_and_remove()
194
195
196 def make_key():
197 return codecs.encode(os.urandom(16), 'hex')
198
199
200 def verify_reset_link(user, key):
201 if not key:
202 return False
203 if not user.reset_key or len(user.reset_key) < 5:
204 return False
205 return key.strip() == user.reset_key
206
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ckan/lib/mailer.py b/ckan/lib/mailer.py
--- a/ckan/lib/mailer.py
+++ b/ckan/lib/mailer.py
@@ -37,6 +37,7 @@
headers = {}
mail_from = config.get('smtp.mail_from')
+ reply_to = config.get('smtp.reply_to')
msg = MIMEText(body.encode('utf-8'), 'plain', 'utf-8')
for k, v in headers.items():
if k in msg.keys():
@@ -50,6 +51,8 @@
msg['To'] = Header(recipient, 'utf-8')
msg['Date'] = Utils.formatdate(time())
msg['X-Mailer'] = "CKAN %s" % ckan.__version__
+ if reply_to and reply_to != '':
+ msg['Reply-to'] = reply_to
# Send the email using Python's smtplib.
smtp_connection = smtplib.SMTP()
|
{"golden_diff": "diff --git a/ckan/lib/mailer.py b/ckan/lib/mailer.py\n--- a/ckan/lib/mailer.py\n+++ b/ckan/lib/mailer.py\n@@ -37,6 +37,7 @@\n headers = {}\n \n mail_from = config.get('smtp.mail_from')\n+ reply_to = config.get('smtp.reply_to')\n msg = MIMEText(body.encode('utf-8'), 'plain', 'utf-8')\n for k, v in headers.items():\n if k in msg.keys():\n@@ -50,6 +51,8 @@\n msg['To'] = Header(recipient, 'utf-8')\n msg['Date'] = Utils.formatdate(time())\n msg['X-Mailer'] = \"CKAN %s\" % ckan.__version__\n+ if reply_to and reply_to != '':\n+ msg['Reply-to'] = reply_to\n \n # Send the email using Python's smtplib.\n smtp_connection = smtplib.SMTP()\n", "issue": "RFE: allow Reply-To header on emails\n### CKAN Version if known (or site URL)\r\n\r\n2.7.3\r\n\r\n### Please describe the expected behaviour\r\n\r\nWe would like to send system emails that come from a real address (so we can catch bounces etc), but which don't reply to a real address when used by humans (ie use a 'no-reply' address as the Reply-To header).\r\n\r\n### Please describe the actual behaviour\r\n\r\nOnly the 'From' address is configurable.\r\n\n", "before_files": [{"content": "# encoding: utf-8\n\nimport codecs\nimport os\nimport smtplib\nimport socket\nimport logging\nimport uuid\nfrom time import time\nfrom email.mime.text import MIMEText\nfrom email.header import Header\nfrom email import Utils\n\nfrom ckan.common import config\nimport ckan.common\nfrom six import text_type\n\nimport ckan\nimport ckan.model as model\nimport ckan.lib.helpers as h\nfrom ckan.lib.base import render_jinja2\n\nfrom ckan.common import _\n\nlog = logging.getLogger(__name__)\n\n\nclass MailerException(Exception):\n pass\n\n\ndef _mail_recipient(recipient_name, recipient_email,\n sender_name, sender_url, subject,\n body, headers=None):\n\n if not headers:\n headers = {}\n\n mail_from = config.get('smtp.mail_from')\n msg = MIMEText(body.encode('utf-8'), 'plain', 'utf-8')\n for k, v in headers.items():\n if k in msg.keys():\n msg.replace_header(k, v)\n else:\n msg.add_header(k, v)\n subject = Header(subject.encode('utf-8'), 'utf-8')\n msg['Subject'] = subject\n msg['From'] = _(\"%s <%s>\") % (sender_name, mail_from)\n recipient = u\"%s <%s>\" % (recipient_name, recipient_email)\n msg['To'] = Header(recipient, 'utf-8')\n msg['Date'] = Utils.formatdate(time())\n msg['X-Mailer'] = \"CKAN %s\" % ckan.__version__\n\n # Send the email using Python's smtplib.\n smtp_connection = smtplib.SMTP()\n if 'smtp.test_server' in config:\n # If 'smtp.test_server' is configured we assume we're running tests,\n # and don't use the smtp.server, starttls, user, password etc. options.\n smtp_server = config['smtp.test_server']\n smtp_starttls = False\n smtp_user = None\n smtp_password = None\n else:\n smtp_server = config.get('smtp.server', 'localhost')\n smtp_starttls = ckan.common.asbool(\n config.get('smtp.starttls'))\n smtp_user = config.get('smtp.user')\n smtp_password = config.get('smtp.password')\n\n try:\n smtp_connection.connect(smtp_server)\n except socket.error as e:\n log.exception(e)\n raise MailerException('SMTP server could not be connected to: \"%s\" %s'\n % (smtp_server, e))\n try:\n # Identify ourselves and prompt the server for supported features.\n smtp_connection.ehlo()\n\n # If 'smtp.starttls' is on in CKAN config, try to put the SMTP\n # connection into TLS mode.\n if smtp_starttls:\n if smtp_connection.has_extn('STARTTLS'):\n smtp_connection.starttls()\n # Re-identify ourselves over TLS connection.\n smtp_connection.ehlo()\n else:\n raise MailerException(\"SMTP server does not support STARTTLS\")\n\n # If 'smtp.user' is in CKAN config, try to login to SMTP server.\n if smtp_user:\n assert smtp_password, (\"If smtp.user is configured then \"\n \"smtp.password must be configured as well.\")\n smtp_connection.login(smtp_user, smtp_password)\n\n smtp_connection.sendmail(mail_from, [recipient_email], msg.as_string())\n log.info(\"Sent email to {0}\".format(recipient_email))\n\n except smtplib.SMTPException as e:\n msg = '%r' % e\n log.exception(msg)\n raise MailerException(msg)\n finally:\n smtp_connection.quit()\n\n\ndef mail_recipient(recipient_name, recipient_email, subject,\n body, headers={}):\n site_title = config.get('ckan.site_title')\n site_url = config.get('ckan.site_url')\n return _mail_recipient(recipient_name, recipient_email,\n site_title, site_url, subject, body,\n headers=headers)\n\n\ndef mail_user(recipient, subject, body, headers={}):\n if (recipient.email is None) or not len(recipient.email):\n raise MailerException(_(\"No recipient email address available!\"))\n mail_recipient(recipient.display_name, recipient.email, subject,\n body, headers=headers)\n\n\ndef get_reset_link_body(user):\n extra_vars = {\n 'reset_link': get_reset_link(user),\n 'site_title': config.get('ckan.site_title'),\n 'site_url': config.get('ckan.site_url'),\n 'user_name': user.name,\n }\n # NOTE: This template is translated\n return render_jinja2('emails/reset_password.txt', extra_vars)\n\n\ndef get_invite_body(user, group_dict=None, role=None):\n if group_dict:\n group_type = (_('organization') if group_dict['is_organization']\n else _('group'))\n\n extra_vars = {\n 'reset_link': get_reset_link(user),\n 'site_title': config.get('ckan.site_title'),\n 'site_url': config.get('ckan.site_url'),\n 'user_name': user.name,\n }\n if role:\n extra_vars['role_name'] = h.roles_translated().get(role, _(role))\n if group_dict:\n extra_vars['group_type'] = group_type\n extra_vars['group_title'] = group_dict.get('title')\n\n # NOTE: This template is translated\n return render_jinja2('emails/invite_user.txt', extra_vars)\n\n\ndef get_reset_link(user):\n return h.url_for(controller='user',\n action='perform_reset',\n id=user.id,\n key=user.reset_key,\n qualified=True)\n\n\ndef send_reset_link(user):\n create_reset_key(user)\n body = get_reset_link_body(user)\n extra_vars = {\n 'site_title': config.get('ckan.site_title')\n }\n subject = render_jinja2('emails/reset_password_subject.txt', extra_vars)\n\n # Make sure we only use the first line\n subject = subject.split('\\n')[0]\n\n mail_user(user, subject, body)\n\n\ndef send_invite(user, group_dict=None, role=None):\n create_reset_key(user)\n body = get_invite_body(user, group_dict, role)\n extra_vars = {\n 'site_title': config.get('ckan.site_title')\n }\n subject = render_jinja2('emails/invite_user_subject.txt', extra_vars)\n\n # Make sure we only use the first line\n subject = subject.split('\\n')[0]\n\n mail_user(user, subject, body)\n\n\ndef create_reset_key(user):\n user.reset_key = text_type(make_key())\n model.repo.commit_and_remove()\n\n\ndef make_key():\n return codecs.encode(os.urandom(16), 'hex')\n\n\ndef verify_reset_link(user, key):\n if not key:\n return False\n if not user.reset_key or len(user.reset_key) < 5:\n return False\n return key.strip() == user.reset_key\n", "path": "ckan/lib/mailer.py"}], "after_files": [{"content": "# encoding: utf-8\n\nimport codecs\nimport os\nimport smtplib\nimport socket\nimport logging\nimport uuid\nfrom time import time\nfrom email.mime.text import MIMEText\nfrom email.header import Header\nfrom email import Utils\n\nfrom ckan.common import config\nimport ckan.common\nfrom six import text_type\n\nimport ckan\nimport ckan.model as model\nimport ckan.lib.helpers as h\nfrom ckan.lib.base import render_jinja2\n\nfrom ckan.common import _\n\nlog = logging.getLogger(__name__)\n\n\nclass MailerException(Exception):\n pass\n\n\ndef _mail_recipient(recipient_name, recipient_email,\n sender_name, sender_url, subject,\n body, headers=None):\n\n if not headers:\n headers = {}\n\n mail_from = config.get('smtp.mail_from')\n reply_to = config.get('smtp.reply_to')\n msg = MIMEText(body.encode('utf-8'), 'plain', 'utf-8')\n for k, v in headers.items():\n if k in msg.keys():\n msg.replace_header(k, v)\n else:\n msg.add_header(k, v)\n subject = Header(subject.encode('utf-8'), 'utf-8')\n msg['Subject'] = subject\n msg['From'] = _(\"%s <%s>\") % (sender_name, mail_from)\n recipient = u\"%s <%s>\" % (recipient_name, recipient_email)\n msg['To'] = Header(recipient, 'utf-8')\n msg['Date'] = Utils.formatdate(time())\n msg['X-Mailer'] = \"CKAN %s\" % ckan.__version__\n if reply_to and reply_to != '':\n msg['Reply-to'] = reply_to\n\n # Send the email using Python's smtplib.\n smtp_connection = smtplib.SMTP()\n if 'smtp.test_server' in config:\n # If 'smtp.test_server' is configured we assume we're running tests,\n # and don't use the smtp.server, starttls, user, password etc. options.\n smtp_server = config['smtp.test_server']\n smtp_starttls = False\n smtp_user = None\n smtp_password = None\n else:\n smtp_server = config.get('smtp.server', 'localhost')\n smtp_starttls = ckan.common.asbool(\n config.get('smtp.starttls'))\n smtp_user = config.get('smtp.user')\n smtp_password = config.get('smtp.password')\n\n try:\n smtp_connection.connect(smtp_server)\n except socket.error as e:\n log.exception(e)\n raise MailerException('SMTP server could not be connected to: \"%s\" %s'\n % (smtp_server, e))\n try:\n # Identify ourselves and prompt the server for supported features.\n smtp_connection.ehlo()\n\n # If 'smtp.starttls' is on in CKAN config, try to put the SMTP\n # connection into TLS mode.\n if smtp_starttls:\n if smtp_connection.has_extn('STARTTLS'):\n smtp_connection.starttls()\n # Re-identify ourselves over TLS connection.\n smtp_connection.ehlo()\n else:\n raise MailerException(\"SMTP server does not support STARTTLS\")\n\n # If 'smtp.user' is in CKAN config, try to login to SMTP server.\n if smtp_user:\n assert smtp_password, (\"If smtp.user is configured then \"\n \"smtp.password must be configured as well.\")\n smtp_connection.login(smtp_user, smtp_password)\n\n smtp_connection.sendmail(mail_from, [recipient_email], msg.as_string())\n log.info(\"Sent email to {0}\".format(recipient_email))\n\n except smtplib.SMTPException as e:\n msg = '%r' % e\n log.exception(msg)\n raise MailerException(msg)\n finally:\n smtp_connection.quit()\n\n\ndef mail_recipient(recipient_name, recipient_email, subject,\n body, headers={}):\n site_title = config.get('ckan.site_title')\n site_url = config.get('ckan.site_url')\n return _mail_recipient(recipient_name, recipient_email,\n site_title, site_url, subject, body,\n headers=headers)\n\n\ndef mail_user(recipient, subject, body, headers={}):\n if (recipient.email is None) or not len(recipient.email):\n raise MailerException(_(\"No recipient email address available!\"))\n mail_recipient(recipient.display_name, recipient.email, subject,\n body, headers=headers)\n\n\ndef get_reset_link_body(user):\n extra_vars = {\n 'reset_link': get_reset_link(user),\n 'site_title': config.get('ckan.site_title'),\n 'site_url': config.get('ckan.site_url'),\n 'user_name': user.name,\n }\n # NOTE: This template is translated\n return render_jinja2('emails/reset_password.txt', extra_vars)\n\n\ndef get_invite_body(user, group_dict=None, role=None):\n if group_dict:\n group_type = (_('organization') if group_dict['is_organization']\n else _('group'))\n\n extra_vars = {\n 'reset_link': get_reset_link(user),\n 'site_title': config.get('ckan.site_title'),\n 'site_url': config.get('ckan.site_url'),\n 'user_name': user.name,\n }\n if role:\n extra_vars['role_name'] = h.roles_translated().get(role, _(role))\n if group_dict:\n extra_vars['group_type'] = group_type\n extra_vars['group_title'] = group_dict.get('title')\n\n # NOTE: This template is translated\n return render_jinja2('emails/invite_user.txt', extra_vars)\n\n\ndef get_reset_link(user):\n return h.url_for(controller='user',\n action='perform_reset',\n id=user.id,\n key=user.reset_key,\n qualified=True)\n\n\ndef send_reset_link(user):\n create_reset_key(user)\n body = get_reset_link_body(user)\n extra_vars = {\n 'site_title': config.get('ckan.site_title')\n }\n subject = render_jinja2('emails/reset_password_subject.txt', extra_vars)\n\n # Make sure we only use the first line\n subject = subject.split('\\n')[0]\n\n mail_user(user, subject, body)\n\n\ndef send_invite(user, group_dict=None, role=None):\n create_reset_key(user)\n body = get_invite_body(user, group_dict, role)\n extra_vars = {\n 'site_title': config.get('ckan.site_title')\n }\n subject = render_jinja2('emails/invite_user_subject.txt', extra_vars)\n\n # Make sure we only use the first line\n subject = subject.split('\\n')[0]\n\n mail_user(user, subject, body)\n\n\ndef create_reset_key(user):\n user.reset_key = text_type(make_key())\n model.repo.commit_and_remove()\n\n\ndef make_key():\n return codecs.encode(os.urandom(16), 'hex')\n\n\ndef verify_reset_link(user, key):\n if not key:\n return False\n if not user.reset_key or len(user.reset_key) < 5:\n return False\n return key.strip() == user.reset_key\n", "path": "ckan/lib/mailer.py"}]}
| 2,375 | 215 |
gh_patches_debug_6445
|
rasdani/github-patches
|
git_diff
|
python__peps-765
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Incorrect rending of Martin von Löwis' name in the PEP index
If you look at https://www.python.org/dev/peps/ you will notice that for e.g. PEP 384, Martin's last name is rendered as "22. Löwis". It turns out the template has an `<ol>` tag that is set to start at 22 for part of Martin's last name.
```html
<ol class="first last loweralpha simple" start="22">
<li>Löwis</li>
</ol>
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pep0/pep.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """Code for handling object representation of a PEP."""
3 from __future__ import absolute_import
4 import re
5 import sys
6 import textwrap
7 import unicodedata
8
9 from email.parser import HeaderParser
10
11 from . import constants
12
13
14 class PEPError(Exception):
15
16 def __init__(self, error, pep_file, pep_number=None):
17 super(PEPError, self).__init__(error)
18 self.filename = pep_file
19 self.number = pep_number
20
21 def __str__(self):
22 error_msg = super(PEPError, self).__str__()
23 if self.number is not None:
24 return "PEP %d: %r" % (self.number, error_msg)
25 else:
26 return "(%s): %r" % (self.filename, error_msg)
27
28
29 class PEPParseError(PEPError):
30
31 pass
32
33
34 class Author(object):
35
36 """Represent PEP authors.
37
38 Attributes:
39
40 + first_last : str
41 The author's full name.
42
43 + last_first : str
44 Output the author's name in Last, First, Suffix order.
45
46 + first : str
47 The author's first name. A middle initial may be included.
48
49 + last : str
50 The author's last name.
51
52 + suffix : str
53 A person's suffix (can be the empty string).
54
55 + sort_by : str
56 Modification of the author's last name that should be used for
57 sorting.
58
59 + email : str
60 The author's email address.
61 """
62
63 def __init__(self, author_and_email_tuple):
64 """Parse the name and email address of an author."""
65 name, email = author_and_email_tuple
66 self.first_last = name.strip()
67 self.email = email.lower()
68 last_name_fragment, suffix = self._last_name(name)
69 name_sep = name.index(last_name_fragment)
70 self.first = name[:name_sep].rstrip()
71 self.last = last_name_fragment
72 self.suffix = suffix
73 if not self.first:
74 self.last_first = self.last
75 else:
76 self.last_first = u', '.join([self.last, self.first])
77 if self.suffix:
78 self.last_first += u', ' + self.suffix
79 if self.last == "van Rossum":
80 # Special case for our beloved BDFL. :)
81 if self.first == "Guido":
82 self.nick = "GvR"
83 elif self.first == "Just":
84 self.nick = "JvR"
85 else:
86 raise ValueError("unknown van Rossum %r!" % self)
87 self.last_first += " (%s)" % (self.nick,)
88 else:
89 self.nick = self.last
90
91 def __hash__(self):
92 return hash(self.first_last)
93
94 def __eq__(self, other):
95 return self.first_last == other.first_last
96
97 @property
98 def sort_by(self):
99 name_parts = self.last.split()
100 for index, part in enumerate(name_parts):
101 if part[0].isupper():
102 base = u' '.join(name_parts[index:]).lower()
103 break
104 else:
105 # If no capitals, use the whole string
106 base = self.last.lower()
107 return unicodedata.normalize('NFKD', base).encode('ASCII', 'ignore')
108
109 def _last_name(self, full_name):
110 """Find the last name (or nickname) of a full name.
111
112 If no last name (e.g, 'Aahz') then return the full name. If there is
113 a leading, lowercase portion to the last name (e.g., 'van' or 'von')
114 then include it. If there is a suffix (e.g., 'Jr.') that is appended
115 through a comma, then drop the suffix.
116
117 """
118 name_partition = full_name.partition(u',')
119 no_suffix = name_partition[0].strip()
120 suffix = name_partition[2].strip()
121 name_parts = no_suffix.split()
122 part_count = len(name_parts)
123 if part_count == 1 or part_count == 2:
124 return name_parts[-1], suffix
125 else:
126 assert part_count > 2
127 if name_parts[-2].islower():
128 return u' '.join(name_parts[-2:]), suffix
129 else:
130 return name_parts[-1], suffix
131
132
133 class PEP(object):
134
135 """Representation of PEPs.
136
137 Attributes:
138
139 + number : int
140 PEP number.
141
142 + title : str
143 PEP title.
144
145 + type_ : str
146 The type of PEP. Can only be one of the values from
147 PEP.type_values.
148
149 + status : str
150 The PEP's status. Value must be found in PEP.status_values.
151
152 + authors : Sequence(Author)
153 A list of the authors.
154 """
155
156 # The various RFC 822 headers that are supported.
157 # The second item in the nested tuples represents if the header is
158 # required or not.
159 headers = (('PEP', True), ('Title', True), ('Version', False),
160 ('Last-Modified', False), ('Author', True),
161 ('BDFL-Delegate', False),
162 ('Discussions-To', False), ('Status', True), ('Type', True),
163 ('Content-Type', False), ('Requires', False),
164 ('Created', True), ('Python-Version', False),
165 ('Post-History', False), ('Replaces', False),
166 ('Superseded-By', False), ('Resolution', False),
167 )
168 # Valid values for the Type header.
169 type_values = (u"Standards Track", u"Informational", u"Process")
170 # Valid values for the Status header.
171 # Active PEPs can only be for Informational or Process PEPs.
172 status_values = (u"Accepted", u"Provisional",
173 u"Rejected", u"Withdrawn", u"Deferred",
174 u"Final", u"Active", u"Draft", u"Superseded")
175
176 def __init__(self, pep_file):
177 """Init object from an open PEP file object."""
178 # Parse the headers.
179 self.filename = pep_file
180 pep_parser = HeaderParser()
181 metadata = pep_parser.parse(pep_file)
182 header_order = iter(self.headers)
183 try:
184 for header_name in metadata.keys():
185 current_header, required = next(header_order)
186 while header_name != current_header and not required:
187 current_header, required = next(header_order)
188 if header_name != current_header:
189 raise PEPError("did not deal with "
190 "%r before having to handle %r" %
191 (header_name, current_header),
192 pep_file.name)
193 except StopIteration:
194 raise PEPError("headers missing or out of order",
195 pep_file.name)
196 required = False
197 try:
198 while not required:
199 current_header, required = next(header_order)
200 else:
201 raise PEPError("PEP is missing its %r" % (current_header,),
202 pep_file.name)
203 except StopIteration:
204 pass
205 # 'PEP'.
206 try:
207 self.number = int(metadata['PEP'])
208 except ValueError:
209 raise PEPParseError("PEP number isn't an integer", pep_file.name)
210 # 'Title'.
211 self.title = metadata['Title']
212 # 'Type'.
213 type_ = metadata['Type']
214 if type_ not in self.type_values:
215 raise PEPError('%r is not a valid Type value' % (type_,),
216 pep_file.name, self.number)
217 self.type_ = type_
218 # 'Status'.
219 status = metadata['Status']
220 if status not in self.status_values:
221 if status == "April Fool!":
222 # See PEP 401 :)
223 status = "Rejected"
224 else:
225 raise PEPError("%r is not a valid Status value" %
226 (status,), pep_file.name, self.number)
227 # Special case for Active PEPs.
228 if (status == u"Active" and
229 self.type_ not in ("Process", "Informational")):
230 raise PEPError("Only Process and Informational PEPs may "
231 "have an Active status", pep_file.name,
232 self.number)
233 # Special case for Provisional PEPs.
234 if (status == u"Provisional" and self.type_ != "Standards Track"):
235 raise PEPError("Only Standards Track PEPs may "
236 "have a Provisional status", pep_file.name,
237 self.number)
238 self.status = status
239 # 'Author'.
240 authors_and_emails = self._parse_author(metadata['Author'])
241 if len(authors_and_emails) < 1:
242 raise PEPError("no authors found", pep_file.name,
243 self.number)
244 self.authors = list(map(Author, authors_and_emails))
245
246 def _parse_author(self, data):
247 """Return a list of author names and emails."""
248 # XXX Consider using email.utils.parseaddr (doesn't work with names
249 # lacking an email address.
250 angled = constants.text_type(r'(?P<author>.+?) <(?P<email>.+?)>')
251 paren = constants.text_type(r'(?P<email>.+?) \((?P<author>.+?)\)')
252 simple = constants.text_type(r'(?P<author>[^,]+)')
253 author_list = []
254 for regex in (angled, paren, simple):
255 # Watch out for commas separating multiple names.
256 regex += u'(,\s*)?'
257 for match in re.finditer(regex, data):
258 # Watch out for suffixes like 'Jr.' when they are comma-separated
259 # from the name and thus cause issues when *all* names are only
260 # separated by commas.
261 match_dict = match.groupdict()
262 author = match_dict['author']
263 if not author.partition(' ')[1] and author.endswith('.'):
264 prev_author = author_list.pop()
265 author = ', '.join([prev_author, author])
266 if u'email' not in match_dict:
267 email = ''
268 else:
269 email = match_dict['email']
270 author_list.append((author, email))
271 else:
272 # If authors were found then stop searching as only expect one
273 # style of author citation.
274 if author_list:
275 break
276 return author_list
277
278 @property
279 def type_abbr(self):
280 """Return the how the type is to be represented in the index."""
281 return self.type_[0].upper()
282
283 @property
284 def status_abbr(self):
285 """Return how the status should be represented in the index."""
286 if self.status in ('Draft', 'Active'):
287 return u' '
288 else:
289 return self.status[0].upper()
290
291 @property
292 def author_abbr(self):
293 """Return the author list as a comma-separated with only last names."""
294 return u', '.join(x.nick for x in self.authors)
295
296 @property
297 def title_abbr(self):
298 """Shorten the title to be no longer than the max title length."""
299 if len(self.title) <= constants.title_length:
300 return self.title
301 wrapped_title = textwrap.wrap(self.title, constants.title_length - 4)
302 return wrapped_title[0] + u' ...'
303
304 def __unicode__(self):
305 """Return the line entry for the PEP."""
306 pep_info = {'type': self.type_abbr, 'number': str(self.number),
307 'title': self.title_abbr, 'status': self.status_abbr,
308 'authors': self.author_abbr}
309 return constants.column_format % pep_info
310
311 if sys.version_info[0] > 2:
312 __str__ = __unicode__
313
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pep0/pep.py b/pep0/pep.py
--- a/pep0/pep.py
+++ b/pep0/pep.py
@@ -69,6 +69,9 @@
name_sep = name.index(last_name_fragment)
self.first = name[:name_sep].rstrip()
self.last = last_name_fragment
+ if self.last[1] == u'.':
+ # Add an escape to avoid docutils turning `v.` into `22.`.
+ self.last = u'\\' + self.last
self.suffix = suffix
if not self.first:
self.last_first = self.last
|
{"golden_diff": "diff --git a/pep0/pep.py b/pep0/pep.py\n--- a/pep0/pep.py\n+++ b/pep0/pep.py\n@@ -69,6 +69,9 @@\n name_sep = name.index(last_name_fragment)\n self.first = name[:name_sep].rstrip()\n self.last = last_name_fragment\n+ if self.last[1] == u'.':\n+ # Add an escape to avoid docutils turning `v.` into `22.`.\n+ self.last = u'\\\\' + self.last\n self.suffix = suffix\n if not self.first:\n self.last_first = self.last\n", "issue": "Incorrect rending of Martin von L\u00f6wis' name in the PEP index\nIf you look at https://www.python.org/dev/peps/ you will notice that for e.g. PEP 384, Martin's last name is rendered as \"22. L\u00f6wis\". It turns out the template has an `<ol>` tag that is set to start at 22 for part of Martin's last name.\r\n```html\r\n<ol class=\"first last loweralpha simple\" start=\"22\">\r\n<li>L\u00f6wis</li>\r\n</ol>\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"Code for handling object representation of a PEP.\"\"\"\nfrom __future__ import absolute_import\nimport re\nimport sys\nimport textwrap\nimport unicodedata\n\nfrom email.parser import HeaderParser\n\nfrom . import constants\n\n\nclass PEPError(Exception):\n\n def __init__(self, error, pep_file, pep_number=None):\n super(PEPError, self).__init__(error)\n self.filename = pep_file\n self.number = pep_number\n\n def __str__(self):\n error_msg = super(PEPError, self).__str__()\n if self.number is not None:\n return \"PEP %d: %r\" % (self.number, error_msg)\n else:\n return \"(%s): %r\" % (self.filename, error_msg)\n\n\nclass PEPParseError(PEPError):\n\n pass\n\n\nclass Author(object):\n\n \"\"\"Represent PEP authors.\n\n Attributes:\n\n + first_last : str\n The author's full name.\n\n + last_first : str\n Output the author's name in Last, First, Suffix order.\n\n + first : str\n The author's first name. A middle initial may be included.\n\n + last : str\n The author's last name.\n\n + suffix : str\n A person's suffix (can be the empty string).\n\n + sort_by : str\n Modification of the author's last name that should be used for\n sorting.\n\n + email : str\n The author's email address.\n \"\"\"\n\n def __init__(self, author_and_email_tuple):\n \"\"\"Parse the name and email address of an author.\"\"\"\n name, email = author_and_email_tuple\n self.first_last = name.strip()\n self.email = email.lower()\n last_name_fragment, suffix = self._last_name(name)\n name_sep = name.index(last_name_fragment)\n self.first = name[:name_sep].rstrip()\n self.last = last_name_fragment\n self.suffix = suffix\n if not self.first:\n self.last_first = self.last\n else:\n self.last_first = u', '.join([self.last, self.first])\n if self.suffix:\n self.last_first += u', ' + self.suffix\n if self.last == \"van Rossum\":\n # Special case for our beloved BDFL. :)\n if self.first == \"Guido\":\n self.nick = \"GvR\"\n elif self.first == \"Just\":\n self.nick = \"JvR\"\n else:\n raise ValueError(\"unknown van Rossum %r!\" % self)\n self.last_first += \" (%s)\" % (self.nick,)\n else:\n self.nick = self.last\n\n def __hash__(self):\n return hash(self.first_last)\n\n def __eq__(self, other):\n return self.first_last == other.first_last\n\n @property\n def sort_by(self):\n name_parts = self.last.split()\n for index, part in enumerate(name_parts):\n if part[0].isupper():\n base = u' '.join(name_parts[index:]).lower()\n break\n else:\n # If no capitals, use the whole string\n base = self.last.lower()\n return unicodedata.normalize('NFKD', base).encode('ASCII', 'ignore')\n\n def _last_name(self, full_name):\n \"\"\"Find the last name (or nickname) of a full name.\n\n If no last name (e.g, 'Aahz') then return the full name. If there is\n a leading, lowercase portion to the last name (e.g., 'van' or 'von')\n then include it. If there is a suffix (e.g., 'Jr.') that is appended\n through a comma, then drop the suffix.\n\n \"\"\"\n name_partition = full_name.partition(u',')\n no_suffix = name_partition[0].strip()\n suffix = name_partition[2].strip()\n name_parts = no_suffix.split()\n part_count = len(name_parts)\n if part_count == 1 or part_count == 2:\n return name_parts[-1], suffix\n else:\n assert part_count > 2\n if name_parts[-2].islower():\n return u' '.join(name_parts[-2:]), suffix\n else:\n return name_parts[-1], suffix\n\n\nclass PEP(object):\n\n \"\"\"Representation of PEPs.\n\n Attributes:\n\n + number : int\n PEP number.\n\n + title : str\n PEP title.\n\n + type_ : str\n The type of PEP. Can only be one of the values from\n PEP.type_values.\n\n + status : str\n The PEP's status. Value must be found in PEP.status_values.\n\n + authors : Sequence(Author)\n A list of the authors.\n \"\"\"\n\n # The various RFC 822 headers that are supported.\n # The second item in the nested tuples represents if the header is\n # required or not.\n headers = (('PEP', True), ('Title', True), ('Version', False),\n ('Last-Modified', False), ('Author', True),\n ('BDFL-Delegate', False),\n ('Discussions-To', False), ('Status', True), ('Type', True),\n ('Content-Type', False), ('Requires', False),\n ('Created', True), ('Python-Version', False),\n ('Post-History', False), ('Replaces', False),\n ('Superseded-By', False), ('Resolution', False),\n )\n # Valid values for the Type header.\n type_values = (u\"Standards Track\", u\"Informational\", u\"Process\")\n # Valid values for the Status header.\n # Active PEPs can only be for Informational or Process PEPs.\n status_values = (u\"Accepted\", u\"Provisional\",\n u\"Rejected\", u\"Withdrawn\", u\"Deferred\",\n u\"Final\", u\"Active\", u\"Draft\", u\"Superseded\")\n\n def __init__(self, pep_file):\n \"\"\"Init object from an open PEP file object.\"\"\"\n # Parse the headers.\n self.filename = pep_file\n pep_parser = HeaderParser()\n metadata = pep_parser.parse(pep_file)\n header_order = iter(self.headers)\n try:\n for header_name in metadata.keys():\n current_header, required = next(header_order)\n while header_name != current_header and not required:\n current_header, required = next(header_order)\n if header_name != current_header:\n raise PEPError(\"did not deal with \"\n \"%r before having to handle %r\" %\n (header_name, current_header),\n pep_file.name)\n except StopIteration:\n raise PEPError(\"headers missing or out of order\",\n pep_file.name)\n required = False\n try:\n while not required:\n current_header, required = next(header_order)\n else:\n raise PEPError(\"PEP is missing its %r\" % (current_header,),\n pep_file.name)\n except StopIteration:\n pass\n # 'PEP'.\n try:\n self.number = int(metadata['PEP'])\n except ValueError:\n raise PEPParseError(\"PEP number isn't an integer\", pep_file.name)\n # 'Title'.\n self.title = metadata['Title']\n # 'Type'.\n type_ = metadata['Type']\n if type_ not in self.type_values:\n raise PEPError('%r is not a valid Type value' % (type_,),\n pep_file.name, self.number)\n self.type_ = type_\n # 'Status'.\n status = metadata['Status']\n if status not in self.status_values:\n if status == \"April Fool!\":\n # See PEP 401 :)\n status = \"Rejected\"\n else:\n raise PEPError(\"%r is not a valid Status value\" %\n (status,), pep_file.name, self.number)\n # Special case for Active PEPs.\n if (status == u\"Active\" and\n self.type_ not in (\"Process\", \"Informational\")):\n raise PEPError(\"Only Process and Informational PEPs may \"\n \"have an Active status\", pep_file.name,\n self.number)\n # Special case for Provisional PEPs.\n if (status == u\"Provisional\" and self.type_ != \"Standards Track\"):\n raise PEPError(\"Only Standards Track PEPs may \"\n \"have a Provisional status\", pep_file.name,\n self.number)\n self.status = status\n # 'Author'.\n authors_and_emails = self._parse_author(metadata['Author'])\n if len(authors_and_emails) < 1:\n raise PEPError(\"no authors found\", pep_file.name,\n self.number)\n self.authors = list(map(Author, authors_and_emails))\n\n def _parse_author(self, data):\n \"\"\"Return a list of author names and emails.\"\"\"\n # XXX Consider using email.utils.parseaddr (doesn't work with names\n # lacking an email address.\n angled = constants.text_type(r'(?P<author>.+?) <(?P<email>.+?)>')\n paren = constants.text_type(r'(?P<email>.+?) \\((?P<author>.+?)\\)')\n simple = constants.text_type(r'(?P<author>[^,]+)')\n author_list = []\n for regex in (angled, paren, simple):\n # Watch out for commas separating multiple names.\n regex += u'(,\\s*)?'\n for match in re.finditer(regex, data):\n # Watch out for suffixes like 'Jr.' when they are comma-separated\n # from the name and thus cause issues when *all* names are only\n # separated by commas.\n match_dict = match.groupdict()\n author = match_dict['author']\n if not author.partition(' ')[1] and author.endswith('.'):\n prev_author = author_list.pop()\n author = ', '.join([prev_author, author])\n if u'email' not in match_dict:\n email = ''\n else:\n email = match_dict['email']\n author_list.append((author, email))\n else:\n # If authors were found then stop searching as only expect one\n # style of author citation.\n if author_list:\n break\n return author_list\n\n @property\n def type_abbr(self):\n \"\"\"Return the how the type is to be represented in the index.\"\"\"\n return self.type_[0].upper()\n\n @property\n def status_abbr(self):\n \"\"\"Return how the status should be represented in the index.\"\"\"\n if self.status in ('Draft', 'Active'):\n return u' '\n else:\n return self.status[0].upper()\n\n @property\n def author_abbr(self):\n \"\"\"Return the author list as a comma-separated with only last names.\"\"\"\n return u', '.join(x.nick for x in self.authors)\n\n @property\n def title_abbr(self):\n \"\"\"Shorten the title to be no longer than the max title length.\"\"\"\n if len(self.title) <= constants.title_length:\n return self.title\n wrapped_title = textwrap.wrap(self.title, constants.title_length - 4)\n return wrapped_title[0] + u' ...'\n\n def __unicode__(self):\n \"\"\"Return the line entry for the PEP.\"\"\"\n pep_info = {'type': self.type_abbr, 'number': str(self.number),\n 'title': self.title_abbr, 'status': self.status_abbr,\n 'authors': self.author_abbr}\n return constants.column_format % pep_info\n\n if sys.version_info[0] > 2:\n __str__ = __unicode__\n", "path": "pep0/pep.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"Code for handling object representation of a PEP.\"\"\"\nfrom __future__ import absolute_import\nimport re\nimport sys\nimport textwrap\nimport unicodedata\n\nfrom email.parser import HeaderParser\n\nfrom . import constants\n\n\nclass PEPError(Exception):\n\n def __init__(self, error, pep_file, pep_number=None):\n super(PEPError, self).__init__(error)\n self.filename = pep_file\n self.number = pep_number\n\n def __str__(self):\n error_msg = super(PEPError, self).__str__()\n if self.number is not None:\n return \"PEP %d: %r\" % (self.number, error_msg)\n else:\n return \"(%s): %r\" % (self.filename, error_msg)\n\n\nclass PEPParseError(PEPError):\n\n pass\n\n\nclass Author(object):\n\n \"\"\"Represent PEP authors.\n\n Attributes:\n\n + first_last : str\n The author's full name.\n\n + last_first : str\n Output the author's name in Last, First, Suffix order.\n\n + first : str\n The author's first name. A middle initial may be included.\n\n + last : str\n The author's last name.\n\n + suffix : str\n A person's suffix (can be the empty string).\n\n + sort_by : str\n Modification of the author's last name that should be used for\n sorting.\n\n + email : str\n The author's email address.\n \"\"\"\n\n def __init__(self, author_and_email_tuple):\n \"\"\"Parse the name and email address of an author.\"\"\"\n name, email = author_and_email_tuple\n self.first_last = name.strip()\n self.email = email.lower()\n last_name_fragment, suffix = self._last_name(name)\n name_sep = name.index(last_name_fragment)\n self.first = name[:name_sep].rstrip()\n self.last = last_name_fragment\n if self.last[1] == u'.':\n # Add an escape to avoid docutils turning `v.` into `22.`.\n self.last = u'\\\\' + self.last\n self.suffix = suffix\n if not self.first:\n self.last_first = self.last\n else:\n self.last_first = u', '.join([self.last, self.first])\n if self.suffix:\n self.last_first += u', ' + self.suffix\n if self.last == \"van Rossum\":\n # Special case for our beloved BDFL. :)\n if self.first == \"Guido\":\n self.nick = \"GvR\"\n elif self.first == \"Just\":\n self.nick = \"JvR\"\n else:\n raise ValueError(\"unknown van Rossum %r!\" % self)\n self.last_first += \" (%s)\" % (self.nick,)\n else:\n self.nick = self.last\n\n def __hash__(self):\n return hash(self.first_last)\n\n def __eq__(self, other):\n return self.first_last == other.first_last\n\n @property\n def sort_by(self):\n name_parts = self.last.split()\n for index, part in enumerate(name_parts):\n if part[0].isupper():\n base = u' '.join(name_parts[index:]).lower()\n break\n else:\n # If no capitals, use the whole string\n base = self.last.lower()\n return unicodedata.normalize('NFKD', base).encode('ASCII', 'ignore')\n\n def _last_name(self, full_name):\n \"\"\"Find the last name (or nickname) of a full name.\n\n If no last name (e.g, 'Aahz') then return the full name. If there is\n a leading, lowercase portion to the last name (e.g., 'van' or 'von')\n then include it. If there is a suffix (e.g., 'Jr.') that is appended\n through a comma, then drop the suffix.\n\n \"\"\"\n name_partition = full_name.partition(u',')\n no_suffix = name_partition[0].strip()\n suffix = name_partition[2].strip()\n name_parts = no_suffix.split()\n part_count = len(name_parts)\n if part_count == 1 or part_count == 2:\n return name_parts[-1], suffix\n else:\n assert part_count > 2\n if name_parts[-2].islower():\n return u' '.join(name_parts[-2:]), suffix\n else:\n return name_parts[-1], suffix\n\n\nclass PEP(object):\n\n \"\"\"Representation of PEPs.\n\n Attributes:\n\n + number : int\n PEP number.\n\n + title : str\n PEP title.\n\n + type_ : str\n The type of PEP. Can only be one of the values from\n PEP.type_values.\n\n + status : str\n The PEP's status. Value must be found in PEP.status_values.\n\n + authors : Sequence(Author)\n A list of the authors.\n \"\"\"\n\n # The various RFC 822 headers that are supported.\n # The second item in the nested tuples represents if the header is\n # required or not.\n headers = (('PEP', True), ('Title', True), ('Version', False),\n ('Last-Modified', False), ('Author', True),\n ('BDFL-Delegate', False),\n ('Discussions-To', False), ('Status', True), ('Type', True),\n ('Content-Type', False), ('Requires', False),\n ('Created', True), ('Python-Version', False),\n ('Post-History', False), ('Replaces', False),\n ('Superseded-By', False), ('Resolution', False),\n )\n # Valid values for the Type header.\n type_values = (u\"Standards Track\", u\"Informational\", u\"Process\")\n # Valid values for the Status header.\n # Active PEPs can only be for Informational or Process PEPs.\n status_values = (u\"Accepted\", u\"Provisional\",\n u\"Rejected\", u\"Withdrawn\", u\"Deferred\",\n u\"Final\", u\"Active\", u\"Draft\", u\"Superseded\")\n\n def __init__(self, pep_file):\n \"\"\"Init object from an open PEP file object.\"\"\"\n # Parse the headers.\n self.filename = pep_file\n pep_parser = HeaderParser()\n metadata = pep_parser.parse(pep_file)\n header_order = iter(self.headers)\n try:\n for header_name in metadata.keys():\n current_header, required = next(header_order)\n while header_name != current_header and not required:\n current_header, required = next(header_order)\n if header_name != current_header:\n raise PEPError(\"did not deal with \"\n \"%r before having to handle %r\" %\n (header_name, current_header),\n pep_file.name)\n except StopIteration:\n raise PEPError(\"headers missing or out of order\",\n pep_file.name)\n required = False\n try:\n while not required:\n current_header, required = next(header_order)\n else:\n raise PEPError(\"PEP is missing its %r\" % (current_header,),\n pep_file.name)\n except StopIteration:\n pass\n # 'PEP'.\n try:\n self.number = int(metadata['PEP'])\n except ValueError:\n raise PEPParseError(\"PEP number isn't an integer\", pep_file.name)\n # 'Title'.\n self.title = metadata['Title']\n # 'Type'.\n type_ = metadata['Type']\n if type_ not in self.type_values:\n raise PEPError('%r is not a valid Type value' % (type_,),\n pep_file.name, self.number)\n self.type_ = type_\n # 'Status'.\n status = metadata['Status']\n if status not in self.status_values:\n if status == \"April Fool!\":\n # See PEP 401 :)\n status = \"Rejected\"\n else:\n raise PEPError(\"%r is not a valid Status value\" %\n (status,), pep_file.name, self.number)\n # Special case for Active PEPs.\n if (status == u\"Active\" and\n self.type_ not in (\"Process\", \"Informational\")):\n raise PEPError(\"Only Process and Informational PEPs may \"\n \"have an Active status\", pep_file.name,\n self.number)\n # Special case for Provisional PEPs.\n if (status == u\"Provisional\" and self.type_ != \"Standards Track\"):\n raise PEPError(\"Only Standards Track PEPs may \"\n \"have a Provisional status\", pep_file.name,\n self.number)\n self.status = status\n # 'Author'.\n authors_and_emails = self._parse_author(metadata['Author'])\n if len(authors_and_emails) < 1:\n raise PEPError(\"no authors found\", pep_file.name,\n self.number)\n self.authors = list(map(Author, authors_and_emails))\n\n def _parse_author(self, data):\n \"\"\"Return a list of author names and emails.\"\"\"\n # XXX Consider using email.utils.parseaddr (doesn't work with names\n # lacking an email address.\n angled = constants.text_type(r'(?P<author>.+?) <(?P<email>.+?)>')\n paren = constants.text_type(r'(?P<email>.+?) \\((?P<author>.+?)\\)')\n simple = constants.text_type(r'(?P<author>[^,]+)')\n author_list = []\n for regex in (angled, paren, simple):\n # Watch out for commas separating multiple names.\n regex += u'(,\\s*)?'\n for match in re.finditer(regex, data):\n # Watch out for suffixes like 'Jr.' when they are comma-separated\n # from the name and thus cause issues when *all* names are only\n # separated by commas.\n match_dict = match.groupdict()\n author = match_dict['author']\n if not author.partition(' ')[1] and author.endswith('.'):\n prev_author = author_list.pop()\n author = ', '.join([prev_author, author])\n if u'email' not in match_dict:\n email = ''\n else:\n email = match_dict['email']\n author_list.append((author, email))\n else:\n # If authors were found then stop searching as only expect one\n # style of author citation.\n if author_list:\n break\n return author_list\n\n @property\n def type_abbr(self):\n \"\"\"Return the how the type is to be represented in the index.\"\"\"\n return self.type_[0].upper()\n\n @property\n def status_abbr(self):\n \"\"\"Return how the status should be represented in the index.\"\"\"\n if self.status in ('Draft', 'Active'):\n return u' '\n else:\n return self.status[0].upper()\n\n @property\n def author_abbr(self):\n \"\"\"Return the author list as a comma-separated with only last names.\"\"\"\n return u', '.join(x.nick for x in self.authors)\n\n @property\n def title_abbr(self):\n \"\"\"Shorten the title to be no longer than the max title length.\"\"\"\n if len(self.title) <= constants.title_length:\n return self.title\n wrapped_title = textwrap.wrap(self.title, constants.title_length - 4)\n return wrapped_title[0] + u' ...'\n\n def __unicode__(self):\n \"\"\"Return the line entry for the PEP.\"\"\"\n pep_info = {'type': self.type_abbr, 'number': str(self.number),\n 'title': self.title_abbr, 'status': self.status_abbr,\n 'authors': self.author_abbr}\n return constants.column_format % pep_info\n\n if sys.version_info[0] > 2:\n __str__ = __unicode__\n", "path": "pep0/pep.py"}]}
| 3,761 | 152 |
gh_patches_debug_15109
|
rasdani/github-patches
|
git_diff
|
mne-tools__mne-bids-1112
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cut 0.12 release
In preparation for the MNE-Python 1.3 release alongside with new standalone installers that will also feature the M E-BIDS-Pipeline, we should soon make a new release of MNE-BIDS (within the next two weeks or so?)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mne_bids/__init__.py`
Content:
```
1 """MNE software for easily interacting with BIDS compatible datasets."""
2
3 __version__ = '0.12.dev0'
4 from mne_bids import commands
5 from mne_bids.report import make_report
6 from mne_bids.path import (BIDSPath, get_datatypes, get_entity_vals,
7 print_dir_tree, get_entities_from_fname,
8 search_folder_for_text, get_bids_path_from_fname,
9 find_matching_paths)
10 from mne_bids.read import get_head_mri_trans, read_raw_bids
11 from mne_bids.utils import get_anonymization_daysback
12 from mne_bids.write import (make_dataset_description, write_anat,
13 write_raw_bids, mark_channels,
14 write_meg_calibration, write_meg_crosstalk,
15 get_anat_landmarks, anonymize_dataset)
16 from mne_bids.sidecar_updates import update_sidecar_json, update_anat_landmarks
17 from mne_bids.inspect import inspect_dataset
18 from mne_bids.dig import (template_to_head, convert_montage_to_ras,
19 convert_montage_to_mri)
20
```
Path: `doc/conf.py`
Content:
```
1 """Configure details for documentation with sphinx."""
2 import os
3 import sys
4 from datetime import date
5
6 import sphinx_gallery # noqa: F401
7 from sphinx_gallery.sorting import ExampleTitleSortKey
8
9 import mne_bids
10
11
12 # If extensions (or modules to document with autodoc) are in another directory,
13 # add these directories to sys.path here. If the directory is relative to the
14 # documentation root, use os.path.abspath to make it absolute, like shown here.
15 curdir = os.path.dirname(__file__)
16 sys.path.append(os.path.abspath(os.path.join(curdir, '..', 'mne_bids')))
17 sys.path.append(os.path.abspath(os.path.join(curdir, 'sphinxext')))
18
19
20 # -- General configuration ------------------------------------------------
21
22 # If your documentation needs a minimal Sphinx version, state it here.
23 #
24 # needs_sphinx = '1.0'
25
26 # Add any Sphinx extension module names here, as strings. They can be
27 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
28 # ones.
29 extensions = [
30 'sphinx.ext.githubpages',
31 'sphinx.ext.autodoc',
32 'sphinx.ext.mathjax',
33 'sphinx.ext.viewcode',
34 'sphinx.ext.autosummary',
35 'sphinx.ext.doctest',
36 'sphinx.ext.intersphinx',
37 'sphinx_gallery.gen_gallery',
38 'numpydoc',
39 'sphinx_copybutton',
40 'gen_cli', # custom extension, see ./sphinxext/gen_cli.py
41 'gh_substitutions', # custom extension, see ./sphinxext/gh_substitutions.py
42 ]
43
44 # configure sphinx-copybutton
45 copybutton_prompt_text = r">>> |\.\.\. |\$ "
46 copybutton_prompt_is_regexp = True
47
48 # configure numpydoc
49 numpydoc_xref_param_type = True
50 numpydoc_class_members_toctree = False
51 numpydoc_attributes_as_param_list = True
52 numpydoc_xref_aliases = {
53 'BIDSPath': ':class:`BIDSPath <mne_bids.BIDSPath>`',
54 'path-like': ':term:`path-like <mne:path-like>`',
55 'array-like': ':term:`array_like <numpy:array_like>`',
56 'int': ':class:`int <python:int>`',
57 'bool': ':class:`bool <python:bool>`',
58 'float': ':class:`float <python:float>`',
59 'list': ':class:`list <python:list>`',
60 'tuple': ':class:`tuple <python:tuple>`',
61 'NibabelImageObject': 'nibabel.spatialimages.SpatialImage',
62 }
63 numpydoc_xref_ignore = {
64 # words
65 'instance', 'instances', 'of'
66 }
67
68
69 # generate autosummary even if no references
70 autosummary_generate = True
71 autodoc_default_options = {'inherited-members': None}
72 default_role = 'autolink' # XXX silently allows bad syntax, someone should fix
73
74 # configure linkcheck
75 # https://sphinx-doc.org/en/master/usage/configuration.html?#options-for-the-linkcheck-builder
76 linkcheck_retries = 2
77 linkcheck_rate_limit_timeout = 15.0
78 linkcheck_ignore = [
79 r'https://www.researchgate.net/profile/.*',
80 ]
81
82 # The suffix(es) of source filenames.
83 # You can specify multiple suffix as a list of string:
84 #
85 # source_suffix = ['.rst', '.md']
86 source_suffix = '.rst'
87
88 # The master toctree document.
89 master_doc = 'index'
90
91 # General information about the project.
92 project = u'MNE-BIDS'
93 td = date.today()
94 copyright = u'2017-%s, MNE Developers. Last updated on %s' % (td.year,
95 td.isoformat())
96
97 author = u'MNE Developers'
98
99 # The version info for the project you're documenting, acts as replacement for
100 # |version| and |release|, also used in various other places throughout the
101 # built documents.
102 #
103 # The short X.Y version.
104 version = mne_bids.__version__
105 # The full version, including alpha/beta/rc tags.
106 release = version
107
108 # List of patterns, relative to source directory, that match files and
109 # directories to ignore when looking for source files.
110 # This patterns also effect to html_static_path and html_extra_path
111 exclude_patterns = ['auto_examples/index.rst', '_build', 'Thumbs.db',
112 '.DS_Store']
113
114 # HTML options (e.g., theme)
115 html_show_sourcelink = False
116 html_copy_source = False
117
118 html_theme = 'pydata_sphinx_theme'
119
120 # Add any paths that contain templates here, relative to this directory.
121 templates_path = ['_templates']
122 html_static_path = ['_static']
123 html_css_files = ['style.css']
124
125 # Theme options are theme-specific and customize the look and feel of a theme
126 # further. For a list of options available for each theme, see the
127 # documentation.
128 html_theme_options = {
129 'icon_links': [
130 dict(name='GitHub',
131 url='https://github.com/mne-tools/mne-bids',
132 icon='fab fa-github-square'),
133 dict(name='Discourse',
134 url='https://mne.discourse.group/tags/mne-bids',
135 icon='fab fa-discourse'),
136 ],
137 'icon_links_label': 'Quick Links', # for screen reader
138 'use_edit_page_button': False,
139 'navigation_with_keys': False,
140 'show_toc_level': 1,
141 'navbar_end': ['version-switcher', 'navbar-icon-links'],
142 }
143
144 html_context = {
145 'versions_dropdown': {
146 'dev': 'v0.12 (devel)',
147 'stable': 'v0.11 (stable)',
148 'v0.10': 'v0.10',
149 'v0.9': 'v0.9',
150 'v0.8': 'v0.8',
151 'v0.7': 'v0.7',
152 'v0.6': 'v0.6',
153 'v0.5': 'v0.5',
154 'v0.4': 'v0.4',
155 'v0.3': 'v0.3',
156 'v0.2': 'v0.2',
157 'v0.1': 'v0.1',
158 },
159 }
160
161 html_sidebars = {}
162
163 # Example configuration for intersphinx: refer to the Python standard library.
164 intersphinx_mapping = {
165 'python': ('https://docs.python.org/3', None),
166 'mne': ('https://mne.tools/dev', None),
167 'numpy': ('https://numpy.org/devdocs', None),
168 'scipy': ('https://scipy.github.io/devdocs', None),
169 'matplotlib': ('https://matplotlib.org', None),
170 'nilearn': ('http://nilearn.github.io/stable', None),
171 'pandas': ('https://pandas.pydata.org/pandas-docs/dev', None),
172 'nibabel': ('https://nipy.org/nibabel', None),
173 }
174 intersphinx_timeout = 5
175
176 # Resolve binder filepath_prefix. From the docs:
177 # "A prefix to append to the filepath in the Binder links. You should use this
178 # if you will store your built documentation in a sub-folder of a repository,
179 # instead of in the root."
180 # we will store dev docs in a `dev` subdirectory and all other docs in a
181 # directory "v" + version_str. E.g., "v0.3"
182 if 'dev' in version:
183 filepath_prefix = 'dev'
184 else:
185 filepath_prefix = 'v{}'.format(version)
186
187 sphinx_gallery_conf = {
188 'doc_module': 'mne_bids',
189 'reference_url': {
190 'mne_bids': None,
191 },
192 'backreferences_dir': 'generated',
193 'examples_dirs': '../examples',
194 'within_subsection_order': ExampleTitleSortKey,
195 'gallery_dirs': 'auto_examples',
196 'filename_pattern': '^((?!sgskip).)*$',
197 'binder': {
198 # Required keys
199 'org': 'mne-tools',
200 'repo': 'mne-bids',
201 'branch': 'gh-pages', # noqa: E501 Can be any branch, tag, or commit hash. Use a branch that hosts your docs.
202 'binderhub_url': 'https://mybinder.org', # noqa: E501 Any URL of a binderhub deployment. Must be full URL (e.g. https://mybinder.org).
203 'filepath_prefix': filepath_prefix, # noqa: E501 A prefix to prepend to any filepaths in Binder links.
204 'dependencies': [
205 '../test_requirements.txt',
206 './requirements.txt',
207 ],
208 }
209 }
210
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/doc/conf.py b/doc/conf.py
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -143,8 +143,9 @@
html_context = {
'versions_dropdown': {
- 'dev': 'v0.12 (devel)',
- 'stable': 'v0.11 (stable)',
+ 'dev': 'v0.13 (devel)',
+ 'stable': 'v0.12 (stable)',
+ 'v0.11': 'v0.11',
'v0.10': 'v0.10',
'v0.9': 'v0.9',
'v0.8': 'v0.8',
diff --git a/mne_bids/__init__.py b/mne_bids/__init__.py
--- a/mne_bids/__init__.py
+++ b/mne_bids/__init__.py
@@ -1,6 +1,6 @@
"""MNE software for easily interacting with BIDS compatible datasets."""
-__version__ = '0.12.dev0'
+__version__ = '0.12'
from mne_bids import commands
from mne_bids.report import make_report
from mne_bids.path import (BIDSPath, get_datatypes, get_entity_vals,
|
{"golden_diff": "diff --git a/doc/conf.py b/doc/conf.py\n--- a/doc/conf.py\n+++ b/doc/conf.py\n@@ -143,8 +143,9 @@\n \n html_context = {\n 'versions_dropdown': {\n- 'dev': 'v0.12 (devel)',\n- 'stable': 'v0.11 (stable)',\n+ 'dev': 'v0.13 (devel)',\n+ 'stable': 'v0.12 (stable)',\n+ 'v0.11': 'v0.11',\n 'v0.10': 'v0.10',\n 'v0.9': 'v0.9',\n 'v0.8': 'v0.8',\ndiff --git a/mne_bids/__init__.py b/mne_bids/__init__.py\n--- a/mne_bids/__init__.py\n+++ b/mne_bids/__init__.py\n@@ -1,6 +1,6 @@\n \"\"\"MNE software for easily interacting with BIDS compatible datasets.\"\"\"\n \n-__version__ = '0.12.dev0'\n+__version__ = '0.12'\n from mne_bids import commands\n from mne_bids.report import make_report\n from mne_bids.path import (BIDSPath, get_datatypes, get_entity_vals,\n", "issue": "Cut 0.12 release\nIn preparation for the MNE-Python 1.3 release alongside with new standalone installers that will also feature the M E-BIDS-Pipeline, we should soon make a new release of MNE-BIDS (within the next two weeks or so?)\n", "before_files": [{"content": "\"\"\"MNE software for easily interacting with BIDS compatible datasets.\"\"\"\n\n__version__ = '0.12.dev0'\nfrom mne_bids import commands\nfrom mne_bids.report import make_report\nfrom mne_bids.path import (BIDSPath, get_datatypes, get_entity_vals,\n print_dir_tree, get_entities_from_fname,\n search_folder_for_text, get_bids_path_from_fname,\n find_matching_paths)\nfrom mne_bids.read import get_head_mri_trans, read_raw_bids\nfrom mne_bids.utils import get_anonymization_daysback\nfrom mne_bids.write import (make_dataset_description, write_anat,\n write_raw_bids, mark_channels,\n write_meg_calibration, write_meg_crosstalk,\n get_anat_landmarks, anonymize_dataset)\nfrom mne_bids.sidecar_updates import update_sidecar_json, update_anat_landmarks\nfrom mne_bids.inspect import inspect_dataset\nfrom mne_bids.dig import (template_to_head, convert_montage_to_ras,\n convert_montage_to_mri)\n", "path": "mne_bids/__init__.py"}, {"content": "\"\"\"Configure details for documentation with sphinx.\"\"\"\nimport os\nimport sys\nfrom datetime import date\n\nimport sphinx_gallery # noqa: F401\nfrom sphinx_gallery.sorting import ExampleTitleSortKey\n\nimport mne_bids\n\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\ncurdir = os.path.dirname(__file__)\nsys.path.append(os.path.abspath(os.path.join(curdir, '..', 'mne_bids')))\nsys.path.append(os.path.abspath(os.path.join(curdir, 'sphinxext')))\n\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n 'sphinx.ext.githubpages',\n 'sphinx.ext.autodoc',\n 'sphinx.ext.mathjax',\n 'sphinx.ext.viewcode',\n 'sphinx.ext.autosummary',\n 'sphinx.ext.doctest',\n 'sphinx.ext.intersphinx',\n 'sphinx_gallery.gen_gallery',\n 'numpydoc',\n 'sphinx_copybutton',\n 'gen_cli', # custom extension, see ./sphinxext/gen_cli.py\n 'gh_substitutions', # custom extension, see ./sphinxext/gh_substitutions.py\n]\n\n# configure sphinx-copybutton\ncopybutton_prompt_text = r\">>> |\\.\\.\\. |\\$ \"\ncopybutton_prompt_is_regexp = True\n\n# configure numpydoc\nnumpydoc_xref_param_type = True\nnumpydoc_class_members_toctree = False\nnumpydoc_attributes_as_param_list = True\nnumpydoc_xref_aliases = {\n 'BIDSPath': ':class:`BIDSPath <mne_bids.BIDSPath>`',\n 'path-like': ':term:`path-like <mne:path-like>`',\n 'array-like': ':term:`array_like <numpy:array_like>`',\n 'int': ':class:`int <python:int>`',\n 'bool': ':class:`bool <python:bool>`',\n 'float': ':class:`float <python:float>`',\n 'list': ':class:`list <python:list>`',\n 'tuple': ':class:`tuple <python:tuple>`',\n 'NibabelImageObject': 'nibabel.spatialimages.SpatialImage',\n}\nnumpydoc_xref_ignore = {\n # words\n 'instance', 'instances', 'of'\n}\n\n\n# generate autosummary even if no references\nautosummary_generate = True\nautodoc_default_options = {'inherited-members': None}\ndefault_role = 'autolink' # XXX silently allows bad syntax, someone should fix\n\n# configure linkcheck\n# https://sphinx-doc.org/en/master/usage/configuration.html?#options-for-the-linkcheck-builder\nlinkcheck_retries = 2\nlinkcheck_rate_limit_timeout = 15.0\nlinkcheck_ignore = [\n r'https://www.researchgate.net/profile/.*',\n]\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'MNE-BIDS'\ntd = date.today()\ncopyright = u'2017-%s, MNE Developers. Last updated on %s' % (td.year,\n td.isoformat())\n\nauthor = u'MNE Developers'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = mne_bids.__version__\n# The full version, including alpha/beta/rc tags.\nrelease = version\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This patterns also effect to html_static_path and html_extra_path\nexclude_patterns = ['auto_examples/index.rst', '_build', 'Thumbs.db',\n '.DS_Store']\n\n# HTML options (e.g., theme)\nhtml_show_sourcelink = False\nhtml_copy_source = False\n\nhtml_theme = 'pydata_sphinx_theme'\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\nhtml_static_path = ['_static']\nhtml_css_files = ['style.css']\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n 'icon_links': [\n dict(name='GitHub',\n url='https://github.com/mne-tools/mne-bids',\n icon='fab fa-github-square'),\n dict(name='Discourse',\n url='https://mne.discourse.group/tags/mne-bids',\n icon='fab fa-discourse'),\n ],\n 'icon_links_label': 'Quick Links', # for screen reader\n 'use_edit_page_button': False,\n 'navigation_with_keys': False,\n 'show_toc_level': 1,\n 'navbar_end': ['version-switcher', 'navbar-icon-links'],\n}\n\nhtml_context = {\n 'versions_dropdown': {\n 'dev': 'v0.12 (devel)',\n 'stable': 'v0.11 (stable)',\n 'v0.10': 'v0.10',\n 'v0.9': 'v0.9',\n 'v0.8': 'v0.8',\n 'v0.7': 'v0.7',\n 'v0.6': 'v0.6',\n 'v0.5': 'v0.5',\n 'v0.4': 'v0.4',\n 'v0.3': 'v0.3',\n 'v0.2': 'v0.2',\n 'v0.1': 'v0.1',\n },\n}\n\nhtml_sidebars = {}\n\n# Example configuration for intersphinx: refer to the Python standard library.\nintersphinx_mapping = {\n 'python': ('https://docs.python.org/3', None),\n 'mne': ('https://mne.tools/dev', None),\n 'numpy': ('https://numpy.org/devdocs', None),\n 'scipy': ('https://scipy.github.io/devdocs', None),\n 'matplotlib': ('https://matplotlib.org', None),\n 'nilearn': ('http://nilearn.github.io/stable', None),\n 'pandas': ('https://pandas.pydata.org/pandas-docs/dev', None),\n 'nibabel': ('https://nipy.org/nibabel', None),\n}\nintersphinx_timeout = 5\n\n# Resolve binder filepath_prefix. From the docs:\n# \"A prefix to append to the filepath in the Binder links. You should use this\n# if you will store your built documentation in a sub-folder of a repository,\n# instead of in the root.\"\n# we will store dev docs in a `dev` subdirectory and all other docs in a\n# directory \"v\" + version_str. E.g., \"v0.3\"\nif 'dev' in version:\n filepath_prefix = 'dev'\nelse:\n filepath_prefix = 'v{}'.format(version)\n\nsphinx_gallery_conf = {\n 'doc_module': 'mne_bids',\n 'reference_url': {\n 'mne_bids': None,\n },\n 'backreferences_dir': 'generated',\n 'examples_dirs': '../examples',\n 'within_subsection_order': ExampleTitleSortKey,\n 'gallery_dirs': 'auto_examples',\n 'filename_pattern': '^((?!sgskip).)*$',\n 'binder': {\n # Required keys\n 'org': 'mne-tools',\n 'repo': 'mne-bids',\n 'branch': 'gh-pages', # noqa: E501 Can be any branch, tag, or commit hash. Use a branch that hosts your docs.\n 'binderhub_url': 'https://mybinder.org', # noqa: E501 Any URL of a binderhub deployment. Must be full URL (e.g. https://mybinder.org).\n 'filepath_prefix': filepath_prefix, # noqa: E501 A prefix to prepend to any filepaths in Binder links.\n 'dependencies': [\n '../test_requirements.txt',\n './requirements.txt',\n ],\n }\n}\n", "path": "doc/conf.py"}], "after_files": [{"content": "\"\"\"MNE software for easily interacting with BIDS compatible datasets.\"\"\"\n\n__version__ = '0.12'\nfrom mne_bids import commands\nfrom mne_bids.report import make_report\nfrom mne_bids.path import (BIDSPath, get_datatypes, get_entity_vals,\n print_dir_tree, get_entities_from_fname,\n search_folder_for_text, get_bids_path_from_fname,\n find_matching_paths)\nfrom mne_bids.read import get_head_mri_trans, read_raw_bids\nfrom mne_bids.utils import get_anonymization_daysback\nfrom mne_bids.write import (make_dataset_description, write_anat,\n write_raw_bids, mark_channels,\n write_meg_calibration, write_meg_crosstalk,\n get_anat_landmarks, anonymize_dataset)\nfrom mne_bids.sidecar_updates import update_sidecar_json, update_anat_landmarks\nfrom mne_bids.inspect import inspect_dataset\nfrom mne_bids.dig import (template_to_head, convert_montage_to_ras,\n convert_montage_to_mri)\n", "path": "mne_bids/__init__.py"}, {"content": "\"\"\"Configure details for documentation with sphinx.\"\"\"\nimport os\nimport sys\nfrom datetime import date\n\nimport sphinx_gallery # noqa: F401\nfrom sphinx_gallery.sorting import ExampleTitleSortKey\n\nimport mne_bids\n\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\ncurdir = os.path.dirname(__file__)\nsys.path.append(os.path.abspath(os.path.join(curdir, '..', 'mne_bids')))\nsys.path.append(os.path.abspath(os.path.join(curdir, 'sphinxext')))\n\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n 'sphinx.ext.githubpages',\n 'sphinx.ext.autodoc',\n 'sphinx.ext.mathjax',\n 'sphinx.ext.viewcode',\n 'sphinx.ext.autosummary',\n 'sphinx.ext.doctest',\n 'sphinx.ext.intersphinx',\n 'sphinx_gallery.gen_gallery',\n 'numpydoc',\n 'sphinx_copybutton',\n 'gen_cli', # custom extension, see ./sphinxext/gen_cli.py\n 'gh_substitutions', # custom extension, see ./sphinxext/gh_substitutions.py\n]\n\n# configure sphinx-copybutton\ncopybutton_prompt_text = r\">>> |\\.\\.\\. |\\$ \"\ncopybutton_prompt_is_regexp = True\n\n# configure numpydoc\nnumpydoc_xref_param_type = True\nnumpydoc_class_members_toctree = False\nnumpydoc_attributes_as_param_list = True\nnumpydoc_xref_aliases = {\n 'BIDSPath': ':class:`BIDSPath <mne_bids.BIDSPath>`',\n 'path-like': ':term:`path-like <mne:path-like>`',\n 'array-like': ':term:`array_like <numpy:array_like>`',\n 'int': ':class:`int <python:int>`',\n 'bool': ':class:`bool <python:bool>`',\n 'float': ':class:`float <python:float>`',\n 'list': ':class:`list <python:list>`',\n 'tuple': ':class:`tuple <python:tuple>`',\n 'NibabelImageObject': 'nibabel.spatialimages.SpatialImage',\n}\nnumpydoc_xref_ignore = {\n # words\n 'instance', 'instances', 'of'\n}\n\n\n# generate autosummary even if no references\nautosummary_generate = True\nautodoc_default_options = {'inherited-members': None}\ndefault_role = 'autolink' # XXX silently allows bad syntax, someone should fix\n\n# configure linkcheck\n# https://sphinx-doc.org/en/master/usage/configuration.html?#options-for-the-linkcheck-builder\nlinkcheck_retries = 2\nlinkcheck_rate_limit_timeout = 15.0\nlinkcheck_ignore = [\n r'https://www.researchgate.net/profile/.*',\n]\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'MNE-BIDS'\ntd = date.today()\ncopyright = u'2017-%s, MNE Developers. Last updated on %s' % (td.year,\n td.isoformat())\n\nauthor = u'MNE Developers'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = mne_bids.__version__\n# The full version, including alpha/beta/rc tags.\nrelease = version\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This patterns also effect to html_static_path and html_extra_path\nexclude_patterns = ['auto_examples/index.rst', '_build', 'Thumbs.db',\n '.DS_Store']\n\n# HTML options (e.g., theme)\nhtml_show_sourcelink = False\nhtml_copy_source = False\n\nhtml_theme = 'pydata_sphinx_theme'\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\nhtml_static_path = ['_static']\nhtml_css_files = ['style.css']\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\nhtml_theme_options = {\n 'icon_links': [\n dict(name='GitHub',\n url='https://github.com/mne-tools/mne-bids',\n icon='fab fa-github-square'),\n dict(name='Discourse',\n url='https://mne.discourse.group/tags/mne-bids',\n icon='fab fa-discourse'),\n ],\n 'icon_links_label': 'Quick Links', # for screen reader\n 'use_edit_page_button': False,\n 'navigation_with_keys': False,\n 'show_toc_level': 1,\n 'navbar_end': ['version-switcher', 'navbar-icon-links'],\n}\n\nhtml_context = {\n 'versions_dropdown': {\n 'dev': 'v0.13 (devel)',\n 'stable': 'v0.12 (stable)',\n 'v0.11': 'v0.11',\n 'v0.10': 'v0.10',\n 'v0.9': 'v0.9',\n 'v0.8': 'v0.8',\n 'v0.7': 'v0.7',\n 'v0.6': 'v0.6',\n 'v0.5': 'v0.5',\n 'v0.4': 'v0.4',\n 'v0.3': 'v0.3',\n 'v0.2': 'v0.2',\n 'v0.1': 'v0.1',\n },\n}\n\nhtml_sidebars = {}\n\n# Example configuration for intersphinx: refer to the Python standard library.\nintersphinx_mapping = {\n 'python': ('https://docs.python.org/3', None),\n 'mne': ('https://mne.tools/dev', None),\n 'numpy': ('https://numpy.org/devdocs', None),\n 'scipy': ('https://scipy.github.io/devdocs', None),\n 'matplotlib': ('https://matplotlib.org', None),\n 'nilearn': ('http://nilearn.github.io/stable', None),\n 'pandas': ('https://pandas.pydata.org/pandas-docs/dev', None),\n 'nibabel': ('https://nipy.org/nibabel', None),\n}\nintersphinx_timeout = 5\n\n# Resolve binder filepath_prefix. From the docs:\n# \"A prefix to append to the filepath in the Binder links. You should use this\n# if you will store your built documentation in a sub-folder of a repository,\n# instead of in the root.\"\n# we will store dev docs in a `dev` subdirectory and all other docs in a\n# directory \"v\" + version_str. E.g., \"v0.3\"\nif 'dev' in version:\n filepath_prefix = 'dev'\nelse:\n filepath_prefix = 'v{}'.format(version)\n\nsphinx_gallery_conf = {\n 'doc_module': 'mne_bids',\n 'reference_url': {\n 'mne_bids': None,\n },\n 'backreferences_dir': 'generated',\n 'examples_dirs': '../examples',\n 'within_subsection_order': ExampleTitleSortKey,\n 'gallery_dirs': 'auto_examples',\n 'filename_pattern': '^((?!sgskip).)*$',\n 'binder': {\n # Required keys\n 'org': 'mne-tools',\n 'repo': 'mne-bids',\n 'branch': 'gh-pages', # noqa: E501 Can be any branch, tag, or commit hash. Use a branch that hosts your docs.\n 'binderhub_url': 'https://mybinder.org', # noqa: E501 Any URL of a binderhub deployment. Must be full URL (e.g. https://mybinder.org).\n 'filepath_prefix': filepath_prefix, # noqa: E501 A prefix to prepend to any filepaths in Binder links.\n 'dependencies': [\n '../test_requirements.txt',\n './requirements.txt',\n ],\n }\n}\n", "path": "doc/conf.py"}]}
| 3,021 | 294 |
gh_patches_debug_33914
|
rasdani/github-patches
|
git_diff
|
quantumlib__Cirq-853
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Implement equality for CircuitDag
Implement `__eq__` for CircuitDag using `networkx.is_isomorphic()`. Use the node_match argument of is_isomorphic: `node_match=lambda n: n.val`.
This may be useful for #830.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cirq/circuits/circuit_dag.py`
Content:
```
1 # Copyright 2018 The ops Developers
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from typing import Any, Callable, Generic, Iterator, TypeVar
16
17 import networkx
18
19 from cirq import ops, devices
20 from cirq.circuits import circuit
21
22
23 T = TypeVar('T')
24
25 class Unique(Generic[T]):
26 """A wrapper for a value that doesn't compare equal to other instances.
27
28 For example: 5 == 5 but Unique(5) != Unique(5).
29
30 Unique is used by CircuitDag to wrap operations because nodes in a graph
31 are considered the same node if they compare equal to each other. X(q0)
32 in one moment of a Circuit and X(q0) in another moment of the Circuit are
33 wrapped by Unique(X(q0)) so they are distinct nodes in the graph.
34 """
35 def __init__(self, val: T) -> None:
36 self.val = val
37
38 def __repr__(self):
39 return 'Unique({}, {!r})'.format(id(self), self.val)
40
41
42 def _disjoint_qubits(op1: ops.Operation, op2: ops.Operation) -> bool:
43 """Returns true only if the operations have qubits in common."""
44 return not set(op1.qubits) & set(op2.qubits)
45
46
47 class CircuitDag(networkx.DiGraph):
48 """A representation of a Circuit as a directed acyclic graph.
49
50 Nodes of the graph are instances of Unique containing each operation of a
51 circuit.
52
53 Edges of the graph are tuples of nodes. Each edge specifies a required
54 application order between two operations. The first must be applied before
55 the second.
56
57 The graph is maximalist (transitive completion).
58 """
59
60 disjoint_qubits = staticmethod(_disjoint_qubits)
61
62 def __init__(self,
63 can_reorder: Callable[[ops.Operation, ops.Operation],
64 bool] = _disjoint_qubits,
65 incoming_graph_data: Any = None,
66 device: devices.Device = devices.UnconstrainedDevice
67 ) -> None:
68 """Initializes a CircuitDag.
69
70 Args:
71 can_reorder: A predicate that determines if two operations may be
72 reordered. Graph edges are created for pairs of operations
73 where this returns False.
74
75 The default predicate allows reordering only when the operations
76 don't share common qubits.
77 incoming_graph_data: Data in initialize the graph. This can be any
78 value supported by networkx.DiGraph() e.g. an edge list or
79 another graph.
80 device: Hardware that the circuit should be able to run on.
81 """
82 super().__init__(incoming_graph_data)
83 self.can_reorder = can_reorder
84 self.device = device
85
86 @staticmethod
87 def make_node(op: ops.Operation) -> Unique:
88 return Unique(op)
89
90 @staticmethod
91 def from_circuit(circuit: circuit.Circuit,
92 can_reorder: Callable[[ops.Operation, ops.Operation],
93 bool] = _disjoint_qubits
94 ) -> 'CircuitDag':
95 return CircuitDag.from_ops(circuit.all_operations(),
96 can_reorder=can_reorder,
97 device=circuit.device)
98
99 @staticmethod
100 def from_ops(*operations: ops.OP_TREE,
101 can_reorder: Callable[[ops.Operation, ops.Operation],
102 bool] = _disjoint_qubits,
103 device: devices.Device = devices.UnconstrainedDevice
104 ) -> 'CircuitDag':
105 dag = CircuitDag(can_reorder=can_reorder, device=device)
106 for op in ops.flatten_op_tree(operations):
107 dag.append(op)
108 return dag
109
110 def append(self, op: ops.Operation) -> None:
111 new_node = self.make_node(op)
112 self.add_edges_from([(node, new_node)
113 for node in self.nodes
114 if not self.can_reorder(node.val, new_node.val)])
115 self.add_node(new_node)
116
117 def ordered_nodes(self) -> Iterator[Unique[ops.Operation]]:
118 if not self.nodes:
119 return
120 g = self.copy()
121
122 def get_root_node(some_node: Unique[ops.Operation]
123 ) -> Unique[ops.Operation]:
124 pred = g.pred
125 while pred[some_node]:
126 some_node = next(iter(pred[some_node]))
127 return some_node
128
129 def get_first_node() -> Unique[ops.Operation]:
130 return get_root_node(next(iter(g.nodes)))
131
132 def get_next_node(succ: networkx.classes.coreviews.AtlasView
133 ) -> Unique[ops.Operation]:
134 if succ:
135 return get_root_node(next(iter(succ)))
136 else:
137 return get_first_node()
138
139 node = get_first_node()
140 while True:
141 yield node
142 succ = g.succ[node]
143 g.remove_node(node)
144
145 if not g.nodes:
146 return
147
148 node = get_next_node(succ)
149
150 def all_operations(self) -> Iterator[ops.Operation]:
151 return (node.val for node in self.ordered_nodes())
152
153 def to_circuit(self) -> circuit.Circuit:
154 return circuit.Circuit.from_ops(
155 self.all_operations(),
156 strategy=circuit.InsertStrategy.EARLIEST,
157 device=self.device)
158
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/cirq/circuits/circuit_dag.py b/cirq/circuits/circuit_dag.py
--- a/cirq/circuits/circuit_dag.py
+++ b/cirq/circuits/circuit_dag.py
@@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Any, Callable, Generic, Iterator, TypeVar
+from typing import Any, Callable, Dict, Generic, Iterator, TypeVar
+import functools
import networkx
from cirq import ops, devices
@@ -22,6 +23,7 @@
T = TypeVar('T')
[email protected]_ordering
class Unique(Generic[T]):
"""A wrapper for a value that doesn't compare equal to other instances.
@@ -38,6 +40,11 @@
def __repr__(self):
return 'Unique({}, {!r})'.format(id(self), self.val)
+ def __lt__(self, other):
+ if not isinstance(other, type(self)):
+ return NotImplemented
+ return id(self) < id(other)
+
def _disjoint_qubits(op1: ops.Operation, op2: ops.Operation) -> bool:
"""Returns true only if the operations have qubits in common."""
@@ -114,6 +121,24 @@
if not self.can_reorder(node.val, new_node.val)])
self.add_node(new_node)
+ def __eq__(self, other):
+ if not isinstance(other, type(self)):
+ return NotImplemented
+ g1 = self.copy()
+ g2 = other.copy()
+ for node, attr in g1.nodes.items():
+ attr['val'] = node.val
+ for node, attr in g2.nodes.items():
+ attr['val'] = node.val
+ def node_match(attr1: Dict[Any, Any], attr2: Dict[Any, Any]) -> bool:
+ return attr1['val'] == attr2['val']
+ return networkx.is_isomorphic(g1, g2, node_match=node_match)
+
+ def __ne__(self, other):
+ return not self == other
+
+ __hash__ = None # type: ignore
+
def ordered_nodes(self) -> Iterator[Unique[ops.Operation]]:
if not self.nodes:
return
|
{"golden_diff": "diff --git a/cirq/circuits/circuit_dag.py b/cirq/circuits/circuit_dag.py\n--- a/cirq/circuits/circuit_dag.py\n+++ b/cirq/circuits/circuit_dag.py\n@@ -12,8 +12,9 @@\n # See the License for the specific language governing permissions and\n # limitations under the License.\n \n-from typing import Any, Callable, Generic, Iterator, TypeVar\n+from typing import Any, Callable, Dict, Generic, Iterator, TypeVar\n \n+import functools\n import networkx\n \n from cirq import ops, devices\n@@ -22,6 +23,7 @@\n \n T = TypeVar('T')\n \[email protected]_ordering\n class Unique(Generic[T]):\n \"\"\"A wrapper for a value that doesn't compare equal to other instances.\n \n@@ -38,6 +40,11 @@\n def __repr__(self):\n return 'Unique({}, {!r})'.format(id(self), self.val)\n \n+ def __lt__(self, other):\n+ if not isinstance(other, type(self)):\n+ return NotImplemented\n+ return id(self) < id(other)\n+\n \n def _disjoint_qubits(op1: ops.Operation, op2: ops.Operation) -> bool:\n \"\"\"Returns true only if the operations have qubits in common.\"\"\"\n@@ -114,6 +121,24 @@\n if not self.can_reorder(node.val, new_node.val)])\n self.add_node(new_node)\n \n+ def __eq__(self, other):\n+ if not isinstance(other, type(self)):\n+ return NotImplemented\n+ g1 = self.copy()\n+ g2 = other.copy()\n+ for node, attr in g1.nodes.items():\n+ attr['val'] = node.val\n+ for node, attr in g2.nodes.items():\n+ attr['val'] = node.val\n+ def node_match(attr1: Dict[Any, Any], attr2: Dict[Any, Any]) -> bool:\n+ return attr1['val'] == attr2['val']\n+ return networkx.is_isomorphic(g1, g2, node_match=node_match)\n+\n+ def __ne__(self, other):\n+ return not self == other\n+\n+ __hash__ = None # type: ignore\n+\n def ordered_nodes(self) -> Iterator[Unique[ops.Operation]]:\n if not self.nodes:\n return\n", "issue": "Implement equality for CircuitDag\nImplement `__eq__` for CircuitDag using `networkx.is_isomorphic()`. Use the node_match argument of is_isomorphic: `node_match=lambda n: n.val`.\r\n\r\nThis may be useful for #830.\n", "before_files": [{"content": "# Copyright 2018 The ops Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Any, Callable, Generic, Iterator, TypeVar\n\nimport networkx\n\nfrom cirq import ops, devices\nfrom cirq.circuits import circuit\n\n\nT = TypeVar('T')\n\nclass Unique(Generic[T]):\n \"\"\"A wrapper for a value that doesn't compare equal to other instances.\n\n For example: 5 == 5 but Unique(5) != Unique(5).\n\n Unique is used by CircuitDag to wrap operations because nodes in a graph\n are considered the same node if they compare equal to each other. X(q0)\n in one moment of a Circuit and X(q0) in another moment of the Circuit are\n wrapped by Unique(X(q0)) so they are distinct nodes in the graph.\n \"\"\"\n def __init__(self, val: T) -> None:\n self.val = val\n\n def __repr__(self):\n return 'Unique({}, {!r})'.format(id(self), self.val)\n\n\ndef _disjoint_qubits(op1: ops.Operation, op2: ops.Operation) -> bool:\n \"\"\"Returns true only if the operations have qubits in common.\"\"\"\n return not set(op1.qubits) & set(op2.qubits)\n\n\nclass CircuitDag(networkx.DiGraph):\n \"\"\"A representation of a Circuit as a directed acyclic graph.\n\n Nodes of the graph are instances of Unique containing each operation of a\n circuit.\n\n Edges of the graph are tuples of nodes. Each edge specifies a required\n application order between two operations. The first must be applied before\n the second.\n\n The graph is maximalist (transitive completion).\n \"\"\"\n\n disjoint_qubits = staticmethod(_disjoint_qubits)\n\n def __init__(self,\n can_reorder: Callable[[ops.Operation, ops.Operation],\n bool] = _disjoint_qubits,\n incoming_graph_data: Any = None,\n device: devices.Device = devices.UnconstrainedDevice\n ) -> None:\n \"\"\"Initializes a CircuitDag.\n\n Args:\n can_reorder: A predicate that determines if two operations may be\n reordered. Graph edges are created for pairs of operations\n where this returns False.\n\n The default predicate allows reordering only when the operations\n don't share common qubits.\n incoming_graph_data: Data in initialize the graph. This can be any\n value supported by networkx.DiGraph() e.g. an edge list or\n another graph.\n device: Hardware that the circuit should be able to run on.\n \"\"\"\n super().__init__(incoming_graph_data)\n self.can_reorder = can_reorder\n self.device = device\n\n @staticmethod\n def make_node(op: ops.Operation) -> Unique:\n return Unique(op)\n\n @staticmethod\n def from_circuit(circuit: circuit.Circuit,\n can_reorder: Callable[[ops.Operation, ops.Operation],\n bool] = _disjoint_qubits\n ) -> 'CircuitDag':\n return CircuitDag.from_ops(circuit.all_operations(),\n can_reorder=can_reorder,\n device=circuit.device)\n\n @staticmethod\n def from_ops(*operations: ops.OP_TREE,\n can_reorder: Callable[[ops.Operation, ops.Operation],\n bool] = _disjoint_qubits,\n device: devices.Device = devices.UnconstrainedDevice\n ) -> 'CircuitDag':\n dag = CircuitDag(can_reorder=can_reorder, device=device)\n for op in ops.flatten_op_tree(operations):\n dag.append(op)\n return dag\n\n def append(self, op: ops.Operation) -> None:\n new_node = self.make_node(op)\n self.add_edges_from([(node, new_node)\n for node in self.nodes\n if not self.can_reorder(node.val, new_node.val)])\n self.add_node(new_node)\n\n def ordered_nodes(self) -> Iterator[Unique[ops.Operation]]:\n if not self.nodes:\n return\n g = self.copy()\n\n def get_root_node(some_node: Unique[ops.Operation]\n ) -> Unique[ops.Operation]:\n pred = g.pred\n while pred[some_node]:\n some_node = next(iter(pred[some_node]))\n return some_node\n\n def get_first_node() -> Unique[ops.Operation]:\n return get_root_node(next(iter(g.nodes)))\n\n def get_next_node(succ: networkx.classes.coreviews.AtlasView\n ) -> Unique[ops.Operation]:\n if succ:\n return get_root_node(next(iter(succ)))\n else:\n return get_first_node()\n\n node = get_first_node()\n while True:\n yield node\n succ = g.succ[node]\n g.remove_node(node)\n\n if not g.nodes:\n return\n\n node = get_next_node(succ)\n\n def all_operations(self) -> Iterator[ops.Operation]:\n return (node.val for node in self.ordered_nodes())\n\n def to_circuit(self) -> circuit.Circuit:\n return circuit.Circuit.from_ops(\n self.all_operations(),\n strategy=circuit.InsertStrategy.EARLIEST,\n device=self.device)\n", "path": "cirq/circuits/circuit_dag.py"}], "after_files": [{"content": "# Copyright 2018 The ops Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Any, Callable, Dict, Generic, Iterator, TypeVar\n\nimport functools\nimport networkx\n\nfrom cirq import ops, devices\nfrom cirq.circuits import circuit\n\n\nT = TypeVar('T')\n\[email protected]_ordering\nclass Unique(Generic[T]):\n \"\"\"A wrapper for a value that doesn't compare equal to other instances.\n\n For example: 5 == 5 but Unique(5) != Unique(5).\n\n Unique is used by CircuitDag to wrap operations because nodes in a graph\n are considered the same node if they compare equal to each other. X(q0)\n in one moment of a Circuit and X(q0) in another moment of the Circuit are\n wrapped by Unique(X(q0)) so they are distinct nodes in the graph.\n \"\"\"\n def __init__(self, val: T) -> None:\n self.val = val\n\n def __repr__(self):\n return 'Unique({}, {!r})'.format(id(self), self.val)\n\n def __lt__(self, other):\n if not isinstance(other, type(self)):\n return NotImplemented\n return id(self) < id(other)\n\n\ndef _disjoint_qubits(op1: ops.Operation, op2: ops.Operation) -> bool:\n \"\"\"Returns true only if the operations have qubits in common.\"\"\"\n return not set(op1.qubits) & set(op2.qubits)\n\n\nclass CircuitDag(networkx.DiGraph):\n \"\"\"A representation of a Circuit as a directed acyclic graph.\n\n Nodes of the graph are instances of Unique containing each operation of a\n circuit.\n\n Edges of the graph are tuples of nodes. Each edge specifies a required\n application order between two operations. The first must be applied before\n the second.\n\n The graph is maximalist (transitive completion).\n \"\"\"\n\n disjoint_qubits = staticmethod(_disjoint_qubits)\n\n def __init__(self,\n can_reorder: Callable[[ops.Operation, ops.Operation],\n bool] = _disjoint_qubits,\n incoming_graph_data: Any = None,\n device: devices.Device = devices.UnconstrainedDevice\n ) -> None:\n \"\"\"Initializes a CircuitDag.\n\n Args:\n can_reorder: A predicate that determines if two operations may be\n reordered. Graph edges are created for pairs of operations\n where this returns False.\n\n The default predicate allows reordering only when the operations\n don't share common qubits.\n incoming_graph_data: Data in initialize the graph. This can be any\n value supported by networkx.DiGraph() e.g. an edge list or\n another graph.\n device: Hardware that the circuit should be able to run on.\n \"\"\"\n super().__init__(incoming_graph_data)\n self.can_reorder = can_reorder\n self.device = device\n\n @staticmethod\n def make_node(op: ops.Operation) -> Unique:\n return Unique(op)\n\n @staticmethod\n def from_circuit(circuit: circuit.Circuit,\n can_reorder: Callable[[ops.Operation, ops.Operation],\n bool] = _disjoint_qubits\n ) -> 'CircuitDag':\n return CircuitDag.from_ops(circuit.all_operations(),\n can_reorder=can_reorder,\n device=circuit.device)\n\n @staticmethod\n def from_ops(*operations: ops.OP_TREE,\n can_reorder: Callable[[ops.Operation, ops.Operation],\n bool] = _disjoint_qubits,\n device: devices.Device = devices.UnconstrainedDevice\n ) -> 'CircuitDag':\n dag = CircuitDag(can_reorder=can_reorder, device=device)\n for op in ops.flatten_op_tree(operations):\n dag.append(op)\n return dag\n\n def append(self, op: ops.Operation) -> None:\n new_node = self.make_node(op)\n self.add_edges_from([(node, new_node)\n for node in self.nodes\n if not self.can_reorder(node.val, new_node.val)])\n self.add_node(new_node)\n\n def __eq__(self, other):\n if not isinstance(other, type(self)):\n return NotImplemented\n g1 = self.copy()\n g2 = other.copy()\n for node, attr in g1.nodes.items():\n attr['val'] = node.val\n for node, attr in g2.nodes.items():\n attr['val'] = node.val\n def node_match(attr1: Dict[Any, Any], attr2: Dict[Any, Any]) -> bool:\n return attr1['val'] == attr2['val']\n return networkx.is_isomorphic(g1, g2, node_match=node_match)\n\n def __ne__(self, other):\n return not self == other\n\n __hash__ = None # type: ignore\n\n def ordered_nodes(self) -> Iterator[Unique[ops.Operation]]:\n if not self.nodes:\n return\n g = self.copy()\n\n def get_root_node(some_node: Unique[ops.Operation]\n ) -> Unique[ops.Operation]:\n pred = g.pred\n while pred[some_node]:\n some_node = next(iter(pred[some_node]))\n return some_node\n\n def get_first_node() -> Unique[ops.Operation]:\n return get_root_node(next(iter(g.nodes)))\n\n def get_next_node(succ: networkx.classes.coreviews.AtlasView\n ) -> Unique[ops.Operation]:\n if succ:\n return get_root_node(next(iter(succ)))\n else:\n return get_first_node()\n\n node = get_first_node()\n while True:\n yield node\n succ = g.succ[node]\n g.remove_node(node)\n\n if not g.nodes:\n return\n\n node = get_next_node(succ)\n\n def all_operations(self) -> Iterator[ops.Operation]:\n return (node.val for node in self.ordered_nodes())\n\n def to_circuit(self) -> circuit.Circuit:\n return circuit.Circuit.from_ops(\n self.all_operations(),\n strategy=circuit.InsertStrategy.EARLIEST,\n device=self.device)\n", "path": "cirq/circuits/circuit_dag.py"}]}
| 1,925 | 528 |
gh_patches_debug_35683
|
rasdani/github-patches
|
git_diff
|
PrefectHQ__prefect-3136
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Setting `flow_run_id` in local runs breaks slack notifier
#2868 made some additions to local runs so that targets/results could be templated properly both locally and in runs using a backend API. Setting `flow_run_id` on local runs here:
https://github.com/PrefectHQ/prefect/blob/30c88b315c83748b114228703d42bfe3e7d6a763/src/prefect/core/flow.py#L964-L969
breaks the `slack_notifier` because it has an extra check for a `flow_run_id` in context.
https://github.com/PrefectHQ/prefect/blob/30c88b315c83748b114228703d42bfe3e7d6a763/src/prefect/utilities/notifications/notifications.py#L151
I think this should be resolved by making the backend functionality of the slack notifier opt-in instead of default when a flow_run_id is present.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/prefect/utilities/notifications/notifications.py`
Content:
```
1 """
2 Tools and utilities for notifications and callbacks.
3
4 For an in-depth guide to setting up your system for using Slack notifications, [please see our
5 tutorial](/core/advanced_tutorials/slack-notifications.html).
6 """
7 import smtplib
8 from email.header import Header
9 from email.mime.multipart import MIMEMultipart
10 from email.mime.text import MIMEText
11 from typing import TYPE_CHECKING, Any, Callable, Union, cast
12
13 from toolz import curry
14
15 import prefect
16
17 if TYPE_CHECKING:
18 import prefect.engine.state
19 import prefect.client
20 from prefect import Flow, Task # noqa
21
22 TrackedObjectType = Union["Flow", "Task"]
23
24 __all__ = ["callback_factory", "gmail_notifier", "slack_notifier"]
25
26
27 def callback_factory(
28 fn: Callable[[Any, "prefect.engine.state.State"], Any],
29 check: Callable[["prefect.engine.state.State"], bool],
30 ) -> Callable:
31 """
32 Utility for generating state handlers that serve as callbacks, under arbitrary
33 state-based checks.
34
35 Args:
36 - fn (Callable): a function with signature `fn(obj, state: State) -> None`
37 that will be called anytime the associated state-check passes; in general, it is
38 expected that this function will have side effects (e.g., sends an email). The
39 first argument to this function is the `Task` or `Flow` it is attached to.
40 - check (Callable): a function with signature `check(state: State) -> bool`
41 that is used for determining when the callback function should be called
42
43 Returns:
44 - state_handler (Callable): a state handler function that can be attached to both Tasks
45 and Flows
46
47 Example:
48 ```python
49 from prefect import Task, Flow
50 from prefect.utilities.notifications import callback_factory
51
52 fn = lambda obj, state: print(state)
53 check = lambda state: state.is_successful()
54 callback = callback_factory(fn, check)
55
56 t = Task(state_handlers=[callback])
57 f = Flow(tasks=[t], state_handlers=[callback])
58 f.run()
59 # prints:
60 # Success("Task run succeeded.")
61 # Success("All reference tasks succeeded.")
62 ```
63 """
64
65 def state_handler(
66 obj: Any,
67 old_state: "prefect.engine.state.State",
68 new_state: "prefect.engine.state.State",
69 ) -> "prefect.engine.state.State":
70 if check(new_state) is True:
71 fn(obj, new_state)
72 return new_state
73
74 return state_handler
75
76
77 def email_message_formatter(
78 tracked_obj: TrackedObjectType, state: "prefect.engine.state.State", email_to: str
79 ) -> str:
80 if isinstance(state.result, Exception):
81 msg = "<pre>{}</pre>".format(repr(state.result))
82 else:
83 msg = '"{}"'.format(state.message)
84
85 html = """
86 <html>
87 <head></head>
88 <body>
89 <table align="left" border="0" cellpadding="2px" cellspacing="2px">
90 <tr>
91 <td style="border-left: 2px solid {color};">
92 <img src="https://emoji.slack-edge.com/TAN3D79AL/prefect/2497370f58500a5a.png">
93 </td>
94 <td style="border-left: 2px solid {color}; padding-left: 6px;">
95 {text}
96 </td>
97 </tr>
98 </table>
99 </body>
100 </html>
101 """
102 color = state.color
103 text = """
104 <pre>{name}</pre> is now in a <font color="{color}"><b>{state}</b></font> state
105 <br><br>
106 Message: {msg}
107 """.format(
108 name=tracked_obj.name, color=state.color, state=type(state).__name__, msg=msg
109 )
110
111 contents = MIMEMultipart("alternative")
112 contents.attach(MIMEText(text, "plain"))
113 contents.attach(MIMEText(html.format(color=color, text=text), "html"))
114
115 contents["Subject"] = Header(
116 "Prefect state change notification for {}".format(tracked_obj.name), "UTF-8"
117 )
118 contents["From"] = "[email protected]"
119 contents["To"] = email_to
120
121 return contents.as_string()
122
123
124 def slack_message_formatter(
125 tracked_obj: TrackedObjectType, state: "prefect.engine.state.State"
126 ) -> dict:
127 # see https://api.slack.com/docs/message-attachments
128 fields = []
129 if isinstance(state.result, Exception):
130 value = "```{}```".format(repr(state.result))
131 else:
132 value = cast(str, state.message)
133 if value is not None:
134 fields.append({"title": "Message", "value": value, "short": False})
135
136 notification_payload = {
137 "fallback": "State change notification",
138 "color": state.color,
139 "author_name": "Prefect",
140 "author_link": "https://www.prefect.io/",
141 "author_icon": "https://emoji.slack-edge.com/TAN3D79AL/prefect/2497370f58500a5a.png",
142 "title": type(state).__name__,
143 "fields": fields,
144 # "title_link": "https://www.prefect.io/",
145 "text": "{0} is now in a {1} state".format(
146 tracked_obj.name, type(state).__name__
147 ),
148 "footer": "Prefect notification",
149 }
150
151 if prefect.context.get("flow_run_id"):
152 url = None
153
154 if isinstance(tracked_obj, prefect.Flow):
155 url = prefect.client.Client().get_cloud_url(
156 "flow-run", prefect.context["flow_run_id"], as_user=False
157 )
158 elif isinstance(tracked_obj, prefect.Task):
159 url = prefect.client.Client().get_cloud_url(
160 "task-run", prefect.context.get("task_run_id", ""), as_user=False
161 )
162
163 if url:
164 notification_payload.update(title_link=url)
165
166 data = {"attachments": [notification_payload]}
167 return data
168
169
170 @curry
171 def gmail_notifier(
172 tracked_obj: TrackedObjectType,
173 old_state: "prefect.engine.state.State",
174 new_state: "prefect.engine.state.State",
175 ignore_states: list = None,
176 only_states: list = None,
177 ) -> "prefect.engine.state.State":
178 """
179 Email state change handler - configured to work solely with Gmail; works as a standalone
180 state handler, or can be called from within a custom state handler. This function is
181 curried meaning that it can be called multiple times to partially bind any keyword
182 arguments (see example below).
183
184 The username and password Gmail credentials will be taken from your `"EMAIL_USERNAME"` and
185 `"EMAIL_PASSWORD"` secrets, respectively; note the username will also serve as the
186 destination email address for the notification.
187
188 Args:
189 - tracked_obj (Task or Flow): Task or Flow object the handler is registered with
190 - old_state (State): previous state of tracked object
191 - new_state (State): new state of tracked object
192 - ignore_states ([State], optional): list of `State` classes to ignore, e.g.,
193 `[Running, Scheduled]`. If `new_state` is an instance of one of the passed states,
194 no notification will occur.
195 - only_states ([State], optional): similar to `ignore_states`, but instead _only_
196 notifies you if the Task / Flow is in a state from the provided list of `State`
197 classes
198
199 Returns:
200 - State: the `new_state` object that was provided
201
202 Raises:
203 - ValueError: if the email notification fails for any reason
204
205 Example:
206 ```python
207 from prefect import task
208 from prefect.utilities.notifications import gmail_notifier
209
210 @task(state_handlers=[gmail_notifier(ignore_states=[Running])]) # uses currying
211 def add(x, y):
212 return x + y
213 ```
214 """
215 username = cast(str, prefect.client.Secret("EMAIL_USERNAME").get())
216 password = cast(str, prefect.client.Secret("EMAIL_PASSWORD").get())
217 ignore_states = ignore_states or []
218 only_states = only_states or []
219
220 if any([isinstance(new_state, ignored) for ignored in ignore_states]):
221 return new_state
222
223 if only_states and not any(
224 [isinstance(new_state, included) for included in only_states]
225 ):
226 return new_state
227
228 body = email_message_formatter(tracked_obj, new_state, username)
229
230 server = smtplib.SMTP_SSL("smtp.gmail.com", 465)
231 server.login(username, password)
232 try:
233 server.sendmail("[email protected]", username, body)
234 except Exception:
235 raise ValueError("Email notification for {} failed".format(tracked_obj))
236 finally:
237 server.quit()
238
239 return new_state
240
241
242 @curry
243 def slack_notifier(
244 tracked_obj: TrackedObjectType,
245 old_state: "prefect.engine.state.State",
246 new_state: "prefect.engine.state.State",
247 ignore_states: list = None,
248 only_states: list = None,
249 webhook_secret: str = None,
250 ) -> "prefect.engine.state.State":
251 """
252 Slack state change handler; requires having the Prefect slack app installed. Works as a
253 standalone state handler, or can be called from within a custom state handler. This
254 function is curried meaning that it can be called multiple times to partially bind any
255 keyword arguments (see example below).
256
257 Args:
258 - tracked_obj (Task or Flow): Task or Flow object the handler is
259 registered with
260 - old_state (State): previous state of tracked object
261 - new_state (State): new state of tracked object
262 - ignore_states ([State], optional): list of `State` classes to ignore, e.g.,
263 `[Running, Scheduled]`. If `new_state` is an instance of one of the passed states,
264 no notification will occur.
265 - only_states ([State], optional): similar to `ignore_states`, but instead _only_
266 notifies you if the Task / Flow is in a state from the provided list of `State`
267 classes
268 - webhook_secret (str, optional): the name of the Prefect Secret that stores your slack
269 webhook URL; defaults to `"SLACK_WEBHOOK_URL"`
270
271 Returns:
272 - State: the `new_state` object that was provided
273
274 Raises:
275 - ValueError: if the slack notification fails for any reason
276
277 Example:
278 ```python
279 from prefect import task
280 from prefect.utilities.notifications import slack_notifier
281
282 @task(state_handlers=[slack_notifier(ignore_states=[Running])]) # uses currying
283 def add(x, y):
284 return x + y
285 ```
286 """
287 webhook_url = cast(
288 str, prefect.client.Secret(webhook_secret or "SLACK_WEBHOOK_URL").get()
289 )
290 ignore_states = ignore_states or []
291 only_states = only_states or []
292
293 if any([isinstance(new_state, ignored) for ignored in ignore_states]):
294 return new_state
295
296 if only_states and not any(
297 [isinstance(new_state, included) for included in only_states]
298 ):
299 return new_state
300
301 # 'import requests' is expensive time-wise, we should do this just-in-time to keep
302 # the 'import prefect' time low
303 import requests
304
305 form_data = slack_message_formatter(tracked_obj, new_state)
306 r = requests.post(webhook_url, json=form_data)
307 if not r.ok:
308 raise ValueError("Slack notification for {} failed".format(tracked_obj))
309 return new_state
310
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/prefect/utilities/notifications/notifications.py b/src/prefect/utilities/notifications/notifications.py
--- a/src/prefect/utilities/notifications/notifications.py
+++ b/src/prefect/utilities/notifications/notifications.py
@@ -122,7 +122,9 @@
def slack_message_formatter(
- tracked_obj: TrackedObjectType, state: "prefect.engine.state.State"
+ tracked_obj: TrackedObjectType,
+ state: "prefect.engine.state.State",
+ backend_info: bool = True,
) -> dict:
# see https://api.slack.com/docs/message-attachments
fields = []
@@ -148,7 +150,7 @@
"footer": "Prefect notification",
}
- if prefect.context.get("flow_run_id"):
+ if backend_info and prefect.context.get("flow_run_id"):
url = None
if isinstance(tracked_obj, prefect.Flow):
@@ -247,6 +249,7 @@
ignore_states: list = None,
only_states: list = None,
webhook_secret: str = None,
+ backend_info: bool = True,
) -> "prefect.engine.state.State":
"""
Slack state change handler; requires having the Prefect slack app installed. Works as a
@@ -267,6 +270,8 @@
classes
- webhook_secret (str, optional): the name of the Prefect Secret that stores your slack
webhook URL; defaults to `"SLACK_WEBHOOK_URL"`
+ - backend_info (bool, optional): Whether to supply slack notification with urls
+ pointing to backend pages; defaults to True
Returns:
- State: the `new_state` object that was provided
@@ -302,7 +307,7 @@
# the 'import prefect' time low
import requests
- form_data = slack_message_formatter(tracked_obj, new_state)
+ form_data = slack_message_formatter(tracked_obj, new_state, backend_info)
r = requests.post(webhook_url, json=form_data)
if not r.ok:
raise ValueError("Slack notification for {} failed".format(tracked_obj))
|
{"golden_diff": "diff --git a/src/prefect/utilities/notifications/notifications.py b/src/prefect/utilities/notifications/notifications.py\n--- a/src/prefect/utilities/notifications/notifications.py\n+++ b/src/prefect/utilities/notifications/notifications.py\n@@ -122,7 +122,9 @@\n \n \n def slack_message_formatter(\n- tracked_obj: TrackedObjectType, state: \"prefect.engine.state.State\"\n+ tracked_obj: TrackedObjectType,\n+ state: \"prefect.engine.state.State\",\n+ backend_info: bool = True,\n ) -> dict:\n # see https://api.slack.com/docs/message-attachments\n fields = []\n@@ -148,7 +150,7 @@\n \"footer\": \"Prefect notification\",\n }\n \n- if prefect.context.get(\"flow_run_id\"):\n+ if backend_info and prefect.context.get(\"flow_run_id\"):\n url = None\n \n if isinstance(tracked_obj, prefect.Flow):\n@@ -247,6 +249,7 @@\n ignore_states: list = None,\n only_states: list = None,\n webhook_secret: str = None,\n+ backend_info: bool = True,\n ) -> \"prefect.engine.state.State\":\n \"\"\"\n Slack state change handler; requires having the Prefect slack app installed. Works as a\n@@ -267,6 +270,8 @@\n classes\n - webhook_secret (str, optional): the name of the Prefect Secret that stores your slack\n webhook URL; defaults to `\"SLACK_WEBHOOK_URL\"`\n+ - backend_info (bool, optional): Whether to supply slack notification with urls\n+ pointing to backend pages; defaults to True\n \n Returns:\n - State: the `new_state` object that was provided\n@@ -302,7 +307,7 @@\n # the 'import prefect' time low\n import requests\n \n- form_data = slack_message_formatter(tracked_obj, new_state)\n+ form_data = slack_message_formatter(tracked_obj, new_state, backend_info)\n r = requests.post(webhook_url, json=form_data)\n if not r.ok:\n raise ValueError(\"Slack notification for {} failed\".format(tracked_obj))\n", "issue": "Setting `flow_run_id` in local runs breaks slack notifier\n#2868 made some additions to local runs so that targets/results could be templated properly both locally and in runs using a backend API. Setting `flow_run_id` on local runs here:\r\n\r\nhttps://github.com/PrefectHQ/prefect/blob/30c88b315c83748b114228703d42bfe3e7d6a763/src/prefect/core/flow.py#L964-L969\r\n\r\nbreaks the `slack_notifier` because it has an extra check for a `flow_run_id` in context.\r\n\r\nhttps://github.com/PrefectHQ/prefect/blob/30c88b315c83748b114228703d42bfe3e7d6a763/src/prefect/utilities/notifications/notifications.py#L151\r\n\r\nI think this should be resolved by making the backend functionality of the slack notifier opt-in instead of default when a flow_run_id is present.\n", "before_files": [{"content": "\"\"\"\nTools and utilities for notifications and callbacks.\n\nFor an in-depth guide to setting up your system for using Slack notifications, [please see our\ntutorial](/core/advanced_tutorials/slack-notifications.html).\n\"\"\"\nimport smtplib\nfrom email.header import Header\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\nfrom typing import TYPE_CHECKING, Any, Callable, Union, cast\n\nfrom toolz import curry\n\nimport prefect\n\nif TYPE_CHECKING:\n import prefect.engine.state\n import prefect.client\n from prefect import Flow, Task # noqa\n\nTrackedObjectType = Union[\"Flow\", \"Task\"]\n\n__all__ = [\"callback_factory\", \"gmail_notifier\", \"slack_notifier\"]\n\n\ndef callback_factory(\n fn: Callable[[Any, \"prefect.engine.state.State\"], Any],\n check: Callable[[\"prefect.engine.state.State\"], bool],\n) -> Callable:\n \"\"\"\n Utility for generating state handlers that serve as callbacks, under arbitrary\n state-based checks.\n\n Args:\n - fn (Callable): a function with signature `fn(obj, state: State) -> None`\n that will be called anytime the associated state-check passes; in general, it is\n expected that this function will have side effects (e.g., sends an email). The\n first argument to this function is the `Task` or `Flow` it is attached to.\n - check (Callable): a function with signature `check(state: State) -> bool`\n that is used for determining when the callback function should be called\n\n Returns:\n - state_handler (Callable): a state handler function that can be attached to both Tasks\n and Flows\n\n Example:\n ```python\n from prefect import Task, Flow\n from prefect.utilities.notifications import callback_factory\n\n fn = lambda obj, state: print(state)\n check = lambda state: state.is_successful()\n callback = callback_factory(fn, check)\n\n t = Task(state_handlers=[callback])\n f = Flow(tasks=[t], state_handlers=[callback])\n f.run()\n # prints:\n # Success(\"Task run succeeded.\")\n # Success(\"All reference tasks succeeded.\")\n ```\n \"\"\"\n\n def state_handler(\n obj: Any,\n old_state: \"prefect.engine.state.State\",\n new_state: \"prefect.engine.state.State\",\n ) -> \"prefect.engine.state.State\":\n if check(new_state) is True:\n fn(obj, new_state)\n return new_state\n\n return state_handler\n\n\ndef email_message_formatter(\n tracked_obj: TrackedObjectType, state: \"prefect.engine.state.State\", email_to: str\n) -> str:\n if isinstance(state.result, Exception):\n msg = \"<pre>{}</pre>\".format(repr(state.result))\n else:\n msg = '\"{}\"'.format(state.message)\n\n html = \"\"\"\n <html>\n <head></head>\n <body>\n <table align=\"left\" border=\"0\" cellpadding=\"2px\" cellspacing=\"2px\">\n <tr>\n <td style=\"border-left: 2px solid {color};\">\n <img src=\"https://emoji.slack-edge.com/TAN3D79AL/prefect/2497370f58500a5a.png\">\n </td>\n <td style=\"border-left: 2px solid {color}; padding-left: 6px;\">\n {text}\n </td>\n </tr>\n </table>\n </body>\n </html>\n \"\"\"\n color = state.color\n text = \"\"\"\n <pre>{name}</pre> is now in a <font color=\"{color}\"><b>{state}</b></font> state\n <br><br>\n Message: {msg}\n \"\"\".format(\n name=tracked_obj.name, color=state.color, state=type(state).__name__, msg=msg\n )\n\n contents = MIMEMultipart(\"alternative\")\n contents.attach(MIMEText(text, \"plain\"))\n contents.attach(MIMEText(html.format(color=color, text=text), \"html\"))\n\n contents[\"Subject\"] = Header(\n \"Prefect state change notification for {}\".format(tracked_obj.name), \"UTF-8\"\n )\n contents[\"From\"] = \"[email protected]\"\n contents[\"To\"] = email_to\n\n return contents.as_string()\n\n\ndef slack_message_formatter(\n tracked_obj: TrackedObjectType, state: \"prefect.engine.state.State\"\n) -> dict:\n # see https://api.slack.com/docs/message-attachments\n fields = []\n if isinstance(state.result, Exception):\n value = \"```{}```\".format(repr(state.result))\n else:\n value = cast(str, state.message)\n if value is not None:\n fields.append({\"title\": \"Message\", \"value\": value, \"short\": False})\n\n notification_payload = {\n \"fallback\": \"State change notification\",\n \"color\": state.color,\n \"author_name\": \"Prefect\",\n \"author_link\": \"https://www.prefect.io/\",\n \"author_icon\": \"https://emoji.slack-edge.com/TAN3D79AL/prefect/2497370f58500a5a.png\",\n \"title\": type(state).__name__,\n \"fields\": fields,\n # \"title_link\": \"https://www.prefect.io/\",\n \"text\": \"{0} is now in a {1} state\".format(\n tracked_obj.name, type(state).__name__\n ),\n \"footer\": \"Prefect notification\",\n }\n\n if prefect.context.get(\"flow_run_id\"):\n url = None\n\n if isinstance(tracked_obj, prefect.Flow):\n url = prefect.client.Client().get_cloud_url(\n \"flow-run\", prefect.context[\"flow_run_id\"], as_user=False\n )\n elif isinstance(tracked_obj, prefect.Task):\n url = prefect.client.Client().get_cloud_url(\n \"task-run\", prefect.context.get(\"task_run_id\", \"\"), as_user=False\n )\n\n if url:\n notification_payload.update(title_link=url)\n\n data = {\"attachments\": [notification_payload]}\n return data\n\n\n@curry\ndef gmail_notifier(\n tracked_obj: TrackedObjectType,\n old_state: \"prefect.engine.state.State\",\n new_state: \"prefect.engine.state.State\",\n ignore_states: list = None,\n only_states: list = None,\n) -> \"prefect.engine.state.State\":\n \"\"\"\n Email state change handler - configured to work solely with Gmail; works as a standalone\n state handler, or can be called from within a custom state handler. This function is\n curried meaning that it can be called multiple times to partially bind any keyword\n arguments (see example below).\n\n The username and password Gmail credentials will be taken from your `\"EMAIL_USERNAME\"` and\n `\"EMAIL_PASSWORD\"` secrets, respectively; note the username will also serve as the\n destination email address for the notification.\n\n Args:\n - tracked_obj (Task or Flow): Task or Flow object the handler is registered with\n - old_state (State): previous state of tracked object\n - new_state (State): new state of tracked object\n - ignore_states ([State], optional): list of `State` classes to ignore, e.g.,\n `[Running, Scheduled]`. If `new_state` is an instance of one of the passed states,\n no notification will occur.\n - only_states ([State], optional): similar to `ignore_states`, but instead _only_\n notifies you if the Task / Flow is in a state from the provided list of `State`\n classes\n\n Returns:\n - State: the `new_state` object that was provided\n\n Raises:\n - ValueError: if the email notification fails for any reason\n\n Example:\n ```python\n from prefect import task\n from prefect.utilities.notifications import gmail_notifier\n\n @task(state_handlers=[gmail_notifier(ignore_states=[Running])]) # uses currying\n def add(x, y):\n return x + y\n ```\n \"\"\"\n username = cast(str, prefect.client.Secret(\"EMAIL_USERNAME\").get())\n password = cast(str, prefect.client.Secret(\"EMAIL_PASSWORD\").get())\n ignore_states = ignore_states or []\n only_states = only_states or []\n\n if any([isinstance(new_state, ignored) for ignored in ignore_states]):\n return new_state\n\n if only_states and not any(\n [isinstance(new_state, included) for included in only_states]\n ):\n return new_state\n\n body = email_message_formatter(tracked_obj, new_state, username)\n\n server = smtplib.SMTP_SSL(\"smtp.gmail.com\", 465)\n server.login(username, password)\n try:\n server.sendmail(\"[email protected]\", username, body)\n except Exception:\n raise ValueError(\"Email notification for {} failed\".format(tracked_obj))\n finally:\n server.quit()\n\n return new_state\n\n\n@curry\ndef slack_notifier(\n tracked_obj: TrackedObjectType,\n old_state: \"prefect.engine.state.State\",\n new_state: \"prefect.engine.state.State\",\n ignore_states: list = None,\n only_states: list = None,\n webhook_secret: str = None,\n) -> \"prefect.engine.state.State\":\n \"\"\"\n Slack state change handler; requires having the Prefect slack app installed. Works as a\n standalone state handler, or can be called from within a custom state handler. This\n function is curried meaning that it can be called multiple times to partially bind any\n keyword arguments (see example below).\n\n Args:\n - tracked_obj (Task or Flow): Task or Flow object the handler is\n registered with\n - old_state (State): previous state of tracked object\n - new_state (State): new state of tracked object\n - ignore_states ([State], optional): list of `State` classes to ignore, e.g.,\n `[Running, Scheduled]`. If `new_state` is an instance of one of the passed states,\n no notification will occur.\n - only_states ([State], optional): similar to `ignore_states`, but instead _only_\n notifies you if the Task / Flow is in a state from the provided list of `State`\n classes\n - webhook_secret (str, optional): the name of the Prefect Secret that stores your slack\n webhook URL; defaults to `\"SLACK_WEBHOOK_URL\"`\n\n Returns:\n - State: the `new_state` object that was provided\n\n Raises:\n - ValueError: if the slack notification fails for any reason\n\n Example:\n ```python\n from prefect import task\n from prefect.utilities.notifications import slack_notifier\n\n @task(state_handlers=[slack_notifier(ignore_states=[Running])]) # uses currying\n def add(x, y):\n return x + y\n ```\n \"\"\"\n webhook_url = cast(\n str, prefect.client.Secret(webhook_secret or \"SLACK_WEBHOOK_URL\").get()\n )\n ignore_states = ignore_states or []\n only_states = only_states or []\n\n if any([isinstance(new_state, ignored) for ignored in ignore_states]):\n return new_state\n\n if only_states and not any(\n [isinstance(new_state, included) for included in only_states]\n ):\n return new_state\n\n # 'import requests' is expensive time-wise, we should do this just-in-time to keep\n # the 'import prefect' time low\n import requests\n\n form_data = slack_message_formatter(tracked_obj, new_state)\n r = requests.post(webhook_url, json=form_data)\n if not r.ok:\n raise ValueError(\"Slack notification for {} failed\".format(tracked_obj))\n return new_state\n", "path": "src/prefect/utilities/notifications/notifications.py"}], "after_files": [{"content": "\"\"\"\nTools and utilities for notifications and callbacks.\n\nFor an in-depth guide to setting up your system for using Slack notifications, [please see our\ntutorial](/core/advanced_tutorials/slack-notifications.html).\n\"\"\"\nimport smtplib\nfrom email.header import Header\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\nfrom typing import TYPE_CHECKING, Any, Callable, Union, cast\n\nfrom toolz import curry\n\nimport prefect\n\nif TYPE_CHECKING:\n import prefect.engine.state\n import prefect.client\n from prefect import Flow, Task # noqa\n\nTrackedObjectType = Union[\"Flow\", \"Task\"]\n\n__all__ = [\"callback_factory\", \"gmail_notifier\", \"slack_notifier\"]\n\n\ndef callback_factory(\n fn: Callable[[Any, \"prefect.engine.state.State\"], Any],\n check: Callable[[\"prefect.engine.state.State\"], bool],\n) -> Callable:\n \"\"\"\n Utility for generating state handlers that serve as callbacks, under arbitrary\n state-based checks.\n\n Args:\n - fn (Callable): a function with signature `fn(obj, state: State) -> None`\n that will be called anytime the associated state-check passes; in general, it is\n expected that this function will have side effects (e.g., sends an email). The\n first argument to this function is the `Task` or `Flow` it is attached to.\n - check (Callable): a function with signature `check(state: State) -> bool`\n that is used for determining when the callback function should be called\n\n Returns:\n - state_handler (Callable): a state handler function that can be attached to both Tasks\n and Flows\n\n Example:\n ```python\n from prefect import Task, Flow\n from prefect.utilities.notifications import callback_factory\n\n fn = lambda obj, state: print(state)\n check = lambda state: state.is_successful()\n callback = callback_factory(fn, check)\n\n t = Task(state_handlers=[callback])\n f = Flow(tasks=[t], state_handlers=[callback])\n f.run()\n # prints:\n # Success(\"Task run succeeded.\")\n # Success(\"All reference tasks succeeded.\")\n ```\n \"\"\"\n\n def state_handler(\n obj: Any,\n old_state: \"prefect.engine.state.State\",\n new_state: \"prefect.engine.state.State\",\n ) -> \"prefect.engine.state.State\":\n if check(new_state) is True:\n fn(obj, new_state)\n return new_state\n\n return state_handler\n\n\ndef email_message_formatter(\n tracked_obj: TrackedObjectType, state: \"prefect.engine.state.State\", email_to: str\n) -> str:\n if isinstance(state.result, Exception):\n msg = \"<pre>{}</pre>\".format(repr(state.result))\n else:\n msg = '\"{}\"'.format(state.message)\n\n html = \"\"\"\n <html>\n <head></head>\n <body>\n <table align=\"left\" border=\"0\" cellpadding=\"2px\" cellspacing=\"2px\">\n <tr>\n <td style=\"border-left: 2px solid {color};\">\n <img src=\"https://emoji.slack-edge.com/TAN3D79AL/prefect/2497370f58500a5a.png\">\n </td>\n <td style=\"border-left: 2px solid {color}; padding-left: 6px;\">\n {text}\n </td>\n </tr>\n </table>\n </body>\n </html>\n \"\"\"\n color = state.color\n text = \"\"\"\n <pre>{name}</pre> is now in a <font color=\"{color}\"><b>{state}</b></font> state\n <br><br>\n Message: {msg}\n \"\"\".format(\n name=tracked_obj.name, color=state.color, state=type(state).__name__, msg=msg\n )\n\n contents = MIMEMultipart(\"alternative\")\n contents.attach(MIMEText(text, \"plain\"))\n contents.attach(MIMEText(html.format(color=color, text=text), \"html\"))\n\n contents[\"Subject\"] = Header(\n \"Prefect state change notification for {}\".format(tracked_obj.name), \"UTF-8\"\n )\n contents[\"From\"] = \"[email protected]\"\n contents[\"To\"] = email_to\n\n return contents.as_string()\n\n\ndef slack_message_formatter(\n tracked_obj: TrackedObjectType,\n state: \"prefect.engine.state.State\",\n backend_info: bool = True,\n) -> dict:\n # see https://api.slack.com/docs/message-attachments\n fields = []\n if isinstance(state.result, Exception):\n value = \"```{}```\".format(repr(state.result))\n else:\n value = cast(str, state.message)\n if value is not None:\n fields.append({\"title\": \"Message\", \"value\": value, \"short\": False})\n\n notification_payload = {\n \"fallback\": \"State change notification\",\n \"color\": state.color,\n \"author_name\": \"Prefect\",\n \"author_link\": \"https://www.prefect.io/\",\n \"author_icon\": \"https://emoji.slack-edge.com/TAN3D79AL/prefect/2497370f58500a5a.png\",\n \"title\": type(state).__name__,\n \"fields\": fields,\n # \"title_link\": \"https://www.prefect.io/\",\n \"text\": \"{0} is now in a {1} state\".format(\n tracked_obj.name, type(state).__name__\n ),\n \"footer\": \"Prefect notification\",\n }\n\n if backend_info and prefect.context.get(\"flow_run_id\"):\n url = None\n\n if isinstance(tracked_obj, prefect.Flow):\n url = prefect.client.Client().get_cloud_url(\n \"flow-run\", prefect.context[\"flow_run_id\"], as_user=False\n )\n elif isinstance(tracked_obj, prefect.Task):\n url = prefect.client.Client().get_cloud_url(\n \"task-run\", prefect.context.get(\"task_run_id\", \"\"), as_user=False\n )\n\n if url:\n notification_payload.update(title_link=url)\n\n data = {\"attachments\": [notification_payload]}\n return data\n\n\n@curry\ndef gmail_notifier(\n tracked_obj: TrackedObjectType,\n old_state: \"prefect.engine.state.State\",\n new_state: \"prefect.engine.state.State\",\n ignore_states: list = None,\n only_states: list = None,\n) -> \"prefect.engine.state.State\":\n \"\"\"\n Email state change handler - configured to work solely with Gmail; works as a standalone\n state handler, or can be called from within a custom state handler. This function is\n curried meaning that it can be called multiple times to partially bind any keyword\n arguments (see example below).\n\n The username and password Gmail credentials will be taken from your `\"EMAIL_USERNAME\"` and\n `\"EMAIL_PASSWORD\"` secrets, respectively; note the username will also serve as the\n destination email address for the notification.\n\n Args:\n - tracked_obj (Task or Flow): Task or Flow object the handler is registered with\n - old_state (State): previous state of tracked object\n - new_state (State): new state of tracked object\n - ignore_states ([State], optional): list of `State` classes to ignore, e.g.,\n `[Running, Scheduled]`. If `new_state` is an instance of one of the passed states,\n no notification will occur.\n - only_states ([State], optional): similar to `ignore_states`, but instead _only_\n notifies you if the Task / Flow is in a state from the provided list of `State`\n classes\n\n Returns:\n - State: the `new_state` object that was provided\n\n Raises:\n - ValueError: if the email notification fails for any reason\n\n Example:\n ```python\n from prefect import task\n from prefect.utilities.notifications import gmail_notifier\n\n @task(state_handlers=[gmail_notifier(ignore_states=[Running])]) # uses currying\n def add(x, y):\n return x + y\n ```\n \"\"\"\n username = cast(str, prefect.client.Secret(\"EMAIL_USERNAME\").get())\n password = cast(str, prefect.client.Secret(\"EMAIL_PASSWORD\").get())\n ignore_states = ignore_states or []\n only_states = only_states or []\n\n if any([isinstance(new_state, ignored) for ignored in ignore_states]):\n return new_state\n\n if only_states and not any(\n [isinstance(new_state, included) for included in only_states]\n ):\n return new_state\n\n body = email_message_formatter(tracked_obj, new_state, username)\n\n server = smtplib.SMTP_SSL(\"smtp.gmail.com\", 465)\n server.login(username, password)\n try:\n server.sendmail(\"[email protected]\", username, body)\n except Exception:\n raise ValueError(\"Email notification for {} failed\".format(tracked_obj))\n finally:\n server.quit()\n\n return new_state\n\n\n@curry\ndef slack_notifier(\n tracked_obj: TrackedObjectType,\n old_state: \"prefect.engine.state.State\",\n new_state: \"prefect.engine.state.State\",\n ignore_states: list = None,\n only_states: list = None,\n webhook_secret: str = None,\n backend_info: bool = True,\n) -> \"prefect.engine.state.State\":\n \"\"\"\n Slack state change handler; requires having the Prefect slack app installed. Works as a\n standalone state handler, or can be called from within a custom state handler. This\n function is curried meaning that it can be called multiple times to partially bind any\n keyword arguments (see example below).\n\n Args:\n - tracked_obj (Task or Flow): Task or Flow object the handler is\n registered with\n - old_state (State): previous state of tracked object\n - new_state (State): new state of tracked object\n - ignore_states ([State], optional): list of `State` classes to ignore, e.g.,\n `[Running, Scheduled]`. If `new_state` is an instance of one of the passed states,\n no notification will occur.\n - only_states ([State], optional): similar to `ignore_states`, but instead _only_\n notifies you if the Task / Flow is in a state from the provided list of `State`\n classes\n - webhook_secret (str, optional): the name of the Prefect Secret that stores your slack\n webhook URL; defaults to `\"SLACK_WEBHOOK_URL\"`\n - backend_info (bool, optional): Whether to supply slack notification with urls\n pointing to backend pages; defaults to True\n\n Returns:\n - State: the `new_state` object that was provided\n\n Raises:\n - ValueError: if the slack notification fails for any reason\n\n Example:\n ```python\n from prefect import task\n from prefect.utilities.notifications import slack_notifier\n\n @task(state_handlers=[slack_notifier(ignore_states=[Running])]) # uses currying\n def add(x, y):\n return x + y\n ```\n \"\"\"\n webhook_url = cast(\n str, prefect.client.Secret(webhook_secret or \"SLACK_WEBHOOK_URL\").get()\n )\n ignore_states = ignore_states or []\n only_states = only_states or []\n\n if any([isinstance(new_state, ignored) for ignored in ignore_states]):\n return new_state\n\n if only_states and not any(\n [isinstance(new_state, included) for included in only_states]\n ):\n return new_state\n\n # 'import requests' is expensive time-wise, we should do this just-in-time to keep\n # the 'import prefect' time low\n import requests\n\n form_data = slack_message_formatter(tracked_obj, new_state, backend_info)\n r = requests.post(webhook_url, json=form_data)\n if not r.ok:\n raise ValueError(\"Slack notification for {} failed\".format(tracked_obj))\n return new_state\n", "path": "src/prefect/utilities/notifications/notifications.py"}]}
| 3,862 | 482 |
gh_patches_debug_12757
|
rasdani/github-patches
|
git_diff
|
dmlc__dgl-946
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Bug] DGLSubGraph.map_to_subgraph_nid error in v0.3
Hello,
The method map_to_subgraph_nid from DGLSubGraph seems to malfunction.
The following example code:
`import dgl`
`g=dgl.DGLGraph()`
`g.add_nodes(10)`
`h=g.subgraph([0,1,2,5,8])`
`h.map_to_subgraph_nid([0,8,2])`
produces the following error chain:
`File “.../python3.6/dist-packages/dgl/subgraph.py”, line 139, in map_to_subgraph_nid`
` return map_to_subgraph_nid(self._graph,utils.toindex(parent_vids)).tousertensor()`
`File “.../python3.6/dist-packages/dgl/graph_index.py”, line 1137, in map_to_subgraph_nid`
` return utils.toindex(_CAPI_DGLMapSubgraphNID(subgraph.induced_nodes.todgltensor(),`
`AttributeError: ‘GraphIndex’ object has no attribute ‘induced_nodes’
`
This bug does not occur in version 0.2 . The main difference I have found was that in v0.2 (in my code), `type(h._graph)` was
`<class ‘dgl.graph_index.SubGraphIndex’>`, instead of
`<class ‘dgl.graph_index.GraphIndex’>` as it is in v0.3 .
Am I using the library incorrectly? Is this behavior intended? If so, is there a replacement for the method?
Or is it a bug?
Thanks!
[Bug] DGLSubGraph.map_to_subgraph_nid error in v0.3
Hello,
The method map_to_subgraph_nid from DGLSubGraph seems to malfunction.
The following example code:
`import dgl`
`g=dgl.DGLGraph()`
`g.add_nodes(10)`
`h=g.subgraph([0,1,2,5,8])`
`h.map_to_subgraph_nid([0,8,2])`
produces the following error chain:
`File “.../python3.6/dist-packages/dgl/subgraph.py”, line 139, in map_to_subgraph_nid`
` return map_to_subgraph_nid(self._graph,utils.toindex(parent_vids)).tousertensor()`
`File “.../python3.6/dist-packages/dgl/graph_index.py”, line 1137, in map_to_subgraph_nid`
` return utils.toindex(_CAPI_DGLMapSubgraphNID(subgraph.induced_nodes.todgltensor(),`
`AttributeError: ‘GraphIndex’ object has no attribute ‘induced_nodes’
`
This bug does not occur in version 0.2 . The main difference I have found was that in v0.2 (in my code), `type(h._graph)` was
`<class ‘dgl.graph_index.SubGraphIndex’>`, instead of
`<class ‘dgl.graph_index.GraphIndex’>` as it is in v0.3 .
Am I using the library incorrectly? Is this behavior intended? If so, is there a replacement for the method?
Or is it a bug?
Thanks!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `python/dgl/subgraph.py`
Content:
```
1 """Class for subgraph data structure."""
2 from __future__ import absolute_import
3
4 from .frame import Frame, FrameRef
5 from .graph import DGLGraph
6 from . import utils
7 from .base import DGLError
8 from .graph_index import map_to_subgraph_nid
9
10 class DGLSubGraph(DGLGraph):
11 """The subgraph class.
12
13 There are two subgraph modes: shared and non-shared.
14
15 For the "non-shared" mode, the user needs to explicitly call
16 ``copy_from_parent`` to copy node/edge features from its parent graph.
17 * If the user tries to get node/edge features before ``copy_from_parent``,
18 s/he will get nothing.
19 * If the subgraph already has its own node/edge features, ``copy_from_parent``
20 will override them.
21 * Any update on the subgraph's node/edge features will not be seen
22 by the parent graph. As such, the memory consumption is of the order
23 of the subgraph size.
24 * To write the subgraph's node/edge features back to parent graph. There are two options:
25 (1) Use ``copy_to_parent`` API to write node/edge features back.
26 (2) [TODO] Use ``dgl.merge`` to merge multiple subgraphs back to one parent.
27
28 The "shared" mode is currently not supported.
29
30 The subgraph is read-only on structure; graph mutation is not allowed.
31
32 Parameters
33 ----------
34 parent : DGLGraph
35 The parent graph
36 sgi : SubgraphIndex
37 Internal subgraph data structure.
38 shared : bool, optional
39 Whether the subgraph shares node/edge features with the parent graph.
40 """
41 def __init__(self, parent, sgi, shared=False):
42 super(DGLSubGraph, self).__init__(graph_data=sgi.graph,
43 readonly=True)
44 if shared:
45 raise DGLError('Shared mode is not yet supported.')
46 self._parent = parent
47 self._parent_nid = sgi.induced_nodes
48 self._parent_eid = sgi.induced_edges
49
50 # override APIs
51 def add_nodes(self, num, data=None):
52 """Add nodes. Disabled because subgraph is read-only."""
53 raise DGLError('Readonly graph. Mutation is not allowed.')
54
55 def add_edge(self, u, v, data=None):
56 """Add one edge. Disabled because subgraph is read-only."""
57 raise DGLError('Readonly graph. Mutation is not allowed.')
58
59 def add_edges(self, u, v, data=None):
60 """Add many edges. Disabled because subgraph is read-only."""
61 raise DGLError('Readonly graph. Mutation is not allowed.')
62
63 @property
64 def parent_nid(self):
65 """Get the parent node ids.
66
67 The returned tensor can be used as a map from the node id
68 in this subgraph to the node id in the parent graph.
69
70 Returns
71 -------
72 Tensor
73 The parent node id array.
74 """
75 return self._parent_nid.tousertensor()
76
77 def _get_parent_eid(self):
78 # The parent eid might be lazily evaluated and thus may not
79 # be an index. Instead, it's a lambda function that returns
80 # an index.
81 if isinstance(self._parent_eid, utils.Index):
82 return self._parent_eid
83 else:
84 return self._parent_eid()
85
86 @property
87 def parent_eid(self):
88 """Get the parent edge ids.
89
90 The returned tensor can be used as a map from the edge id
91 in this subgraph to the edge id in the parent graph.
92
93 Returns
94 -------
95 Tensor
96 The parent edge id array.
97 """
98 return self._get_parent_eid().tousertensor()
99
100 def copy_to_parent(self, inplace=False):
101 """Write node/edge features to the parent graph.
102
103 Parameters
104 ----------
105 inplace : bool
106 If true, use inplace write (no gradient but faster)
107 """
108 self._parent._node_frame.update_rows(
109 self._parent_nid, self._node_frame, inplace=inplace)
110 if self._parent._edge_frame.num_rows != 0:
111 self._parent._edge_frame.update_rows(
112 self._get_parent_eid(), self._edge_frame, inplace=inplace)
113
114 def copy_from_parent(self):
115 """Copy node/edge features from the parent graph.
116
117 All old features will be removed.
118 """
119 if self._parent._node_frame.num_rows != 0 and self._parent._node_frame.num_columns != 0:
120 self._node_frame = FrameRef(Frame(
121 self._parent._node_frame[self._parent_nid]))
122 if self._parent._edge_frame.num_rows != 0 and self._parent._edge_frame.num_columns != 0:
123 self._edge_frame = FrameRef(Frame(
124 self._parent._edge_frame[self._get_parent_eid()]))
125
126 def map_to_subgraph_nid(self, parent_vids):
127 """Map the node Ids in the parent graph to the node Ids in the subgraph.
128
129 Parameters
130 ----------
131 parent_vids : list, tensor
132 The node ID array in the parent graph.
133
134 Returns
135 -------
136 tensor
137 The node ID array in the subgraph.
138 """
139 return map_to_subgraph_nid(self._graph, utils.toindex(parent_vids)).tousertensor()
140
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/python/dgl/subgraph.py b/python/dgl/subgraph.py
--- a/python/dgl/subgraph.py
+++ b/python/dgl/subgraph.py
@@ -46,6 +46,7 @@
self._parent = parent
self._parent_nid = sgi.induced_nodes
self._parent_eid = sgi.induced_edges
+ self._subgraph_index = sgi
# override APIs
def add_nodes(self, num, data=None):
@@ -136,4 +137,5 @@
tensor
The node ID array in the subgraph.
"""
- return map_to_subgraph_nid(self._graph, utils.toindex(parent_vids)).tousertensor()
+ v = map_to_subgraph_nid(self._subgraph_index, utils.toindex(parent_vids))
+ return v.tousertensor()
|
{"golden_diff": "diff --git a/python/dgl/subgraph.py b/python/dgl/subgraph.py\n--- a/python/dgl/subgraph.py\n+++ b/python/dgl/subgraph.py\n@@ -46,6 +46,7 @@\n self._parent = parent\n self._parent_nid = sgi.induced_nodes\n self._parent_eid = sgi.induced_edges\n+ self._subgraph_index = sgi\n \n # override APIs\n def add_nodes(self, num, data=None):\n@@ -136,4 +137,5 @@\n tensor\n The node ID array in the subgraph.\n \"\"\"\n- return map_to_subgraph_nid(self._graph, utils.toindex(parent_vids)).tousertensor()\n+ v = map_to_subgraph_nid(self._subgraph_index, utils.toindex(parent_vids))\n+ return v.tousertensor()\n", "issue": "[Bug] DGLSubGraph.map_to_subgraph_nid error in v0.3\nHello,\r\n\r\nThe method map_to_subgraph_nid from DGLSubGraph seems to malfunction. \r\n\r\nThe following example code:\r\n`import dgl`\r\n`g=dgl.DGLGraph()`\r\n`g.add_nodes(10)`\r\n`h=g.subgraph([0,1,2,5,8])`\r\n`h.map_to_subgraph_nid([0,8,2])`\r\n\r\n\r\nproduces the following error chain: \r\n`File \u201c.../python3.6/dist-packages/dgl/subgraph.py\u201d, line 139, in map_to_subgraph_nid`\r\n` return map_to_subgraph_nid(self._graph,utils.toindex(parent_vids)).tousertensor()`\r\n`File \u201c.../python3.6/dist-packages/dgl/graph_index.py\u201d, line 1137, in map_to_subgraph_nid`\r\n` return utils.toindex(_CAPI_DGLMapSubgraphNID(subgraph.induced_nodes.todgltensor(),`\r\n`AttributeError: \u2018GraphIndex\u2019 object has no attribute \u2018induced_nodes\u2019\r\n`\r\n\r\nThis bug does not occur in version 0.2 . The main difference I have found was that in v0.2 (in my code), `type(h._graph)` was \r\n`<class \u2018dgl.graph_index.SubGraphIndex\u2019>`, instead of \r\n`<class \u2018dgl.graph_index.GraphIndex\u2019>` as it is in v0.3 .\r\n\r\nAm I using the library incorrectly? Is this behavior intended? If so, is there a replacement for the method?\r\nOr is it a bug?\r\n\r\nThanks!\n[Bug] DGLSubGraph.map_to_subgraph_nid error in v0.3\nHello,\r\n\r\nThe method map_to_subgraph_nid from DGLSubGraph seems to malfunction. \r\n\r\nThe following example code:\r\n`import dgl`\r\n`g=dgl.DGLGraph()`\r\n`g.add_nodes(10)`\r\n`h=g.subgraph([0,1,2,5,8])`\r\n`h.map_to_subgraph_nid([0,8,2])`\r\n\r\n\r\nproduces the following error chain: \r\n`File \u201c.../python3.6/dist-packages/dgl/subgraph.py\u201d, line 139, in map_to_subgraph_nid`\r\n` return map_to_subgraph_nid(self._graph,utils.toindex(parent_vids)).tousertensor()`\r\n`File \u201c.../python3.6/dist-packages/dgl/graph_index.py\u201d, line 1137, in map_to_subgraph_nid`\r\n` return utils.toindex(_CAPI_DGLMapSubgraphNID(subgraph.induced_nodes.todgltensor(),`\r\n`AttributeError: \u2018GraphIndex\u2019 object has no attribute \u2018induced_nodes\u2019\r\n`\r\n\r\nThis bug does not occur in version 0.2 . The main difference I have found was that in v0.2 (in my code), `type(h._graph)` was \r\n`<class \u2018dgl.graph_index.SubGraphIndex\u2019>`, instead of \r\n`<class \u2018dgl.graph_index.GraphIndex\u2019>` as it is in v0.3 .\r\n\r\nAm I using the library incorrectly? Is this behavior intended? If so, is there a replacement for the method?\r\nOr is it a bug?\r\n\r\nThanks!\n", "before_files": [{"content": "\"\"\"Class for subgraph data structure.\"\"\"\nfrom __future__ import absolute_import\n\nfrom .frame import Frame, FrameRef\nfrom .graph import DGLGraph\nfrom . import utils\nfrom .base import DGLError\nfrom .graph_index import map_to_subgraph_nid\n\nclass DGLSubGraph(DGLGraph):\n \"\"\"The subgraph class.\n\n There are two subgraph modes: shared and non-shared.\n\n For the \"non-shared\" mode, the user needs to explicitly call\n ``copy_from_parent`` to copy node/edge features from its parent graph.\n * If the user tries to get node/edge features before ``copy_from_parent``,\n s/he will get nothing.\n * If the subgraph already has its own node/edge features, ``copy_from_parent``\n will override them.\n * Any update on the subgraph's node/edge features will not be seen\n by the parent graph. As such, the memory consumption is of the order\n of the subgraph size.\n * To write the subgraph's node/edge features back to parent graph. There are two options:\n (1) Use ``copy_to_parent`` API to write node/edge features back.\n (2) [TODO] Use ``dgl.merge`` to merge multiple subgraphs back to one parent.\n\n The \"shared\" mode is currently not supported.\n\n The subgraph is read-only on structure; graph mutation is not allowed.\n\n Parameters\n ----------\n parent : DGLGraph\n The parent graph\n sgi : SubgraphIndex\n Internal subgraph data structure.\n shared : bool, optional\n Whether the subgraph shares node/edge features with the parent graph.\n \"\"\"\n def __init__(self, parent, sgi, shared=False):\n super(DGLSubGraph, self).__init__(graph_data=sgi.graph,\n readonly=True)\n if shared:\n raise DGLError('Shared mode is not yet supported.')\n self._parent = parent\n self._parent_nid = sgi.induced_nodes\n self._parent_eid = sgi.induced_edges\n\n # override APIs\n def add_nodes(self, num, data=None):\n \"\"\"Add nodes. Disabled because subgraph is read-only.\"\"\"\n raise DGLError('Readonly graph. Mutation is not allowed.')\n\n def add_edge(self, u, v, data=None):\n \"\"\"Add one edge. Disabled because subgraph is read-only.\"\"\"\n raise DGLError('Readonly graph. Mutation is not allowed.')\n\n def add_edges(self, u, v, data=None):\n \"\"\"Add many edges. Disabled because subgraph is read-only.\"\"\"\n raise DGLError('Readonly graph. Mutation is not allowed.')\n\n @property\n def parent_nid(self):\n \"\"\"Get the parent node ids.\n\n The returned tensor can be used as a map from the node id\n in this subgraph to the node id in the parent graph.\n\n Returns\n -------\n Tensor\n The parent node id array.\n \"\"\"\n return self._parent_nid.tousertensor()\n\n def _get_parent_eid(self):\n # The parent eid might be lazily evaluated and thus may not\n # be an index. Instead, it's a lambda function that returns\n # an index.\n if isinstance(self._parent_eid, utils.Index):\n return self._parent_eid\n else:\n return self._parent_eid()\n\n @property\n def parent_eid(self):\n \"\"\"Get the parent edge ids.\n\n The returned tensor can be used as a map from the edge id\n in this subgraph to the edge id in the parent graph.\n\n Returns\n -------\n Tensor\n The parent edge id array.\n \"\"\"\n return self._get_parent_eid().tousertensor()\n\n def copy_to_parent(self, inplace=False):\n \"\"\"Write node/edge features to the parent graph.\n\n Parameters\n ----------\n inplace : bool\n If true, use inplace write (no gradient but faster)\n \"\"\"\n self._parent._node_frame.update_rows(\n self._parent_nid, self._node_frame, inplace=inplace)\n if self._parent._edge_frame.num_rows != 0:\n self._parent._edge_frame.update_rows(\n self._get_parent_eid(), self._edge_frame, inplace=inplace)\n\n def copy_from_parent(self):\n \"\"\"Copy node/edge features from the parent graph.\n\n All old features will be removed.\n \"\"\"\n if self._parent._node_frame.num_rows != 0 and self._parent._node_frame.num_columns != 0:\n self._node_frame = FrameRef(Frame(\n self._parent._node_frame[self._parent_nid]))\n if self._parent._edge_frame.num_rows != 0 and self._parent._edge_frame.num_columns != 0:\n self._edge_frame = FrameRef(Frame(\n self._parent._edge_frame[self._get_parent_eid()]))\n\n def map_to_subgraph_nid(self, parent_vids):\n \"\"\"Map the node Ids in the parent graph to the node Ids in the subgraph.\n\n Parameters\n ----------\n parent_vids : list, tensor\n The node ID array in the parent graph.\n\n Returns\n -------\n tensor\n The node ID array in the subgraph.\n \"\"\"\n return map_to_subgraph_nid(self._graph, utils.toindex(parent_vids)).tousertensor()\n", "path": "python/dgl/subgraph.py"}], "after_files": [{"content": "\"\"\"Class for subgraph data structure.\"\"\"\nfrom __future__ import absolute_import\n\nfrom .frame import Frame, FrameRef\nfrom .graph import DGLGraph\nfrom . import utils\nfrom .base import DGLError\nfrom .graph_index import map_to_subgraph_nid\n\nclass DGLSubGraph(DGLGraph):\n \"\"\"The subgraph class.\n\n There are two subgraph modes: shared and non-shared.\n\n For the \"non-shared\" mode, the user needs to explicitly call\n ``copy_from_parent`` to copy node/edge features from its parent graph.\n * If the user tries to get node/edge features before ``copy_from_parent``,\n s/he will get nothing.\n * If the subgraph already has its own node/edge features, ``copy_from_parent``\n will override them.\n * Any update on the subgraph's node/edge features will not be seen\n by the parent graph. As such, the memory consumption is of the order\n of the subgraph size.\n * To write the subgraph's node/edge features back to parent graph. There are two options:\n (1) Use ``copy_to_parent`` API to write node/edge features back.\n (2) [TODO] Use ``dgl.merge`` to merge multiple subgraphs back to one parent.\n\n The \"shared\" mode is currently not supported.\n\n The subgraph is read-only on structure; graph mutation is not allowed.\n\n Parameters\n ----------\n parent : DGLGraph\n The parent graph\n sgi : SubgraphIndex\n Internal subgraph data structure.\n shared : bool, optional\n Whether the subgraph shares node/edge features with the parent graph.\n \"\"\"\n def __init__(self, parent, sgi, shared=False):\n super(DGLSubGraph, self).__init__(graph_data=sgi.graph,\n readonly=True)\n if shared:\n raise DGLError('Shared mode is not yet supported.')\n self._parent = parent\n self._parent_nid = sgi.induced_nodes\n self._parent_eid = sgi.induced_edges\n self._subgraph_index = sgi\n\n # override APIs\n def add_nodes(self, num, data=None):\n \"\"\"Add nodes. Disabled because subgraph is read-only.\"\"\"\n raise DGLError('Readonly graph. Mutation is not allowed.')\n\n def add_edge(self, u, v, data=None):\n \"\"\"Add one edge. Disabled because subgraph is read-only.\"\"\"\n raise DGLError('Readonly graph. Mutation is not allowed.')\n\n def add_edges(self, u, v, data=None):\n \"\"\"Add many edges. Disabled because subgraph is read-only.\"\"\"\n raise DGLError('Readonly graph. Mutation is not allowed.')\n\n @property\n def parent_nid(self):\n \"\"\"Get the parent node ids.\n\n The returned tensor can be used as a map from the node id\n in this subgraph to the node id in the parent graph.\n\n Returns\n -------\n Tensor\n The parent node id array.\n \"\"\"\n return self._parent_nid.tousertensor()\n\n def _get_parent_eid(self):\n # The parent eid might be lazily evaluated and thus may not\n # be an index. Instead, it's a lambda function that returns\n # an index.\n if isinstance(self._parent_eid, utils.Index):\n return self._parent_eid\n else:\n return self._parent_eid()\n\n @property\n def parent_eid(self):\n \"\"\"Get the parent edge ids.\n\n The returned tensor can be used as a map from the edge id\n in this subgraph to the edge id in the parent graph.\n\n Returns\n -------\n Tensor\n The parent edge id array.\n \"\"\"\n return self._get_parent_eid().tousertensor()\n\n def copy_to_parent(self, inplace=False):\n \"\"\"Write node/edge features to the parent graph.\n\n Parameters\n ----------\n inplace : bool\n If true, use inplace write (no gradient but faster)\n \"\"\"\n self._parent._node_frame.update_rows(\n self._parent_nid, self._node_frame, inplace=inplace)\n if self._parent._edge_frame.num_rows != 0:\n self._parent._edge_frame.update_rows(\n self._get_parent_eid(), self._edge_frame, inplace=inplace)\n\n def copy_from_parent(self):\n \"\"\"Copy node/edge features from the parent graph.\n\n All old features will be removed.\n \"\"\"\n if self._parent._node_frame.num_rows != 0 and self._parent._node_frame.num_columns != 0:\n self._node_frame = FrameRef(Frame(\n self._parent._node_frame[self._parent_nid]))\n if self._parent._edge_frame.num_rows != 0 and self._parent._edge_frame.num_columns != 0:\n self._edge_frame = FrameRef(Frame(\n self._parent._edge_frame[self._get_parent_eid()]))\n\n def map_to_subgraph_nid(self, parent_vids):\n \"\"\"Map the node Ids in the parent graph to the node Ids in the subgraph.\n\n Parameters\n ----------\n parent_vids : list, tensor\n The node ID array in the parent graph.\n\n Returns\n -------\n tensor\n The node ID array in the subgraph.\n \"\"\"\n v = map_to_subgraph_nid(self._subgraph_index, utils.toindex(parent_vids))\n return v.tousertensor()\n", "path": "python/dgl/subgraph.py"}]}
| 2,455 | 194 |
gh_patches_debug_15824
|
rasdani/github-patches
|
git_diff
|
Qiskit__qiskit-4591
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Using a numpy integer type as an index for a QuantumRegister fails
### Information
- **Qiskit Terra version**: 0.11.1
- **Python version**: 3.7.6
- **Operating system**: Ubuntu 18.04.4 LTS
### What is the current behavior?
An error is raised:
```
File "/lib/python3.7/site-packages/qiskit/circuit/register.py", line 90, in __getitem__
raise CircuitError("expected integer or slice index into register")
qiskit.circuit.exceptions.CircuitError: 'expected integer or slice index into register'
```
### Steps to reproduce the problem
```python
from qiskit import QuantumRegister
import numpy as np
qr = QuantumRegister(3)
qubit_index = np.int64(0)
qubit = qr[qubit_index]
```
### What is the expected behavior?
Since numpy is used extensively in scientific programming, the type checking should not be as strict.
### Suggested solutions
Change line 89 of register.py from:
```python
if not isinstance(key, (int, slice, list)):
```
to
```python
if not isinstance(key, (int, slice, list, np.integer)):
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `qiskit/circuit/register.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # This code is part of Qiskit.
4 #
5 # (C) Copyright IBM 2017.
6 #
7 # This code is licensed under the Apache License, Version 2.0. You may
8 # obtain a copy of this license in the LICENSE.txt file in the root directory
9 # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
10 #
11 # Any modifications or derivative works of this code must retain this
12 # copyright notice, and modified files need to carry a notice indicating
13 # that they have been altered from the originals.
14
15 # pylint: disable=not-callable
16
17 """
18 Base register reference object.
19 """
20 import re
21 import itertools
22
23 from qiskit.circuit.exceptions import CircuitError
24
25
26 class Register:
27 """Implement a generic register."""
28
29 __slots__ = ['_name', '_size', '_bits', '_hash']
30
31 # Counter for the number of instances in this class.
32 instances_counter = itertools.count()
33 # Prefix to use for auto naming.
34 prefix = 'reg'
35 bit_type = None
36
37 def __init__(self, size, name=None):
38 """Create a new generic register.
39 """
40
41 # validate (or cast) size
42 try:
43 size = int(size)
44 except Exception:
45 raise CircuitError("Register size must be castable to an int (%s '%s' was provided)"
46 % (type(size).__name__, size))
47 if size <= 0:
48 raise CircuitError("Register size must be positive (%s '%s' was provided)"
49 % (type(size).__name__, size))
50
51 # validate (or cast) name
52 if name is None:
53 name = '%s%i' % (self.prefix, next(self.instances_counter))
54 else:
55 try:
56 name = str(name)
57 except Exception:
58 raise CircuitError("The circuit name should be castable to a string "
59 "(or None for autogenerate a name).")
60 name_format = re.compile('[a-z][a-zA-Z0-9_]*')
61 if name_format.match(name) is None:
62 raise CircuitError("%s is an invalid OPENQASM register name." % name)
63
64 self._name = name
65 self._size = size
66
67 self._hash = hash((type(self), self._name, self._size))
68 self._bits = [self.bit_type(self, idx) for idx in range(size)]
69
70 def _update_bits_hash(self):
71 for bit in self._bits:
72 bit._update_hash()
73
74 @property
75 def name(self):
76 """Get the register name."""
77 return self._name
78
79 @name.setter
80 def name(self, value):
81 """Set the register name."""
82 self._name = value
83 self._hash = hash((type(self), self._name, self._size))
84 self._update_bits_hash()
85
86 @property
87 def size(self):
88 """Get the register size."""
89 return self._size
90
91 @size.setter
92 def size(self, value):
93 """Set the register size."""
94 self._size = value
95 self._hash = hash((type(self), self._name, self._size))
96 self._update_bits_hash()
97
98 def __repr__(self):
99 """Return the official string representing the register."""
100 return "%s(%d, '%s')" % (self.__class__.__qualname__, self.size, self.name)
101
102 def __len__(self):
103 """Return register size."""
104 return self._size
105
106 def __getitem__(self, key):
107 """
108 Arg:
109 bit_type (Qubit or Clbit): a constructor type return element/s.
110 key (int or slice or list): index of the clbit to be retrieved.
111
112 Returns:
113 Qubit or Clbit or list(Qubit) or list(Clbit): a Qubit or Clbit instance if
114 key is int. If key is a slice, returns a list of these instances.
115
116 Raises:
117 CircuitError: if the `key` is not an integer.
118 QiskitIndexError: if the `key` is not in the range `(0, self.size)`.
119 """
120 if not isinstance(key, (int, slice, list)):
121 raise CircuitError("expected integer or slice index into register")
122 if isinstance(key, slice):
123 return self._bits[key]
124 elif isinstance(key, list): # list of qubit indices
125 if max(key) < len(self):
126 return [self._bits[idx] for idx in key]
127 else:
128 raise CircuitError('register index out of range')
129 else:
130 return self._bits[key]
131
132 def __iter__(self):
133 for bit in range(self._size):
134 yield self[bit]
135
136 def __eq__(self, other):
137 """Two Registers are the same if they are of the same type
138 (i.e. quantum/classical), and have the same name and size.
139
140 Args:
141 other (Register): other Register
142
143 Returns:
144 bool: `self` and `other` are equal.
145 """
146 res = False
147 if type(self) is type(other) and \
148 self._name == other._name and \
149 self._size == other._size:
150 res = True
151 return res
152
153 def __hash__(self):
154 """Make object hashable, based on the name and size to hash."""
155 return self._hash
156
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/qiskit/circuit/register.py b/qiskit/circuit/register.py
--- a/qiskit/circuit/register.py
+++ b/qiskit/circuit/register.py
@@ -19,6 +19,7 @@
"""
import re
import itertools
+import numbers
from qiskit.circuit.exceptions import CircuitError
@@ -117,7 +118,7 @@
CircuitError: if the `key` is not an integer.
QiskitIndexError: if the `key` is not in the range `(0, self.size)`.
"""
- if not isinstance(key, (int, slice, list)):
+ if not isinstance(key, (numbers.Integral, slice, list)):
raise CircuitError("expected integer or slice index into register")
if isinstance(key, slice):
return self._bits[key]
|
{"golden_diff": "diff --git a/qiskit/circuit/register.py b/qiskit/circuit/register.py\n--- a/qiskit/circuit/register.py\n+++ b/qiskit/circuit/register.py\n@@ -19,6 +19,7 @@\n \"\"\"\n import re\n import itertools\n+import numbers\n \n from qiskit.circuit.exceptions import CircuitError\n \n@@ -117,7 +118,7 @@\n CircuitError: if the `key` is not an integer.\n QiskitIndexError: if the `key` is not in the range `(0, self.size)`.\n \"\"\"\n- if not isinstance(key, (int, slice, list)):\n+ if not isinstance(key, (numbers.Integral, slice, list)):\n raise CircuitError(\"expected integer or slice index into register\")\n if isinstance(key, slice):\n return self._bits[key]\n", "issue": "Using a numpy integer type as an index for a QuantumRegister fails\n### Information\r\n\r\n- **Qiskit Terra version**: 0.11.1\r\n- **Python version**: 3.7.6\r\n- **Operating system**: Ubuntu 18.04.4 LTS\r\n\r\n### What is the current behavior?\r\n\r\nAn error is raised:\r\n\r\n```\r\nFile \"/lib/python3.7/site-packages/qiskit/circuit/register.py\", line 90, in __getitem__\r\n raise CircuitError(\"expected integer or slice index into register\")\r\nqiskit.circuit.exceptions.CircuitError: 'expected integer or slice index into register'\r\n```\r\n\r\n### Steps to reproduce the problem\r\n\r\n```python\r\nfrom qiskit import QuantumRegister\r\nimport numpy as np\r\nqr = QuantumRegister(3)\r\nqubit_index = np.int64(0)\r\nqubit = qr[qubit_index]\r\n```\r\n\r\n### What is the expected behavior?\r\n\r\nSince numpy is used extensively in scientific programming, the type checking should not be as strict.\r\n\r\n### Suggested solutions\r\n\r\nChange line 89 of register.py from:\r\n\r\n```python\r\nif not isinstance(key, (int, slice, list)):\r\n```\r\n\r\nto\r\n\r\n```python\r\nif not isinstance(key, (int, slice, list, np.integer)):\r\n```\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# This code is part of Qiskit.\n#\n# (C) Copyright IBM 2017.\n#\n# This code is licensed under the Apache License, Version 2.0. You may\n# obtain a copy of this license in the LICENSE.txt file in the root directory\n# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.\n#\n# Any modifications or derivative works of this code must retain this\n# copyright notice, and modified files need to carry a notice indicating\n# that they have been altered from the originals.\n\n# pylint: disable=not-callable\n\n\"\"\"\nBase register reference object.\n\"\"\"\nimport re\nimport itertools\n\nfrom qiskit.circuit.exceptions import CircuitError\n\n\nclass Register:\n \"\"\"Implement a generic register.\"\"\"\n\n __slots__ = ['_name', '_size', '_bits', '_hash']\n\n # Counter for the number of instances in this class.\n instances_counter = itertools.count()\n # Prefix to use for auto naming.\n prefix = 'reg'\n bit_type = None\n\n def __init__(self, size, name=None):\n \"\"\"Create a new generic register.\n \"\"\"\n\n # validate (or cast) size\n try:\n size = int(size)\n except Exception:\n raise CircuitError(\"Register size must be castable to an int (%s '%s' was provided)\"\n % (type(size).__name__, size))\n if size <= 0:\n raise CircuitError(\"Register size must be positive (%s '%s' was provided)\"\n % (type(size).__name__, size))\n\n # validate (or cast) name\n if name is None:\n name = '%s%i' % (self.prefix, next(self.instances_counter))\n else:\n try:\n name = str(name)\n except Exception:\n raise CircuitError(\"The circuit name should be castable to a string \"\n \"(or None for autogenerate a name).\")\n name_format = re.compile('[a-z][a-zA-Z0-9_]*')\n if name_format.match(name) is None:\n raise CircuitError(\"%s is an invalid OPENQASM register name.\" % name)\n\n self._name = name\n self._size = size\n\n self._hash = hash((type(self), self._name, self._size))\n self._bits = [self.bit_type(self, idx) for idx in range(size)]\n\n def _update_bits_hash(self):\n for bit in self._bits:\n bit._update_hash()\n\n @property\n def name(self):\n \"\"\"Get the register name.\"\"\"\n return self._name\n\n @name.setter\n def name(self, value):\n \"\"\"Set the register name.\"\"\"\n self._name = value\n self._hash = hash((type(self), self._name, self._size))\n self._update_bits_hash()\n\n @property\n def size(self):\n \"\"\"Get the register size.\"\"\"\n return self._size\n\n @size.setter\n def size(self, value):\n \"\"\"Set the register size.\"\"\"\n self._size = value\n self._hash = hash((type(self), self._name, self._size))\n self._update_bits_hash()\n\n def __repr__(self):\n \"\"\"Return the official string representing the register.\"\"\"\n return \"%s(%d, '%s')\" % (self.__class__.__qualname__, self.size, self.name)\n\n def __len__(self):\n \"\"\"Return register size.\"\"\"\n return self._size\n\n def __getitem__(self, key):\n \"\"\"\n Arg:\n bit_type (Qubit or Clbit): a constructor type return element/s.\n key (int or slice or list): index of the clbit to be retrieved.\n\n Returns:\n Qubit or Clbit or list(Qubit) or list(Clbit): a Qubit or Clbit instance if\n key is int. If key is a slice, returns a list of these instances.\n\n Raises:\n CircuitError: if the `key` is not an integer.\n QiskitIndexError: if the `key` is not in the range `(0, self.size)`.\n \"\"\"\n if not isinstance(key, (int, slice, list)):\n raise CircuitError(\"expected integer or slice index into register\")\n if isinstance(key, slice):\n return self._bits[key]\n elif isinstance(key, list): # list of qubit indices\n if max(key) < len(self):\n return [self._bits[idx] for idx in key]\n else:\n raise CircuitError('register index out of range')\n else:\n return self._bits[key]\n\n def __iter__(self):\n for bit in range(self._size):\n yield self[bit]\n\n def __eq__(self, other):\n \"\"\"Two Registers are the same if they are of the same type\n (i.e. quantum/classical), and have the same name and size.\n\n Args:\n other (Register): other Register\n\n Returns:\n bool: `self` and `other` are equal.\n \"\"\"\n res = False\n if type(self) is type(other) and \\\n self._name == other._name and \\\n self._size == other._size:\n res = True\n return res\n\n def __hash__(self):\n \"\"\"Make object hashable, based on the name and size to hash.\"\"\"\n return self._hash\n", "path": "qiskit/circuit/register.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# This code is part of Qiskit.\n#\n# (C) Copyright IBM 2017.\n#\n# This code is licensed under the Apache License, Version 2.0. You may\n# obtain a copy of this license in the LICENSE.txt file in the root directory\n# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.\n#\n# Any modifications or derivative works of this code must retain this\n# copyright notice, and modified files need to carry a notice indicating\n# that they have been altered from the originals.\n\n# pylint: disable=not-callable\n\n\"\"\"\nBase register reference object.\n\"\"\"\nimport re\nimport itertools\nimport numbers\n\nfrom qiskit.circuit.exceptions import CircuitError\n\n\nclass Register:\n \"\"\"Implement a generic register.\"\"\"\n\n __slots__ = ['_name', '_size', '_bits', '_hash']\n\n # Counter for the number of instances in this class.\n instances_counter = itertools.count()\n # Prefix to use for auto naming.\n prefix = 'reg'\n bit_type = None\n\n def __init__(self, size, name=None):\n \"\"\"Create a new generic register.\n \"\"\"\n\n # validate (or cast) size\n try:\n size = int(size)\n except Exception:\n raise CircuitError(\"Register size must be castable to an int (%s '%s' was provided)\"\n % (type(size).__name__, size))\n if size <= 0:\n raise CircuitError(\"Register size must be positive (%s '%s' was provided)\"\n % (type(size).__name__, size))\n\n # validate (or cast) name\n if name is None:\n name = '%s%i' % (self.prefix, next(self.instances_counter))\n else:\n try:\n name = str(name)\n except Exception:\n raise CircuitError(\"The circuit name should be castable to a string \"\n \"(or None for autogenerate a name).\")\n name_format = re.compile('[a-z][a-zA-Z0-9_]*')\n if name_format.match(name) is None:\n raise CircuitError(\"%s is an invalid OPENQASM register name.\" % name)\n\n self._name = name\n self._size = size\n\n self._hash = hash((type(self), self._name, self._size))\n self._bits = [self.bit_type(self, idx) for idx in range(size)]\n\n def _update_bits_hash(self):\n for bit in self._bits:\n bit._update_hash()\n\n @property\n def name(self):\n \"\"\"Get the register name.\"\"\"\n return self._name\n\n @name.setter\n def name(self, value):\n \"\"\"Set the register name.\"\"\"\n self._name = value\n self._hash = hash((type(self), self._name, self._size))\n self._update_bits_hash()\n\n @property\n def size(self):\n \"\"\"Get the register size.\"\"\"\n return self._size\n\n @size.setter\n def size(self, value):\n \"\"\"Set the register size.\"\"\"\n self._size = value\n self._hash = hash((type(self), self._name, self._size))\n self._update_bits_hash()\n\n def __repr__(self):\n \"\"\"Return the official string representing the register.\"\"\"\n return \"%s(%d, '%s')\" % (self.__class__.__qualname__, self.size, self.name)\n\n def __len__(self):\n \"\"\"Return register size.\"\"\"\n return self._size\n\n def __getitem__(self, key):\n \"\"\"\n Arg:\n bit_type (Qubit or Clbit): a constructor type return element/s.\n key (int or slice or list): index of the clbit to be retrieved.\n\n Returns:\n Qubit or Clbit or list(Qubit) or list(Clbit): a Qubit or Clbit instance if\n key is int. If key is a slice, returns a list of these instances.\n\n Raises:\n CircuitError: if the `key` is not an integer.\n QiskitIndexError: if the `key` is not in the range `(0, self.size)`.\n \"\"\"\n if not isinstance(key, (numbers.Integral, slice, list)):\n raise CircuitError(\"expected integer or slice index into register\")\n if isinstance(key, slice):\n return self._bits[key]\n elif isinstance(key, list): # list of qubit indices\n if max(key) < len(self):\n return [self._bits[idx] for idx in key]\n else:\n raise CircuitError('register index out of range')\n else:\n return self._bits[key]\n\n def __iter__(self):\n for bit in range(self._size):\n yield self[bit]\n\n def __eq__(self, other):\n \"\"\"Two Registers are the same if they are of the same type\n (i.e. quantum/classical), and have the same name and size.\n\n Args:\n other (Register): other Register\n\n Returns:\n bool: `self` and `other` are equal.\n \"\"\"\n res = False\n if type(self) is type(other) and \\\n self._name == other._name and \\\n self._size == other._size:\n res = True\n return res\n\n def __hash__(self):\n \"\"\"Make object hashable, based on the name and size to hash.\"\"\"\n return self._hash\n", "path": "qiskit/circuit/register.py"}]}
| 2,062 | 185 |
gh_patches_debug_26993
|
rasdani/github-patches
|
git_diff
|
mozilla__pontoon-3146
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Speed up Project Notifications tab
The Notifications tab takes a while to load if multiple manual notifications have been sent to project contributors.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pontoon/projects/views.py`
Content:
```
1 import uuid
2 from django.conf import settings
3 from django.contrib.auth.models import User
4 from django.contrib.contenttypes.models import ContentType
5 from django.core.exceptions import ImproperlyConfigured
6 from django.db import transaction
7 from django.db.models import Q
8 from django.http import Http404, JsonResponse, HttpResponseRedirect
9 from django.shortcuts import get_object_or_404, render
10 from django.views.generic.detail import DetailView
11
12 from guardian.decorators import permission_required_or_403
13 from notifications.models import Notification
14 from notifications.signals import notify
15
16 from pontoon.base.models import Project, Locale
17 from pontoon.base.utils import require_AJAX, split_ints, get_project_or_redirect
18 from pontoon.contributors.views import ContributorsMixin
19 from pontoon.insights.utils import get_insights
20 from pontoon.projects import forms
21 from pontoon.tags.utils import Tags
22
23
24 def projects(request):
25 """List all active projects."""
26 projects = (
27 Project.objects.visible()
28 .visible_for(request.user)
29 .prefetch_related(
30 "latest_translation__user", "latest_translation__approved_user"
31 )
32 .order_by("name")
33 )
34
35 if not projects:
36 return render(request, "no_projects.html", {"title": "Projects"})
37
38 return render(
39 request,
40 "projects/projects.html",
41 {"projects": projects, "top_instances": projects.get_top_instances()},
42 )
43
44
45 def project(request, slug):
46 """Project dashboard."""
47 project = get_project_or_redirect(
48 slug, "pontoon.projects.project", "slug", request.user
49 )
50 if isinstance(project, HttpResponseRedirect):
51 return project
52
53 project_locales = project.project_locale
54 chart = project
55
56 # Only include filtered teams if provided
57 teams = request.GET.get("teams", "").split(",")
58 filtered_locales = Locale.objects.filter(code__in=teams)
59 if filtered_locales.exists():
60 project_locales = project_locales.filter(locale__in=filtered_locales)
61 chart = project_locales.aggregated_stats()
62
63 return render(
64 request,
65 "projects/project.html",
66 {
67 "chart": chart,
68 "count": project_locales.count(),
69 "project": project,
70 "tags_count": (
71 project.tag_set.filter(resources__isnull=False).distinct().count()
72 if project.tags_enabled
73 else None
74 ),
75 },
76 )
77
78
79 @require_AJAX
80 def ajax_teams(request, slug):
81 """Teams tab."""
82 project = get_object_or_404(
83 Project.objects.visible_for(request.user).available(), slug=slug
84 )
85
86 locales = Locale.objects.available()
87
88 # Only include filtered teams if provided
89 teams = request.GET.get("teams", "").split(",")
90 filtered_locales = Locale.objects.filter(code__in=teams)
91 if filtered_locales.exists():
92 locales = locales.filter(pk__in=filtered_locales)
93
94 locales = locales.prefetch_project_locale(project).order_by("name")
95
96 return render(
97 request,
98 "projects/includes/teams.html",
99 {"project": project, "locales": locales},
100 )
101
102
103 @require_AJAX
104 def ajax_tags(request, slug):
105 """Tags tab."""
106 project = get_object_or_404(Project.objects.visible_for(request.user), slug=slug)
107
108 if not project.tags_enabled:
109 raise Http404
110
111 tags = Tags(project=project).get()
112
113 return render(
114 request,
115 "projects/includes/tags.html",
116 {"project": project, "tags": tags},
117 )
118
119
120 @require_AJAX
121 def ajax_insights(request, slug):
122 """Insights tab."""
123 if not settings.ENABLE_INSIGHTS:
124 raise ImproperlyConfigured("ENABLE_INSIGHTS variable not set in settings.")
125
126 project = get_object_or_404(
127 Project.objects.visible_for(request.user).available(), slug=slug
128 )
129 insights = get_insights(project=project)
130
131 return render(request, "projects/includes/insights.html", insights)
132
133
134 @require_AJAX
135 def ajax_info(request, slug):
136 """Info tab."""
137 project = get_object_or_404(
138 Project.objects.visible_for(request.user).available(), slug=slug
139 )
140
141 return render(request, "projects/includes/info.html", {"project": project})
142
143
144 @permission_required_or_403("base.can_manage_project")
145 @transaction.atomic
146 @require_AJAX
147 def ajax_notifications(request, slug):
148 """Notifications tab."""
149 project = get_object_or_404(
150 Project.objects.visible_for(request.user).available(), slug=slug
151 )
152 available_locales = project.locales.prefetch_project_locale(project).order_by(
153 "name"
154 )
155
156 # Send notifications
157 if request.method == "POST":
158 form = forms.NotificationsForm(request.POST)
159
160 if not form.is_valid():
161 return JsonResponse(dict(form.errors.items()))
162
163 contributors = User.objects.filter(
164 translation__entity__resource__project=project,
165 )
166
167 # For performance reasons, only filter contributors for selected
168 # locales if different from all project locales
169 available_ids = sorted(list(available_locales.values_list("id", flat=True)))
170 selected_ids = sorted(split_ints(form.cleaned_data.get("selected_locales")))
171
172 if available_ids != selected_ids:
173 contributors = User.objects.filter(
174 translation__entity__resource__project=project,
175 translation__locale__in=available_locales.filter(id__in=selected_ids),
176 )
177
178 identifier = uuid.uuid4().hex
179 for contributor in contributors.distinct():
180 notify.send(
181 request.user,
182 recipient=contributor,
183 verb="has sent a message in",
184 target=project,
185 description=form.cleaned_data.get("message"),
186 identifier=identifier,
187 )
188
189 # Detect previously sent notifications using a unique identifier
190 # TODO: We should simplify this with a custom Notifications model
191 notifications_map = {}
192
193 for notification in Notification.objects.filter(
194 description__isnull=False,
195 target_content_type=ContentType.objects.get_for_model(project),
196 target_object_id=project.id,
197 ):
198 identifier = notification.data["identifier"]
199 if identifier not in notifications_map:
200 notifications_map[identifier] = notification
201
202 notifications = list(notifications_map.values())
203 notifications.sort(key=lambda x: x.timestamp, reverse=True)
204
205 # Recipient shortcuts
206 incomplete = []
207 complete = []
208 for available_locale in available_locales:
209 completion_percent = available_locale.get_chart(project)["completion_percent"]
210 if completion_percent == 100:
211 complete.append(available_locale.pk)
212 else:
213 incomplete.append(available_locale.pk)
214
215 return render(
216 request,
217 "projects/includes/manual_notifications.html",
218 {
219 "form": forms.NotificationsForm(),
220 "project": project,
221 "available_locales": available_locales,
222 "notifications": notifications,
223 "incomplete": incomplete,
224 "complete": complete,
225 },
226 )
227
228
229 class ProjectContributorsView(ContributorsMixin, DetailView):
230 """
231 Renders view of contributors for the project.
232 """
233
234 template_name = "projects/includes/contributors.html"
235 model = Project
236
237 def get_queryset(self):
238 return super().get_queryset().visible_for(self.request.user)
239
240 def get_context_object_name(self, obj):
241 return "project"
242
243 def contributors_filter(self, **kwargs):
244 return Q(entity__resource__project=self.object)
245
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pontoon/projects/views.py b/pontoon/projects/views.py
--- a/pontoon/projects/views.py
+++ b/pontoon/projects/views.py
@@ -186,20 +186,27 @@
identifier=identifier,
)
- # Detect previously sent notifications using a unique identifier
- # TODO: We should simplify this with a custom Notifications model
- notifications_map = {}
-
- for notification in Notification.objects.filter(
- description__isnull=False,
- target_content_type=ContentType.objects.get_for_model(project),
- target_object_id=project.id,
- ):
- identifier = notification.data["identifier"]
- if identifier not in notifications_map:
- notifications_map[identifier] = notification
-
- notifications = list(notifications_map.values())
+ notifications = list(
+ Notification.objects.filter(
+ description__isnull=False,
+ target_content_type=ContentType.objects.get_for_model(project),
+ target_object_id=project.id,
+ )
+ # Each project notification is stored in one Notification instance per user. To
+ # identify unique project Notifications, we use the identifier stored in the
+ # Notification.data field.
+ #
+ # PostgreSQL allows us to retrieve Notifications with unique Notification.data
+ # fields by combining .order_by(*fields) and .distinct(*fields) calls. Read more:
+ # https://docs.djangoproject.com/en/3.2/ref/models/querysets/#distinct
+ #
+ # That approach doesn't allow us to order Notifications by their timestamp, so
+ # we have to do that in python below.
+ .order_by("data")
+ .distinct("data")
+ .prefetch_related("actor", "target")
+ )
+
notifications.sort(key=lambda x: x.timestamp, reverse=True)
# Recipient shortcuts
|
{"golden_diff": "diff --git a/pontoon/projects/views.py b/pontoon/projects/views.py\n--- a/pontoon/projects/views.py\n+++ b/pontoon/projects/views.py\n@@ -186,20 +186,27 @@\n identifier=identifier,\n )\n \n- # Detect previously sent notifications using a unique identifier\n- # TODO: We should simplify this with a custom Notifications model\n- notifications_map = {}\n-\n- for notification in Notification.objects.filter(\n- description__isnull=False,\n- target_content_type=ContentType.objects.get_for_model(project),\n- target_object_id=project.id,\n- ):\n- identifier = notification.data[\"identifier\"]\n- if identifier not in notifications_map:\n- notifications_map[identifier] = notification\n-\n- notifications = list(notifications_map.values())\n+ notifications = list(\n+ Notification.objects.filter(\n+ description__isnull=False,\n+ target_content_type=ContentType.objects.get_for_model(project),\n+ target_object_id=project.id,\n+ )\n+ # Each project notification is stored in one Notification instance per user. To\n+ # identify unique project Notifications, we use the identifier stored in the\n+ # Notification.data field.\n+ #\n+ # PostgreSQL allows us to retrieve Notifications with unique Notification.data\n+ # fields by combining .order_by(*fields) and .distinct(*fields) calls. Read more:\n+ # https://docs.djangoproject.com/en/3.2/ref/models/querysets/#distinct\n+ #\n+ # That approach doesn't allow us to order Notifications by their timestamp, so\n+ # we have to do that in python below.\n+ .order_by(\"data\")\n+ .distinct(\"data\")\n+ .prefetch_related(\"actor\", \"target\")\n+ )\n+\n notifications.sort(key=lambda x: x.timestamp, reverse=True)\n \n # Recipient shortcuts\n", "issue": "Speed up Project Notifications tab\nThe Notifications tab takes a while to load if multiple manual notifications have been sent to project contributors.\n", "before_files": [{"content": "import uuid\nfrom django.conf import settings\nfrom django.contrib.auth.models import User\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.core.exceptions import ImproperlyConfigured\nfrom django.db import transaction\nfrom django.db.models import Q\nfrom django.http import Http404, JsonResponse, HttpResponseRedirect\nfrom django.shortcuts import get_object_or_404, render\nfrom django.views.generic.detail import DetailView\n\nfrom guardian.decorators import permission_required_or_403\nfrom notifications.models import Notification\nfrom notifications.signals import notify\n\nfrom pontoon.base.models import Project, Locale\nfrom pontoon.base.utils import require_AJAX, split_ints, get_project_or_redirect\nfrom pontoon.contributors.views import ContributorsMixin\nfrom pontoon.insights.utils import get_insights\nfrom pontoon.projects import forms\nfrom pontoon.tags.utils import Tags\n\n\ndef projects(request):\n \"\"\"List all active projects.\"\"\"\n projects = (\n Project.objects.visible()\n .visible_for(request.user)\n .prefetch_related(\n \"latest_translation__user\", \"latest_translation__approved_user\"\n )\n .order_by(\"name\")\n )\n\n if not projects:\n return render(request, \"no_projects.html\", {\"title\": \"Projects\"})\n\n return render(\n request,\n \"projects/projects.html\",\n {\"projects\": projects, \"top_instances\": projects.get_top_instances()},\n )\n\n\ndef project(request, slug):\n \"\"\"Project dashboard.\"\"\"\n project = get_project_or_redirect(\n slug, \"pontoon.projects.project\", \"slug\", request.user\n )\n if isinstance(project, HttpResponseRedirect):\n return project\n\n project_locales = project.project_locale\n chart = project\n\n # Only include filtered teams if provided\n teams = request.GET.get(\"teams\", \"\").split(\",\")\n filtered_locales = Locale.objects.filter(code__in=teams)\n if filtered_locales.exists():\n project_locales = project_locales.filter(locale__in=filtered_locales)\n chart = project_locales.aggregated_stats()\n\n return render(\n request,\n \"projects/project.html\",\n {\n \"chart\": chart,\n \"count\": project_locales.count(),\n \"project\": project,\n \"tags_count\": (\n project.tag_set.filter(resources__isnull=False).distinct().count()\n if project.tags_enabled\n else None\n ),\n },\n )\n\n\n@require_AJAX\ndef ajax_teams(request, slug):\n \"\"\"Teams tab.\"\"\"\n project = get_object_or_404(\n Project.objects.visible_for(request.user).available(), slug=slug\n )\n\n locales = Locale.objects.available()\n\n # Only include filtered teams if provided\n teams = request.GET.get(\"teams\", \"\").split(\",\")\n filtered_locales = Locale.objects.filter(code__in=teams)\n if filtered_locales.exists():\n locales = locales.filter(pk__in=filtered_locales)\n\n locales = locales.prefetch_project_locale(project).order_by(\"name\")\n\n return render(\n request,\n \"projects/includes/teams.html\",\n {\"project\": project, \"locales\": locales},\n )\n\n\n@require_AJAX\ndef ajax_tags(request, slug):\n \"\"\"Tags tab.\"\"\"\n project = get_object_or_404(Project.objects.visible_for(request.user), slug=slug)\n\n if not project.tags_enabled:\n raise Http404\n\n tags = Tags(project=project).get()\n\n return render(\n request,\n \"projects/includes/tags.html\",\n {\"project\": project, \"tags\": tags},\n )\n\n\n@require_AJAX\ndef ajax_insights(request, slug):\n \"\"\"Insights tab.\"\"\"\n if not settings.ENABLE_INSIGHTS:\n raise ImproperlyConfigured(\"ENABLE_INSIGHTS variable not set in settings.\")\n\n project = get_object_or_404(\n Project.objects.visible_for(request.user).available(), slug=slug\n )\n insights = get_insights(project=project)\n\n return render(request, \"projects/includes/insights.html\", insights)\n\n\n@require_AJAX\ndef ajax_info(request, slug):\n \"\"\"Info tab.\"\"\"\n project = get_object_or_404(\n Project.objects.visible_for(request.user).available(), slug=slug\n )\n\n return render(request, \"projects/includes/info.html\", {\"project\": project})\n\n\n@permission_required_or_403(\"base.can_manage_project\")\[email protected]\n@require_AJAX\ndef ajax_notifications(request, slug):\n \"\"\"Notifications tab.\"\"\"\n project = get_object_or_404(\n Project.objects.visible_for(request.user).available(), slug=slug\n )\n available_locales = project.locales.prefetch_project_locale(project).order_by(\n \"name\"\n )\n\n # Send notifications\n if request.method == \"POST\":\n form = forms.NotificationsForm(request.POST)\n\n if not form.is_valid():\n return JsonResponse(dict(form.errors.items()))\n\n contributors = User.objects.filter(\n translation__entity__resource__project=project,\n )\n\n # For performance reasons, only filter contributors for selected\n # locales if different from all project locales\n available_ids = sorted(list(available_locales.values_list(\"id\", flat=True)))\n selected_ids = sorted(split_ints(form.cleaned_data.get(\"selected_locales\")))\n\n if available_ids != selected_ids:\n contributors = User.objects.filter(\n translation__entity__resource__project=project,\n translation__locale__in=available_locales.filter(id__in=selected_ids),\n )\n\n identifier = uuid.uuid4().hex\n for contributor in contributors.distinct():\n notify.send(\n request.user,\n recipient=contributor,\n verb=\"has sent a message in\",\n target=project,\n description=form.cleaned_data.get(\"message\"),\n identifier=identifier,\n )\n\n # Detect previously sent notifications using a unique identifier\n # TODO: We should simplify this with a custom Notifications model\n notifications_map = {}\n\n for notification in Notification.objects.filter(\n description__isnull=False,\n target_content_type=ContentType.objects.get_for_model(project),\n target_object_id=project.id,\n ):\n identifier = notification.data[\"identifier\"]\n if identifier not in notifications_map:\n notifications_map[identifier] = notification\n\n notifications = list(notifications_map.values())\n notifications.sort(key=lambda x: x.timestamp, reverse=True)\n\n # Recipient shortcuts\n incomplete = []\n complete = []\n for available_locale in available_locales:\n completion_percent = available_locale.get_chart(project)[\"completion_percent\"]\n if completion_percent == 100:\n complete.append(available_locale.pk)\n else:\n incomplete.append(available_locale.pk)\n\n return render(\n request,\n \"projects/includes/manual_notifications.html\",\n {\n \"form\": forms.NotificationsForm(),\n \"project\": project,\n \"available_locales\": available_locales,\n \"notifications\": notifications,\n \"incomplete\": incomplete,\n \"complete\": complete,\n },\n )\n\n\nclass ProjectContributorsView(ContributorsMixin, DetailView):\n \"\"\"\n Renders view of contributors for the project.\n \"\"\"\n\n template_name = \"projects/includes/contributors.html\"\n model = Project\n\n def get_queryset(self):\n return super().get_queryset().visible_for(self.request.user)\n\n def get_context_object_name(self, obj):\n return \"project\"\n\n def contributors_filter(self, **kwargs):\n return Q(entity__resource__project=self.object)\n", "path": "pontoon/projects/views.py"}], "after_files": [{"content": "import uuid\nfrom django.conf import settings\nfrom django.contrib.auth.models import User\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.core.exceptions import ImproperlyConfigured\nfrom django.db import transaction\nfrom django.db.models import Q\nfrom django.http import Http404, JsonResponse, HttpResponseRedirect\nfrom django.shortcuts import get_object_or_404, render\nfrom django.views.generic.detail import DetailView\n\nfrom guardian.decorators import permission_required_or_403\nfrom notifications.models import Notification\nfrom notifications.signals import notify\n\nfrom pontoon.base.models import Project, Locale\nfrom pontoon.base.utils import require_AJAX, split_ints, get_project_or_redirect\nfrom pontoon.contributors.views import ContributorsMixin\nfrom pontoon.insights.utils import get_insights\nfrom pontoon.projects import forms\nfrom pontoon.tags.utils import Tags\n\n\ndef projects(request):\n \"\"\"List all active projects.\"\"\"\n projects = (\n Project.objects.visible()\n .visible_for(request.user)\n .prefetch_related(\n \"latest_translation__user\", \"latest_translation__approved_user\"\n )\n .order_by(\"name\")\n )\n\n if not projects:\n return render(request, \"no_projects.html\", {\"title\": \"Projects\"})\n\n return render(\n request,\n \"projects/projects.html\",\n {\"projects\": projects, \"top_instances\": projects.get_top_instances()},\n )\n\n\ndef project(request, slug):\n \"\"\"Project dashboard.\"\"\"\n project = get_project_or_redirect(\n slug, \"pontoon.projects.project\", \"slug\", request.user\n )\n if isinstance(project, HttpResponseRedirect):\n return project\n\n project_locales = project.project_locale\n chart = project\n\n # Only include filtered teams if provided\n teams = request.GET.get(\"teams\", \"\").split(\",\")\n filtered_locales = Locale.objects.filter(code__in=teams)\n if filtered_locales.exists():\n project_locales = project_locales.filter(locale__in=filtered_locales)\n chart = project_locales.aggregated_stats()\n\n return render(\n request,\n \"projects/project.html\",\n {\n \"chart\": chart,\n \"count\": project_locales.count(),\n \"project\": project,\n \"tags_count\": (\n project.tag_set.filter(resources__isnull=False).distinct().count()\n if project.tags_enabled\n else None\n ),\n },\n )\n\n\n@require_AJAX\ndef ajax_teams(request, slug):\n \"\"\"Teams tab.\"\"\"\n project = get_object_or_404(\n Project.objects.visible_for(request.user).available(), slug=slug\n )\n\n locales = Locale.objects.available()\n\n # Only include filtered teams if provided\n teams = request.GET.get(\"teams\", \"\").split(\",\")\n filtered_locales = Locale.objects.filter(code__in=teams)\n if filtered_locales.exists():\n locales = locales.filter(pk__in=filtered_locales)\n\n locales = locales.prefetch_project_locale(project).order_by(\"name\")\n\n return render(\n request,\n \"projects/includes/teams.html\",\n {\"project\": project, \"locales\": locales},\n )\n\n\n@require_AJAX\ndef ajax_tags(request, slug):\n \"\"\"Tags tab.\"\"\"\n project = get_object_or_404(Project.objects.visible_for(request.user), slug=slug)\n\n if not project.tags_enabled:\n raise Http404\n\n tags = Tags(project=project).get()\n\n return render(\n request,\n \"projects/includes/tags.html\",\n {\"project\": project, \"tags\": tags},\n )\n\n\n@require_AJAX\ndef ajax_insights(request, slug):\n \"\"\"Insights tab.\"\"\"\n if not settings.ENABLE_INSIGHTS:\n raise ImproperlyConfigured(\"ENABLE_INSIGHTS variable not set in settings.\")\n\n project = get_object_or_404(\n Project.objects.visible_for(request.user).available(), slug=slug\n )\n insights = get_insights(project=project)\n\n return render(request, \"projects/includes/insights.html\", insights)\n\n\n@require_AJAX\ndef ajax_info(request, slug):\n \"\"\"Info tab.\"\"\"\n project = get_object_or_404(\n Project.objects.visible_for(request.user).available(), slug=slug\n )\n\n return render(request, \"projects/includes/info.html\", {\"project\": project})\n\n\n@permission_required_or_403(\"base.can_manage_project\")\[email protected]\n@require_AJAX\ndef ajax_notifications(request, slug):\n \"\"\"Notifications tab.\"\"\"\n project = get_object_or_404(\n Project.objects.visible_for(request.user).available(), slug=slug\n )\n available_locales = project.locales.prefetch_project_locale(project).order_by(\n \"name\"\n )\n\n # Send notifications\n if request.method == \"POST\":\n form = forms.NotificationsForm(request.POST)\n\n if not form.is_valid():\n return JsonResponse(dict(form.errors.items()))\n\n contributors = User.objects.filter(\n translation__entity__resource__project=project,\n )\n\n # For performance reasons, only filter contributors for selected\n # locales if different from all project locales\n available_ids = sorted(list(available_locales.values_list(\"id\", flat=True)))\n selected_ids = sorted(split_ints(form.cleaned_data.get(\"selected_locales\")))\n\n if available_ids != selected_ids:\n contributors = User.objects.filter(\n translation__entity__resource__project=project,\n translation__locale__in=available_locales.filter(id__in=selected_ids),\n )\n\n identifier = uuid.uuid4().hex\n for contributor in contributors.distinct():\n notify.send(\n request.user,\n recipient=contributor,\n verb=\"has sent a message in\",\n target=project,\n description=form.cleaned_data.get(\"message\"),\n identifier=identifier,\n )\n\n notifications = list(\n Notification.objects.filter(\n description__isnull=False,\n target_content_type=ContentType.objects.get_for_model(project),\n target_object_id=project.id,\n )\n # Each project notification is stored in one Notification instance per user. To\n # identify unique project Notifications, we use the identifier stored in the\n # Notification.data field.\n #\n # PostgreSQL allows us to retrieve Notifications with unique Notification.data\n # fields by combining .order_by(*fields) and .distinct(*fields) calls. Read more:\n # https://docs.djangoproject.com/en/3.2/ref/models/querysets/#distinct\n #\n # That approach doesn't allow us to order Notifications by their timestamp, so\n # we have to do that in python below.\n .order_by(\"data\")\n .distinct(\"data\")\n .prefetch_related(\"actor\", \"target\")\n )\n\n notifications.sort(key=lambda x: x.timestamp, reverse=True)\n\n # Recipient shortcuts\n incomplete = []\n complete = []\n for available_locale in available_locales:\n completion_percent = available_locale.get_chart(project)[\"completion_percent\"]\n if completion_percent == 100:\n complete.append(available_locale.pk)\n else:\n incomplete.append(available_locale.pk)\n\n return render(\n request,\n \"projects/includes/manual_notifications.html\",\n {\n \"form\": forms.NotificationsForm(),\n \"project\": project,\n \"available_locales\": available_locales,\n \"notifications\": notifications,\n \"incomplete\": incomplete,\n \"complete\": complete,\n },\n )\n\n\nclass ProjectContributorsView(ContributorsMixin, DetailView):\n \"\"\"\n Renders view of contributors for the project.\n \"\"\"\n\n template_name = \"projects/includes/contributors.html\"\n model = Project\n\n def get_queryset(self):\n return super().get_queryset().visible_for(self.request.user)\n\n def get_context_object_name(self, obj):\n return \"project\"\n\n def contributors_filter(self, **kwargs):\n return Q(entity__resource__project=self.object)\n", "path": "pontoon/projects/views.py"}]}
| 2,499 | 404 |
gh_patches_debug_25937
|
rasdani/github-patches
|
git_diff
|
e-valuation__EvaP-684
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove single results from export
When exporting a semester with single results a `ZeroDevisionError` occurs, because there are no participants.
Single results should not be part of the semester export.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `evap/results/exporters.py`
Content:
```
1 from evap.evaluation.models import Questionnaire
2 from evap.evaluation.tools import calculate_results, calculate_average_grades_and_deviation, get_grade_color, get_deviation_color
3
4 from django.utils.translation import ugettext as _
5
6 from collections import OrderedDict
7 from collections import defaultdict
8 import datetime
9 import xlwt
10
11
12 class ExcelExporter(object):
13
14 def __init__(self, semester):
15 self.semester = semester
16 self.styles = dict()
17
18 self.CUSTOM_COLOR_START = 8
19 self.NUM_GRADE_COLORS = 21 # 1.0 to 5.0 in 0.2 steps
20 self.NUM_DEVIATION_COLORS = 13 # 0.0 to 2.4 in 0.2 steps
21 self.STEP = 0.2 # we only have a limited number of custom colors
22
23 def normalize_number(self, number):
24 """ floors 'number' to a multiply of self.STEP """
25 rounded_number = round(number, 1) # see #302
26 return round(int(rounded_number / self.STEP + 0.0001) * self.STEP, 1)
27
28 def create_style(self, workbook, base_style, style_name, palette_index, color):
29 color_name = style_name + "_color"
30 xlwt.add_palette_colour(color_name, palette_index)
31 workbook.set_colour_RGB(palette_index, *color)
32 self.styles[style_name] = xlwt.easyxf(base_style.format(color_name), num_format_str="0.0")
33
34 def init_styles(self, workbook):
35 self.styles = {
36 'default': xlwt.Style.default_style,
37 'avg': xlwt.easyxf('alignment: horiz centre; font: bold on; borders: left medium, top medium, bottom medium'),
38 'headline': xlwt.easyxf('font: bold on, height 400; alignment: horiz centre, vert centre, wrap on', num_format_str="0.0"),
39 'course': xlwt.easyxf('alignment: horiz centre, wrap on, rota 90; borders: left medium, top medium'),
40 'course_unfinished': xlwt.easyxf('alignment: horiz centre, wrap on, rota 90; borders: left medium, top medium; font: italic on'),
41 'total_voters': xlwt.easyxf('alignment: horiz centre; borders: left medium, bottom medium, right medium'),
42 'bold': xlwt.easyxf('font: bold on'),
43 'border_left': xlwt.easyxf('borders: left medium'),
44 'border_right': xlwt.easyxf('borders: right medium'),
45 'border_top_bottom_right': xlwt.easyxf('borders: top medium, bottom medium, right medium')}
46
47
48
49 grade_base_style = 'pattern: pattern solid, fore_colour {}; alignment: horiz centre; font: bold on; borders: left medium'
50 for i in range(0, self.NUM_GRADE_COLORS):
51 grade = 1 + i*self.STEP
52 color = get_grade_color(grade)
53 palette_index = self.CUSTOM_COLOR_START + i
54 style_name = self.grade_to_style(grade)
55 self.create_style(workbook, grade_base_style, style_name, palette_index, color)
56
57 deviation_base_style = 'pattern: pattern solid, fore_colour {}; alignment: horiz centre; borders: right medium'
58 for i in range(0, self.NUM_DEVIATION_COLORS):
59 deviation = i * self.STEP
60 color = get_deviation_color(deviation)
61 palette_index = self.CUSTOM_COLOR_START + self.NUM_GRADE_COLORS + i
62 style_name = self.deviation_to_style(deviation)
63 self.create_style(workbook, deviation_base_style, style_name, palette_index, color)
64
65
66 def grade_to_style(self, grade):
67 return 'grade_' + str(self.normalize_number(grade))
68
69 def deviation_to_style(self, deviation):
70 return 'deviation_' + str(self.normalize_number(deviation))
71
72 def export(self, response, ignore_not_enough_answers=False):
73 courses_with_results = list()
74 for course in self.semester.course_set.filter(state="published").all():
75 results = OrderedDict()
76 for questionnaire, contributor, label, data, section_warning in calculate_results(course):
77 results.setdefault(questionnaire.id, []).extend(data)
78 courses_with_results.append((course, results))
79
80 courses_with_results.sort(key=lambda cr: cr[0].type)
81
82 qn_frequencies = defaultdict(int)
83 for course, results in courses_with_results:
84 for questionnaire, results in results.items():
85 qn_frequencies[questionnaire] += 1
86
87 qn_relevant = list(qn_frequencies.items())
88 qn_relevant.sort(key=lambda t: -t[1])
89
90 questionnaires = [Questionnaire.objects.get(id=t[0]) for t in qn_relevant]
91
92 self.workbook = xlwt.Workbook()
93 self.sheet = self.workbook.add_sheet(_("Results"))
94 self.row = 0
95 self.col = 0
96
97
98 self.init_styles(self.workbook)
99
100 writec(self, _("Evaluation {0} - created on {1}").format(self.semester.name, datetime.date.today()), "headline")
101 for course, results in courses_with_results:
102 if course.state == "published":
103 writec(self, course.name, "course", cols=2)
104 else:
105 writec(self, course.name, "course_unfinished", cols=2)
106
107 writen(self)
108 for course, results in courses_with_results:
109 writec(self, "Average", "avg")
110 writec(self, "Deviation", "border_top_bottom_right")
111
112 for questionnaire in questionnaires:
113 writen(self, questionnaire.name, "bold")
114 for course, results in courses_with_results:
115 self.write_two_empty_cells_with_borders()
116
117 for question in questionnaire.question_set.all():
118 if question.is_text_question:
119 continue
120
121 writen(self, question.text)
122
123 for course, results in courses_with_results:
124 qn_results = results.get(questionnaire.id, None)
125 if qn_results:
126 values = []
127 deviations = []
128 for grade_result in qn_results:
129 if grade_result.question.id == question.id:
130 if grade_result.average:
131 values.append(grade_result.average)
132 deviations.append(grade_result.deviation)
133 break
134 enough_answers = course.can_publish_grades
135 if values and (enough_answers or ignore_not_enough_answers):
136 avg = sum(values) / len(values)
137 writec(self, avg, self.grade_to_style(avg))
138
139 dev = sum(deviations) / len(deviations)
140 writec(self, dev, self.deviation_to_style(dev))
141 else:
142 self.write_two_empty_cells_with_borders()
143 else:
144 self.write_two_empty_cells_with_borders()
145 writen(self, None)
146 for course, results in courses_with_results:
147 self.write_two_empty_cells_with_borders()
148
149 writen(self, _("Overall Average Grade"), "bold")
150 for course, results in courses_with_results:
151 avg, dev = calculate_average_grades_and_deviation(course)
152 if avg:
153 writec(self, avg, self.grade_to_style(avg), cols=2)
154 else:
155 self.write_two_empty_cells_with_borders()
156
157 writen(self, _("Overall Average Standard Deviation"), "bold")
158 for course, results in courses_with_results:
159 avg, dev = calculate_average_grades_and_deviation(course)
160 if dev is not None:
161 writec(self, dev, self.deviation_to_style(dev), cols=2)
162 else:
163 self.write_two_empty_cells_with_borders()
164
165 writen(self, _("Total Voters/Total Participants"), "bold")
166 for course, results in courses_with_results:
167 percent_participants = float(course.num_voters)/float(course.num_participants)
168 writec(self, "{}/{} ({:.0%})".format(course.num_voters, course.num_participants, percent_participants), "total_voters", cols=2)
169
170 self.workbook.save(response)
171
172
173 def write_two_empty_cells_with_borders(self):
174 writec(self, None, "border_left")
175 writec(self, None, "border_right")
176
177
178 def writen(exporter, label="", style_name="default"):
179 """Write the cell at the beginning of the next row."""
180 exporter.col = 0
181 exporter.row += 1
182 writec(exporter, label, style_name)
183
184 def writec(exporter, label, style_name, rows=1, cols=1):
185 """Write the cell in the next column of the current line."""
186 _write(exporter, label, exporter.styles[style_name], rows, cols)
187 exporter.col += 1
188
189 def _write(exporter, label, style, rows, cols):
190 if rows > 1 or cols > 1:
191 exporter.sheet.write_merge(exporter.row, exporter.row+rows-1, exporter.col, exporter.col+cols-1, label, style)
192 exporter.col += cols - 1
193 else:
194 exporter.sheet.write(exporter.row, exporter.col, label, style)
195
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/evap/results/exporters.py b/evap/results/exporters.py
--- a/evap/results/exporters.py
+++ b/evap/results/exporters.py
@@ -72,6 +72,8 @@
def export(self, response, ignore_not_enough_answers=False):
courses_with_results = list()
for course in self.semester.course_set.filter(state="published").all():
+ if course.is_single_result():
+ continue
results = OrderedDict()
for questionnaire, contributor, label, data, section_warning in calculate_results(course):
results.setdefault(questionnaire.id, []).extend(data)
@@ -164,7 +166,7 @@
writen(self, _("Total Voters/Total Participants"), "bold")
for course, results in courses_with_results:
- percent_participants = float(course.num_voters)/float(course.num_participants)
+ percent_participants = float(course.num_voters)/float(course.num_participants) if course.num_participants > 0 else 0
writec(self, "{}/{} ({:.0%})".format(course.num_voters, course.num_participants, percent_participants), "total_voters", cols=2)
self.workbook.save(response)
|
{"golden_diff": "diff --git a/evap/results/exporters.py b/evap/results/exporters.py\n--- a/evap/results/exporters.py\n+++ b/evap/results/exporters.py\n@@ -72,6 +72,8 @@\n def export(self, response, ignore_not_enough_answers=False):\n courses_with_results = list()\n for course in self.semester.course_set.filter(state=\"published\").all():\n+ if course.is_single_result():\n+ continue\n results = OrderedDict()\n for questionnaire, contributor, label, data, section_warning in calculate_results(course):\n results.setdefault(questionnaire.id, []).extend(data)\n@@ -164,7 +166,7 @@\n \n writen(self, _(\"Total Voters/Total Participants\"), \"bold\")\n for course, results in courses_with_results:\n- percent_participants = float(course.num_voters)/float(course.num_participants)\n+ percent_participants = float(course.num_voters)/float(course.num_participants) if course.num_participants > 0 else 0\n writec(self, \"{}/{} ({:.0%})\".format(course.num_voters, course.num_participants, percent_participants), \"total_voters\", cols=2)\n \n self.workbook.save(response)\n", "issue": "Remove single results from export\nWhen exporting a semester with single results a `ZeroDevisionError` occurs, because there are no participants.\nSingle results should not be part of the semester export.\n\n", "before_files": [{"content": "from evap.evaluation.models import Questionnaire\nfrom evap.evaluation.tools import calculate_results, calculate_average_grades_and_deviation, get_grade_color, get_deviation_color\n\nfrom django.utils.translation import ugettext as _\n\nfrom collections import OrderedDict\nfrom collections import defaultdict\nimport datetime\nimport xlwt\n\n\nclass ExcelExporter(object):\n\n def __init__(self, semester):\n self.semester = semester\n self.styles = dict()\n\n self.CUSTOM_COLOR_START = 8\n self.NUM_GRADE_COLORS = 21 # 1.0 to 5.0 in 0.2 steps\n self.NUM_DEVIATION_COLORS = 13 # 0.0 to 2.4 in 0.2 steps\n self.STEP = 0.2 # we only have a limited number of custom colors\n\n def normalize_number(self, number):\n \"\"\" floors 'number' to a multiply of self.STEP \"\"\"\n rounded_number = round(number, 1) # see #302\n return round(int(rounded_number / self.STEP + 0.0001) * self.STEP, 1)\n\n def create_style(self, workbook, base_style, style_name, palette_index, color):\n color_name = style_name + \"_color\"\n xlwt.add_palette_colour(color_name, palette_index)\n workbook.set_colour_RGB(palette_index, *color)\n self.styles[style_name] = xlwt.easyxf(base_style.format(color_name), num_format_str=\"0.0\")\n\n def init_styles(self, workbook):\n self.styles = {\n 'default': xlwt.Style.default_style,\n 'avg': xlwt.easyxf('alignment: horiz centre; font: bold on; borders: left medium, top medium, bottom medium'),\n 'headline': xlwt.easyxf('font: bold on, height 400; alignment: horiz centre, vert centre, wrap on', num_format_str=\"0.0\"),\n 'course': xlwt.easyxf('alignment: horiz centre, wrap on, rota 90; borders: left medium, top medium'),\n 'course_unfinished': xlwt.easyxf('alignment: horiz centre, wrap on, rota 90; borders: left medium, top medium; font: italic on'),\n 'total_voters': xlwt.easyxf('alignment: horiz centre; borders: left medium, bottom medium, right medium'),\n 'bold': xlwt.easyxf('font: bold on'),\n 'border_left': xlwt.easyxf('borders: left medium'),\n 'border_right': xlwt.easyxf('borders: right medium'),\n 'border_top_bottom_right': xlwt.easyxf('borders: top medium, bottom medium, right medium')}\n\n\n\n grade_base_style = 'pattern: pattern solid, fore_colour {}; alignment: horiz centre; font: bold on; borders: left medium'\n for i in range(0, self.NUM_GRADE_COLORS):\n grade = 1 + i*self.STEP\n color = get_grade_color(grade)\n palette_index = self.CUSTOM_COLOR_START + i\n style_name = self.grade_to_style(grade)\n self.create_style(workbook, grade_base_style, style_name, palette_index, color)\n\n deviation_base_style = 'pattern: pattern solid, fore_colour {}; alignment: horiz centre; borders: right medium'\n for i in range(0, self.NUM_DEVIATION_COLORS):\n deviation = i * self.STEP\n color = get_deviation_color(deviation)\n palette_index = self.CUSTOM_COLOR_START + self.NUM_GRADE_COLORS + i\n style_name = self.deviation_to_style(deviation)\n self.create_style(workbook, deviation_base_style, style_name, palette_index, color)\n\n\n def grade_to_style(self, grade):\n return 'grade_' + str(self.normalize_number(grade))\n\n def deviation_to_style(self, deviation):\n return 'deviation_' + str(self.normalize_number(deviation))\n\n def export(self, response, ignore_not_enough_answers=False):\n courses_with_results = list()\n for course in self.semester.course_set.filter(state=\"published\").all():\n results = OrderedDict()\n for questionnaire, contributor, label, data, section_warning in calculate_results(course):\n results.setdefault(questionnaire.id, []).extend(data)\n courses_with_results.append((course, results))\n\n courses_with_results.sort(key=lambda cr: cr[0].type)\n\n qn_frequencies = defaultdict(int)\n for course, results in courses_with_results:\n for questionnaire, results in results.items():\n qn_frequencies[questionnaire] += 1\n\n qn_relevant = list(qn_frequencies.items())\n qn_relevant.sort(key=lambda t: -t[1])\n\n questionnaires = [Questionnaire.objects.get(id=t[0]) for t in qn_relevant]\n\n self.workbook = xlwt.Workbook()\n self.sheet = self.workbook.add_sheet(_(\"Results\"))\n self.row = 0\n self.col = 0\n\n\n self.init_styles(self.workbook)\n\n writec(self, _(\"Evaluation {0} - created on {1}\").format(self.semester.name, datetime.date.today()), \"headline\")\n for course, results in courses_with_results:\n if course.state == \"published\":\n writec(self, course.name, \"course\", cols=2)\n else:\n writec(self, course.name, \"course_unfinished\", cols=2)\n\n writen(self)\n for course, results in courses_with_results:\n writec(self, \"Average\", \"avg\")\n writec(self, \"Deviation\", \"border_top_bottom_right\")\n\n for questionnaire in questionnaires:\n writen(self, questionnaire.name, \"bold\")\n for course, results in courses_with_results:\n self.write_two_empty_cells_with_borders()\n\n for question in questionnaire.question_set.all():\n if question.is_text_question:\n continue\n\n writen(self, question.text)\n\n for course, results in courses_with_results:\n qn_results = results.get(questionnaire.id, None)\n if qn_results:\n values = []\n deviations = []\n for grade_result in qn_results:\n if grade_result.question.id == question.id:\n if grade_result.average:\n values.append(grade_result.average)\n deviations.append(grade_result.deviation)\n break\n enough_answers = course.can_publish_grades\n if values and (enough_answers or ignore_not_enough_answers):\n avg = sum(values) / len(values)\n writec(self, avg, self.grade_to_style(avg))\n\n dev = sum(deviations) / len(deviations)\n writec(self, dev, self.deviation_to_style(dev))\n else:\n self.write_two_empty_cells_with_borders()\n else:\n self.write_two_empty_cells_with_borders()\n writen(self, None)\n for course, results in courses_with_results:\n self.write_two_empty_cells_with_borders()\n\n writen(self, _(\"Overall Average Grade\"), \"bold\")\n for course, results in courses_with_results:\n avg, dev = calculate_average_grades_and_deviation(course)\n if avg:\n writec(self, avg, self.grade_to_style(avg), cols=2)\n else:\n self.write_two_empty_cells_with_borders()\n\n writen(self, _(\"Overall Average Standard Deviation\"), \"bold\")\n for course, results in courses_with_results:\n avg, dev = calculate_average_grades_and_deviation(course)\n if dev is not None:\n writec(self, dev, self.deviation_to_style(dev), cols=2)\n else:\n self.write_two_empty_cells_with_borders()\n\n writen(self, _(\"Total Voters/Total Participants\"), \"bold\")\n for course, results in courses_with_results:\n percent_participants = float(course.num_voters)/float(course.num_participants)\n writec(self, \"{}/{} ({:.0%})\".format(course.num_voters, course.num_participants, percent_participants), \"total_voters\", cols=2)\n\n self.workbook.save(response)\n\n\n def write_two_empty_cells_with_borders(self):\n writec(self, None, \"border_left\")\n writec(self, None, \"border_right\")\n\n\ndef writen(exporter, label=\"\", style_name=\"default\"):\n \"\"\"Write the cell at the beginning of the next row.\"\"\"\n exporter.col = 0\n exporter.row += 1\n writec(exporter, label, style_name)\n\ndef writec(exporter, label, style_name, rows=1, cols=1):\n \"\"\"Write the cell in the next column of the current line.\"\"\"\n _write(exporter, label, exporter.styles[style_name], rows, cols)\n exporter.col += 1\n\ndef _write(exporter, label, style, rows, cols):\n if rows > 1 or cols > 1:\n exporter.sheet.write_merge(exporter.row, exporter.row+rows-1, exporter.col, exporter.col+cols-1, label, style)\n exporter.col += cols - 1\n else:\n exporter.sheet.write(exporter.row, exporter.col, label, style)\n", "path": "evap/results/exporters.py"}], "after_files": [{"content": "from evap.evaluation.models import Questionnaire\nfrom evap.evaluation.tools import calculate_results, calculate_average_grades_and_deviation, get_grade_color, get_deviation_color\n\nfrom django.utils.translation import ugettext as _\n\nfrom collections import OrderedDict\nfrom collections import defaultdict\nimport datetime\nimport xlwt\n\n\nclass ExcelExporter(object):\n\n def __init__(self, semester):\n self.semester = semester\n self.styles = dict()\n\n self.CUSTOM_COLOR_START = 8\n self.NUM_GRADE_COLORS = 21 # 1.0 to 5.0 in 0.2 steps\n self.NUM_DEVIATION_COLORS = 13 # 0.0 to 2.4 in 0.2 steps\n self.STEP = 0.2 # we only have a limited number of custom colors\n\n def normalize_number(self, number):\n \"\"\" floors 'number' to a multiply of self.STEP \"\"\"\n rounded_number = round(number, 1) # see #302\n return round(int(rounded_number / self.STEP + 0.0001) * self.STEP, 1)\n\n def create_style(self, workbook, base_style, style_name, palette_index, color):\n color_name = style_name + \"_color\"\n xlwt.add_palette_colour(color_name, palette_index)\n workbook.set_colour_RGB(palette_index, *color)\n self.styles[style_name] = xlwt.easyxf(base_style.format(color_name), num_format_str=\"0.0\")\n\n def init_styles(self, workbook):\n self.styles = {\n 'default': xlwt.Style.default_style,\n 'avg': xlwt.easyxf('alignment: horiz centre; font: bold on; borders: left medium, top medium, bottom medium'),\n 'headline': xlwt.easyxf('font: bold on, height 400; alignment: horiz centre, vert centre, wrap on', num_format_str=\"0.0\"),\n 'course': xlwt.easyxf('alignment: horiz centre, wrap on, rota 90; borders: left medium, top medium'),\n 'course_unfinished': xlwt.easyxf('alignment: horiz centre, wrap on, rota 90; borders: left medium, top medium; font: italic on'),\n 'total_voters': xlwt.easyxf('alignment: horiz centre; borders: left medium, bottom medium, right medium'),\n 'bold': xlwt.easyxf('font: bold on'),\n 'border_left': xlwt.easyxf('borders: left medium'),\n 'border_right': xlwt.easyxf('borders: right medium'),\n 'border_top_bottom_right': xlwt.easyxf('borders: top medium, bottom medium, right medium')}\n\n\n\n grade_base_style = 'pattern: pattern solid, fore_colour {}; alignment: horiz centre; font: bold on; borders: left medium'\n for i in range(0, self.NUM_GRADE_COLORS):\n grade = 1 + i*self.STEP\n color = get_grade_color(grade)\n palette_index = self.CUSTOM_COLOR_START + i\n style_name = self.grade_to_style(grade)\n self.create_style(workbook, grade_base_style, style_name, palette_index, color)\n\n deviation_base_style = 'pattern: pattern solid, fore_colour {}; alignment: horiz centre; borders: right medium'\n for i in range(0, self.NUM_DEVIATION_COLORS):\n deviation = i * self.STEP\n color = get_deviation_color(deviation)\n palette_index = self.CUSTOM_COLOR_START + self.NUM_GRADE_COLORS + i\n style_name = self.deviation_to_style(deviation)\n self.create_style(workbook, deviation_base_style, style_name, palette_index, color)\n\n\n def grade_to_style(self, grade):\n return 'grade_' + str(self.normalize_number(grade))\n\n def deviation_to_style(self, deviation):\n return 'deviation_' + str(self.normalize_number(deviation))\n\n def export(self, response, ignore_not_enough_answers=False):\n courses_with_results = list()\n for course in self.semester.course_set.filter(state=\"published\").all():\n if course.is_single_result():\n continue\n results = OrderedDict()\n for questionnaire, contributor, label, data, section_warning in calculate_results(course):\n results.setdefault(questionnaire.id, []).extend(data)\n courses_with_results.append((course, results))\n\n courses_with_results.sort(key=lambda cr: cr[0].type)\n\n qn_frequencies = defaultdict(int)\n for course, results in courses_with_results:\n for questionnaire, results in results.items():\n qn_frequencies[questionnaire] += 1\n\n qn_relevant = list(qn_frequencies.items())\n qn_relevant.sort(key=lambda t: -t[1])\n\n questionnaires = [Questionnaire.objects.get(id=t[0]) for t in qn_relevant]\n\n self.workbook = xlwt.Workbook()\n self.sheet = self.workbook.add_sheet(_(\"Results\"))\n self.row = 0\n self.col = 0\n\n\n self.init_styles(self.workbook)\n\n writec(self, _(\"Evaluation {0} - created on {1}\").format(self.semester.name, datetime.date.today()), \"headline\")\n for course, results in courses_with_results:\n if course.state == \"published\":\n writec(self, course.name, \"course\", cols=2)\n else:\n writec(self, course.name, \"course_unfinished\", cols=2)\n\n writen(self)\n for course, results in courses_with_results:\n writec(self, \"Average\", \"avg\")\n writec(self, \"Deviation\", \"border_top_bottom_right\")\n\n for questionnaire in questionnaires:\n writen(self, questionnaire.name, \"bold\")\n for course, results in courses_with_results:\n self.write_two_empty_cells_with_borders()\n\n for question in questionnaire.question_set.all():\n if question.is_text_question:\n continue\n\n writen(self, question.text)\n\n for course, results in courses_with_results:\n qn_results = results.get(questionnaire.id, None)\n if qn_results:\n values = []\n deviations = []\n for grade_result in qn_results:\n if grade_result.question.id == question.id:\n if grade_result.average:\n values.append(grade_result.average)\n deviations.append(grade_result.deviation)\n break\n enough_answers = course.can_publish_grades\n if values and (enough_answers or ignore_not_enough_answers):\n avg = sum(values) / len(values)\n writec(self, avg, self.grade_to_style(avg))\n\n dev = sum(deviations) / len(deviations)\n writec(self, dev, self.deviation_to_style(dev))\n else:\n self.write_two_empty_cells_with_borders()\n else:\n self.write_two_empty_cells_with_borders()\n writen(self, None)\n for course, results in courses_with_results:\n self.write_two_empty_cells_with_borders()\n\n writen(self, _(\"Overall Average Grade\"), \"bold\")\n for course, results in courses_with_results:\n avg, dev = calculate_average_grades_and_deviation(course)\n if avg:\n writec(self, avg, self.grade_to_style(avg), cols=2)\n else:\n self.write_two_empty_cells_with_borders()\n\n writen(self, _(\"Overall Average Standard Deviation\"), \"bold\")\n for course, results in courses_with_results:\n avg, dev = calculate_average_grades_and_deviation(course)\n if dev is not None:\n writec(self, dev, self.deviation_to_style(dev), cols=2)\n else:\n self.write_two_empty_cells_with_borders()\n\n writen(self, _(\"Total Voters/Total Participants\"), \"bold\")\n for course, results in courses_with_results:\n percent_participants = float(course.num_voters)/float(course.num_participants) if course.num_participants > 0 else 0\n writec(self, \"{}/{} ({:.0%})\".format(course.num_voters, course.num_participants, percent_participants), \"total_voters\", cols=2)\n\n self.workbook.save(response)\n\n\n def write_two_empty_cells_with_borders(self):\n writec(self, None, \"border_left\")\n writec(self, None, \"border_right\")\n\n\ndef writen(exporter, label=\"\", style_name=\"default\"):\n \"\"\"Write the cell at the beginning of the next row.\"\"\"\n exporter.col = 0\n exporter.row += 1\n writec(exporter, label, style_name)\n\ndef writec(exporter, label, style_name, rows=1, cols=1):\n \"\"\"Write the cell in the next column of the current line.\"\"\"\n _write(exporter, label, exporter.styles[style_name], rows, cols)\n exporter.col += 1\n\ndef _write(exporter, label, style, rows, cols):\n if rows > 1 or cols > 1:\n exporter.sheet.write_merge(exporter.row, exporter.row+rows-1, exporter.col, exporter.col+cols-1, label, style)\n exporter.col += cols - 1\n else:\n exporter.sheet.write(exporter.row, exporter.col, label, style)\n", "path": "evap/results/exporters.py"}]}
| 2,762 | 268 |
gh_patches_debug_10721
|
rasdani/github-patches
|
git_diff
|
pypi__warehouse-3979
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
use CSP: sandbox on /simple/ pages
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/sandbox + https://www.youtube.com/watch?v=fbhW37JZtSA&feature=youtu.be
I believe this is a great fit for /simple/, which don't need any ability to do anthing but have a simple HTML structure.
I _think_ we can replace the whole current header with `Content-Security-Policy: sandbox allow-top-navigations`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `warehouse/csp.py`
Content:
```
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13 import collections
14 import copy
15
16
17 SELF = "'self'"
18 NONE = "'none'"
19
20
21 def _serialize(policy):
22 return "; ".join([
23 " ".join([k] + [v2 for v2 in v if v2 is not None])
24 for k, v in sorted(policy.items())
25 ])
26
27
28 def content_security_policy_tween_factory(handler, registry):
29 def content_security_policy_tween(request):
30 resp = handler(request)
31
32 try:
33 policy = request.find_service(name="csp")
34 except ValueError:
35 policy = collections.defaultdict(list)
36
37 # We don't want to apply our Content Security Policy to the debug
38 # toolbar, that's not part of our application and it doesn't work with
39 # our restrictive CSP.
40 policy = _serialize(policy).format(request=request)
41 if not request.path.startswith("/_debug_toolbar/") and policy:
42 resp.headers["Content-Security-Policy"] = policy
43
44 return resp
45
46 return content_security_policy_tween
47
48
49 class CSPPolicy(collections.defaultdict):
50 def __init__(self, policy=None):
51 super().__init__(list, policy or {})
52
53 def merge(self, policy):
54 for key, attrs in policy.items():
55 self[key].extend(attrs)
56
57
58 def csp_factory(_, request):
59 try:
60 return CSPPolicy(copy.deepcopy(request.registry.settings["csp"]))
61 except KeyError:
62 return CSPPolicy({})
63
64
65 def includeme(config):
66 config.register_service_factory(csp_factory, name="csp")
67 # Enable a Content Security Policy
68 config.add_settings({
69 "csp": {
70 "base-uri": [SELF],
71 "block-all-mixed-content": [],
72 "connect-src": [
73 item for item in [
74 SELF,
75 config.registry.settings.get("statuspage.url"),
76 "https://api.github.com/repos/",
77 ]
78 if item
79 ],
80 "default-src": [NONE],
81 "font-src": [SELF, "fonts.gstatic.com"],
82 "form-action": [SELF],
83 "frame-ancestors": [NONE],
84 "frame-src": [NONE],
85 "img-src": [
86 SELF,
87 config.registry.settings["camo.url"],
88 "www.google-analytics.com",
89 ],
90 "script-src": [
91 SELF,
92 "www.googletagmanager.com",
93 "www.google-analytics.com",
94 ],
95 "style-src": [SELF, "fonts.googleapis.com"],
96 },
97 })
98 config.add_tween("warehouse.csp.content_security_policy_tween_factory")
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/warehouse/csp.py b/warehouse/csp.py
--- a/warehouse/csp.py
+++ b/warehouse/csp.py
@@ -34,6 +34,12 @@
except ValueError:
policy = collections.defaultdict(list)
+ # Replace CSP headers on /simple/ pages.
+ if request.path.startswith("/simple/"):
+ policy = collections.defaultdict(list)
+ policy["sandbox"] = ["allow-top-navigation"]
+ policy["default-src"] = [NONE]
+
# We don't want to apply our Content Security Policy to the debug
# toolbar, that's not part of our application and it doesn't work with
# our restrictive CSP.
|
{"golden_diff": "diff --git a/warehouse/csp.py b/warehouse/csp.py\n--- a/warehouse/csp.py\n+++ b/warehouse/csp.py\n@@ -34,6 +34,12 @@\n except ValueError:\n policy = collections.defaultdict(list)\n \n+ # Replace CSP headers on /simple/ pages.\n+ if request.path.startswith(\"/simple/\"):\n+ policy = collections.defaultdict(list)\n+ policy[\"sandbox\"] = [\"allow-top-navigation\"]\n+ policy[\"default-src\"] = [NONE]\n+\n # We don't want to apply our Content Security Policy to the debug\n # toolbar, that's not part of our application and it doesn't work with\n # our restrictive CSP.\n", "issue": "use CSP: sandbox on /simple/ pages\nhttps://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/sandbox + https://www.youtube.com/watch?v=fbhW37JZtSA&feature=youtu.be\r\n\r\nI believe this is a great fit for /simple/, which don't need any ability to do anthing but have a simple HTML structure.\r\n\r\nI _think_ we can replace the whole current header with `Content-Security-Policy: sandbox allow-top-navigations`.\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport collections\nimport copy\n\n\nSELF = \"'self'\"\nNONE = \"'none'\"\n\n\ndef _serialize(policy):\n return \"; \".join([\n \" \".join([k] + [v2 for v2 in v if v2 is not None])\n for k, v in sorted(policy.items())\n ])\n\n\ndef content_security_policy_tween_factory(handler, registry):\n def content_security_policy_tween(request):\n resp = handler(request)\n\n try:\n policy = request.find_service(name=\"csp\")\n except ValueError:\n policy = collections.defaultdict(list)\n\n # We don't want to apply our Content Security Policy to the debug\n # toolbar, that's not part of our application and it doesn't work with\n # our restrictive CSP.\n policy = _serialize(policy).format(request=request)\n if not request.path.startswith(\"/_debug_toolbar/\") and policy:\n resp.headers[\"Content-Security-Policy\"] = policy\n\n return resp\n\n return content_security_policy_tween\n\n\nclass CSPPolicy(collections.defaultdict):\n def __init__(self, policy=None):\n super().__init__(list, policy or {})\n\n def merge(self, policy):\n for key, attrs in policy.items():\n self[key].extend(attrs)\n\n\ndef csp_factory(_, request):\n try:\n return CSPPolicy(copy.deepcopy(request.registry.settings[\"csp\"]))\n except KeyError:\n return CSPPolicy({})\n\n\ndef includeme(config):\n config.register_service_factory(csp_factory, name=\"csp\")\n # Enable a Content Security Policy\n config.add_settings({\n \"csp\": {\n \"base-uri\": [SELF],\n \"block-all-mixed-content\": [],\n \"connect-src\": [\n item for item in [\n SELF,\n config.registry.settings.get(\"statuspage.url\"),\n \"https://api.github.com/repos/\",\n ]\n if item\n ],\n \"default-src\": [NONE],\n \"font-src\": [SELF, \"fonts.gstatic.com\"],\n \"form-action\": [SELF],\n \"frame-ancestors\": [NONE],\n \"frame-src\": [NONE],\n \"img-src\": [\n SELF,\n config.registry.settings[\"camo.url\"],\n \"www.google-analytics.com\",\n ],\n \"script-src\": [\n SELF,\n \"www.googletagmanager.com\",\n \"www.google-analytics.com\",\n ],\n \"style-src\": [SELF, \"fonts.googleapis.com\"],\n },\n })\n config.add_tween(\"warehouse.csp.content_security_policy_tween_factory\")\n", "path": "warehouse/csp.py"}], "after_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport collections\nimport copy\n\n\nSELF = \"'self'\"\nNONE = \"'none'\"\n\n\ndef _serialize(policy):\n return \"; \".join([\n \" \".join([k] + [v2 for v2 in v if v2 is not None])\n for k, v in sorted(policy.items())\n ])\n\n\ndef content_security_policy_tween_factory(handler, registry):\n def content_security_policy_tween(request):\n resp = handler(request)\n\n try:\n policy = request.find_service(name=\"csp\")\n except ValueError:\n policy = collections.defaultdict(list)\n\n # Replace CSP headers on /simple/ pages.\n if request.path.startswith(\"/simple/\"):\n policy = collections.defaultdict(list)\n policy[\"sandbox\"] = [\"allow-top-navigation\"]\n policy[\"default-src\"] = [NONE]\n\n # We don't want to apply our Content Security Policy to the debug\n # toolbar, that's not part of our application and it doesn't work with\n # our restrictive CSP.\n policy = _serialize(policy).format(request=request)\n if not request.path.startswith(\"/_debug_toolbar/\") and policy:\n resp.headers[\"Content-Security-Policy\"] = policy\n\n return resp\n\n return content_security_policy_tween\n\n\nclass CSPPolicy(collections.defaultdict):\n def __init__(self, policy=None):\n super().__init__(list, policy or {})\n\n def merge(self, policy):\n for key, attrs in policy.items():\n self[key].extend(attrs)\n\n\ndef csp_factory(_, request):\n try:\n return CSPPolicy(copy.deepcopy(request.registry.settings[\"csp\"]))\n except KeyError:\n return CSPPolicy({})\n\n\ndef includeme(config):\n config.register_service_factory(csp_factory, name=\"csp\")\n # Enable a Content Security Policy\n config.add_settings({\n \"csp\": {\n \"base-uri\": [SELF],\n \"block-all-mixed-content\": [],\n \"connect-src\": [\n item for item in [\n SELF,\n config.registry.settings.get(\"statuspage.url\"),\n \"https://api.github.com/repos/\",\n ]\n if item\n ],\n \"default-src\": [NONE],\n \"font-src\": [SELF, \"fonts.gstatic.com\"],\n \"form-action\": [SELF],\n \"frame-ancestors\": [NONE],\n \"frame-src\": [NONE],\n \"img-src\": [\n SELF,\n config.registry.settings[\"camo.url\"],\n \"www.google-analytics.com\",\n ],\n \"script-src\": [\n SELF,\n \"www.googletagmanager.com\",\n \"www.google-analytics.com\",\n ],\n \"style-src\": [SELF, \"fonts.googleapis.com\"],\n },\n })\n config.add_tween(\"warehouse.csp.content_security_policy_tween_factory\")\n", "path": "warehouse/csp.py"}]}
| 1,215 | 153 |
gh_patches_debug_33814
|
rasdani/github-patches
|
git_diff
|
ansible__ansible-modules-core-5364
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Expose replace module's backup file path
##### Issue Type:
Feature Idea
##### Ansible Version:
1.8
##### Environment:
N/A
##### Summary:
Expose backup file path in the output of replace module.
##### Steps To Reproduce:
N/A
##### Expected Results:
We would add "backup" and the file path to the JSON output.
##### Actual Results:
N/A
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `files/replace.py`
Content:
```
1 #!/usr/bin/python
2 # -*- coding: utf-8 -*-
3
4 # (c) 2013, Evan Kaufman <[email protected]
5 #
6 # This file is part of Ansible
7 #
8 # Ansible is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # Ansible is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the GNU General Public License
19 # along with Ansible. If not, see <http://www.gnu.org/licenses/>.
20
21 import re
22 import os
23 import tempfile
24
25 DOCUMENTATION = """
26 ---
27 module: replace
28 author: "Evan Kaufman (@EvanK)"
29 extends_documentation_fragment:
30 - files
31 - validate
32 short_description: Replace all instances of a particular string in a
33 file using a back-referenced regular expression.
34 description:
35 - This module will replace all instances of a pattern within a file.
36 - It is up to the user to maintain idempotence by ensuring that the
37 same pattern would never match any replacements made.
38 version_added: "1.6"
39 options:
40 dest:
41 required: true
42 aliases: [ name, destfile ]
43 description:
44 - The file to modify.
45 regexp:
46 required: true
47 description:
48 - The regular expression to look for in the contents of the file.
49 Uses Python regular expressions; see
50 U(http://docs.python.org/2/library/re.html).
51 Uses multiline mode, which means C(^) and C($) match the beginning
52 and end respectively of I(each line) of the file.
53 replace:
54 required: false
55 description:
56 - The string to replace regexp matches. May contain backreferences
57 that will get expanded with the regexp capture groups if the regexp
58 matches. If not set, matches are removed entirely.
59 backup:
60 required: false
61 default: "no"
62 choices: [ "yes", "no" ]
63 description:
64 - Create a backup file including the timestamp information so you can
65 get the original file back if you somehow clobbered it incorrectly.
66 others:
67 description:
68 - All arguments accepted by the M(file) module also work here.
69 required: false
70 follow:
71 required: false
72 default: "no"
73 choices: [ "yes", "no" ]
74 version_added: "1.9"
75 description:
76 - 'This flag indicates that filesystem links, if they exist, should be followed.'
77 """
78
79 EXAMPLES = r"""
80 - replace: dest=/etc/hosts regexp='(\s+)old\.host\.name(\s+.*)?$' replace='\1new.host.name\2' backup=yes
81
82 - replace: dest=/home/jdoe/.ssh/known_hosts regexp='^old\.host\.name[^\n]*\n' owner=jdoe group=jdoe mode=644
83
84 - replace: dest=/etc/apache/ports regexp='^(NameVirtualHost|Listen)\s+80\s*$' replace='\1 127.0.0.1:8080' validate='/usr/sbin/apache2ctl -f %s -t'
85 """
86
87 def write_changes(module,contents,dest):
88
89 tmpfd, tmpfile = tempfile.mkstemp()
90 f = os.fdopen(tmpfd,'wb')
91 f.write(contents)
92 f.close()
93
94 validate = module.params.get('validate', None)
95 valid = not validate
96 if validate:
97 if "%s" not in validate:
98 module.fail_json(msg="validate must contain %%s: %s" % (validate))
99 (rc, out, err) = module.run_command(validate % tmpfile)
100 valid = rc == 0
101 if rc != 0:
102 module.fail_json(msg='failed to validate: '
103 'rc:%s error:%s' % (rc,err))
104 if valid:
105 module.atomic_move(tmpfile, dest, unsafe_writes=module.params['unsafe_writes'])
106
107 def check_file_attrs(module, changed, message):
108
109 file_args = module.load_file_common_arguments(module.params)
110 if module.set_file_attributes_if_different(file_args, False):
111
112 if changed:
113 message += " and "
114 changed = True
115 message += "ownership, perms or SE linux context changed"
116
117 return message, changed
118
119 def main():
120 module = AnsibleModule(
121 argument_spec=dict(
122 dest=dict(required=True, aliases=['name', 'destfile']),
123 regexp=dict(required=True),
124 replace=dict(default='', type='str'),
125 backup=dict(default=False, type='bool'),
126 validate=dict(default=None, type='str'),
127 ),
128 add_file_common_args=True,
129 supports_check_mode=True
130 )
131
132 params = module.params
133 dest = os.path.expanduser(params['dest'])
134 diff = dict()
135
136 if os.path.isdir(dest):
137 module.fail_json(rc=256, msg='Destination %s is a directory !' % dest)
138
139 if not os.path.exists(dest):
140 module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
141 else:
142 f = open(dest, 'rb')
143 contents = f.read()
144 f.close()
145
146 if module._diff:
147 diff = {
148 'before_header': dest,
149 'before': contents,
150 }
151
152 mre = re.compile(params['regexp'], re.MULTILINE)
153 result = re.subn(mre, params['replace'], contents, 0)
154
155 if result[1] > 0 and contents != result[0]:
156 msg = '%s replacements made' % result[1]
157 changed = True
158 if module._diff:
159 diff['after_header'] = dest
160 diff['after'] = result[0]
161 else:
162 msg = ''
163 changed = False
164 diff = dict()
165
166 if changed and not module.check_mode:
167 if params['backup'] and os.path.exists(dest):
168 module.backup_local(dest)
169 if params['follow'] and os.path.islink(dest):
170 dest = os.path.realpath(dest)
171 write_changes(module, result[0], dest)
172
173 msg, changed = check_file_attrs(module, changed, msg)
174 module.exit_json(changed=changed, msg=msg, diff=diff)
175
176 # this is magic, see lib/ansible/module_common.py
177 from ansible.module_utils.basic import *
178
179 if __name__ == '__main__':
180 main()
181
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/files/replace.py b/files/replace.py
--- a/files/replace.py
+++ b/files/replace.py
@@ -131,7 +131,7 @@
params = module.params
dest = os.path.expanduser(params['dest'])
- diff = dict()
+ res_args = dict()
if os.path.isdir(dest):
module.fail_json(rc=256, msg='Destination %s is a directory !' % dest)
@@ -143,12 +143,6 @@
contents = f.read()
f.close()
- if module._diff:
- diff = {
- 'before_header': dest,
- 'before': contents,
- }
-
mre = re.compile(params['regexp'], re.MULTILINE)
result = re.subn(mre, params['replace'], contents, 0)
@@ -156,22 +150,25 @@
msg = '%s replacements made' % result[1]
changed = True
if module._diff:
- diff['after_header'] = dest
- diff['after'] = result[0]
+ res_args['diff'] = {
+ 'before_header': dest,
+ 'before': contents,
+ 'after_header': dest,
+ 'after': result[0],
+ }
else:
msg = ''
changed = False
- diff = dict()
if changed and not module.check_mode:
if params['backup'] and os.path.exists(dest):
- module.backup_local(dest)
+ res_args['backup_file'] = module.backup_local(dest)
if params['follow'] and os.path.islink(dest):
dest = os.path.realpath(dest)
write_changes(module, result[0], dest)
- msg, changed = check_file_attrs(module, changed, msg)
- module.exit_json(changed=changed, msg=msg, diff=diff)
+ res_args['msg'], res_args['changed'] = check_file_attrs(module, changed, msg)
+ module.exit_json(**res_args)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
|
{"golden_diff": "diff --git a/files/replace.py b/files/replace.py\n--- a/files/replace.py\n+++ b/files/replace.py\n@@ -131,7 +131,7 @@\n \n params = module.params\n dest = os.path.expanduser(params['dest'])\n- diff = dict()\n+ res_args = dict()\n \n if os.path.isdir(dest):\n module.fail_json(rc=256, msg='Destination %s is a directory !' % dest)\n@@ -143,12 +143,6 @@\n contents = f.read()\n f.close()\n \n- if module._diff:\n- diff = {\n- 'before_header': dest,\n- 'before': contents,\n- }\n-\n mre = re.compile(params['regexp'], re.MULTILINE)\n result = re.subn(mre, params['replace'], contents, 0)\n \n@@ -156,22 +150,25 @@\n msg = '%s replacements made' % result[1]\n changed = True\n if module._diff:\n- diff['after_header'] = dest\n- diff['after'] = result[0]\n+ res_args['diff'] = {\n+ 'before_header': dest,\n+ 'before': contents,\n+ 'after_header': dest,\n+ 'after': result[0],\n+ }\n else:\n msg = ''\n changed = False\n- diff = dict()\n \n if changed and not module.check_mode:\n if params['backup'] and os.path.exists(dest):\n- module.backup_local(dest)\n+ res_args['backup_file'] = module.backup_local(dest)\n if params['follow'] and os.path.islink(dest):\n dest = os.path.realpath(dest)\n write_changes(module, result[0], dest)\n \n- msg, changed = check_file_attrs(module, changed, msg)\n- module.exit_json(changed=changed, msg=msg, diff=diff)\n+ res_args['msg'], res_args['changed'] = check_file_attrs(module, changed, msg)\n+ module.exit_json(**res_args)\n \n # this is magic, see lib/ansible/module_common.py\n from ansible.module_utils.basic import *\n", "issue": "Expose replace module's backup file path\n##### Issue Type:\n\nFeature Idea\n##### Ansible Version:\n\n1.8\n##### Environment:\n\nN/A\n##### Summary:\n\nExpose backup file path in the output of replace module.\n##### Steps To Reproduce:\n\nN/A\n##### Expected Results:\n\nWe would add \"backup\" and the file path to the JSON output.\n##### Actual Results:\n\nN/A\n\n", "before_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# (c) 2013, Evan Kaufman <[email protected]\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. If not, see <http://www.gnu.org/licenses/>.\n\nimport re\nimport os\nimport tempfile\n\nDOCUMENTATION = \"\"\"\n---\nmodule: replace\nauthor: \"Evan Kaufman (@EvanK)\"\nextends_documentation_fragment:\n - files\n - validate\nshort_description: Replace all instances of a particular string in a\n file using a back-referenced regular expression.\ndescription:\n - This module will replace all instances of a pattern within a file.\n - It is up to the user to maintain idempotence by ensuring that the\n same pattern would never match any replacements made.\nversion_added: \"1.6\"\noptions:\n dest:\n required: true\n aliases: [ name, destfile ]\n description:\n - The file to modify.\n regexp:\n required: true\n description:\n - The regular expression to look for in the contents of the file.\n Uses Python regular expressions; see\n U(http://docs.python.org/2/library/re.html).\n Uses multiline mode, which means C(^) and C($) match the beginning\n and end respectively of I(each line) of the file.\n replace:\n required: false\n description:\n - The string to replace regexp matches. May contain backreferences\n that will get expanded with the regexp capture groups if the regexp\n matches. If not set, matches are removed entirely.\n backup:\n required: false\n default: \"no\"\n choices: [ \"yes\", \"no\" ]\n description:\n - Create a backup file including the timestamp information so you can\n get the original file back if you somehow clobbered it incorrectly.\n others:\n description:\n - All arguments accepted by the M(file) module also work here.\n required: false\n follow:\n required: false\n default: \"no\"\n choices: [ \"yes\", \"no\" ]\n version_added: \"1.9\"\n description:\n - 'This flag indicates that filesystem links, if they exist, should be followed.'\n\"\"\"\n\nEXAMPLES = r\"\"\"\n- replace: dest=/etc/hosts regexp='(\\s+)old\\.host\\.name(\\s+.*)?$' replace='\\1new.host.name\\2' backup=yes\n\n- replace: dest=/home/jdoe/.ssh/known_hosts regexp='^old\\.host\\.name[^\\n]*\\n' owner=jdoe group=jdoe mode=644\n\n- replace: dest=/etc/apache/ports regexp='^(NameVirtualHost|Listen)\\s+80\\s*$' replace='\\1 127.0.0.1:8080' validate='/usr/sbin/apache2ctl -f %s -t'\n\"\"\"\n\ndef write_changes(module,contents,dest):\n\n tmpfd, tmpfile = tempfile.mkstemp()\n f = os.fdopen(tmpfd,'wb')\n f.write(contents)\n f.close()\n\n validate = module.params.get('validate', None)\n valid = not validate\n if validate:\n if \"%s\" not in validate:\n module.fail_json(msg=\"validate must contain %%s: %s\" % (validate))\n (rc, out, err) = module.run_command(validate % tmpfile)\n valid = rc == 0\n if rc != 0:\n module.fail_json(msg='failed to validate: '\n 'rc:%s error:%s' % (rc,err))\n if valid:\n module.atomic_move(tmpfile, dest, unsafe_writes=module.params['unsafe_writes'])\n\ndef check_file_attrs(module, changed, message):\n\n file_args = module.load_file_common_arguments(module.params)\n if module.set_file_attributes_if_different(file_args, False):\n\n if changed:\n message += \" and \"\n changed = True\n message += \"ownership, perms or SE linux context changed\"\n\n return message, changed\n\ndef main():\n module = AnsibleModule(\n argument_spec=dict(\n dest=dict(required=True, aliases=['name', 'destfile']),\n regexp=dict(required=True),\n replace=dict(default='', type='str'),\n backup=dict(default=False, type='bool'),\n validate=dict(default=None, type='str'),\n ),\n add_file_common_args=True,\n supports_check_mode=True\n )\n\n params = module.params\n dest = os.path.expanduser(params['dest'])\n diff = dict()\n\n if os.path.isdir(dest):\n module.fail_json(rc=256, msg='Destination %s is a directory !' % dest)\n\n if not os.path.exists(dest):\n module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)\n else:\n f = open(dest, 'rb')\n contents = f.read()\n f.close()\n\n if module._diff:\n diff = {\n 'before_header': dest,\n 'before': contents,\n }\n\n mre = re.compile(params['regexp'], re.MULTILINE)\n result = re.subn(mre, params['replace'], contents, 0)\n\n if result[1] > 0 and contents != result[0]:\n msg = '%s replacements made' % result[1]\n changed = True\n if module._diff:\n diff['after_header'] = dest\n diff['after'] = result[0]\n else:\n msg = ''\n changed = False\n diff = dict()\n\n if changed and not module.check_mode:\n if params['backup'] and os.path.exists(dest):\n module.backup_local(dest)\n if params['follow'] and os.path.islink(dest):\n dest = os.path.realpath(dest)\n write_changes(module, result[0], dest)\n\n msg, changed = check_file_attrs(module, changed, msg)\n module.exit_json(changed=changed, msg=msg, diff=diff)\n\n# this is magic, see lib/ansible/module_common.py\nfrom ansible.module_utils.basic import *\n\nif __name__ == '__main__':\n main()\n", "path": "files/replace.py"}], "after_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# (c) 2013, Evan Kaufman <[email protected]\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. If not, see <http://www.gnu.org/licenses/>.\n\nimport re\nimport os\nimport tempfile\n\nDOCUMENTATION = \"\"\"\n---\nmodule: replace\nauthor: \"Evan Kaufman (@EvanK)\"\nextends_documentation_fragment:\n - files\n - validate\nshort_description: Replace all instances of a particular string in a\n file using a back-referenced regular expression.\ndescription:\n - This module will replace all instances of a pattern within a file.\n - It is up to the user to maintain idempotence by ensuring that the\n same pattern would never match any replacements made.\nversion_added: \"1.6\"\noptions:\n dest:\n required: true\n aliases: [ name, destfile ]\n description:\n - The file to modify.\n regexp:\n required: true\n description:\n - The regular expression to look for in the contents of the file.\n Uses Python regular expressions; see\n U(http://docs.python.org/2/library/re.html).\n Uses multiline mode, which means C(^) and C($) match the beginning\n and end respectively of I(each line) of the file.\n replace:\n required: false\n description:\n - The string to replace regexp matches. May contain backreferences\n that will get expanded with the regexp capture groups if the regexp\n matches. If not set, matches are removed entirely.\n backup:\n required: false\n default: \"no\"\n choices: [ \"yes\", \"no\" ]\n description:\n - Create a backup file including the timestamp information so you can\n get the original file back if you somehow clobbered it incorrectly.\n others:\n description:\n - All arguments accepted by the M(file) module also work here.\n required: false\n follow:\n required: false\n default: \"no\"\n choices: [ \"yes\", \"no\" ]\n version_added: \"1.9\"\n description:\n - 'This flag indicates that filesystem links, if they exist, should be followed.'\n\"\"\"\n\nEXAMPLES = r\"\"\"\n- replace: dest=/etc/hosts regexp='(\\s+)old\\.host\\.name(\\s+.*)?$' replace='\\1new.host.name\\2' backup=yes\n\n- replace: dest=/home/jdoe/.ssh/known_hosts regexp='^old\\.host\\.name[^\\n]*\\n' owner=jdoe group=jdoe mode=644\n\n- replace: dest=/etc/apache/ports regexp='^(NameVirtualHost|Listen)\\s+80\\s*$' replace='\\1 127.0.0.1:8080' validate='/usr/sbin/apache2ctl -f %s -t'\n\"\"\"\n\ndef write_changes(module,contents,dest):\n\n tmpfd, tmpfile = tempfile.mkstemp()\n f = os.fdopen(tmpfd,'wb')\n f.write(contents)\n f.close()\n\n validate = module.params.get('validate', None)\n valid = not validate\n if validate:\n if \"%s\" not in validate:\n module.fail_json(msg=\"validate must contain %%s: %s\" % (validate))\n (rc, out, err) = module.run_command(validate % tmpfile)\n valid = rc == 0\n if rc != 0:\n module.fail_json(msg='failed to validate: '\n 'rc:%s error:%s' % (rc,err))\n if valid:\n module.atomic_move(tmpfile, dest, unsafe_writes=module.params['unsafe_writes'])\n\ndef check_file_attrs(module, changed, message):\n\n file_args = module.load_file_common_arguments(module.params)\n if module.set_file_attributes_if_different(file_args, False):\n\n if changed:\n message += \" and \"\n changed = True\n message += \"ownership, perms or SE linux context changed\"\n\n return message, changed\n\ndef main():\n module = AnsibleModule(\n argument_spec=dict(\n dest=dict(required=True, aliases=['name', 'destfile']),\n regexp=dict(required=True),\n replace=dict(default='', type='str'),\n backup=dict(default=False, type='bool'),\n validate=dict(default=None, type='str'),\n ),\n add_file_common_args=True,\n supports_check_mode=True\n )\n\n params = module.params\n dest = os.path.expanduser(params['dest'])\n res_args = dict()\n\n if os.path.isdir(dest):\n module.fail_json(rc=256, msg='Destination %s is a directory !' % dest)\n\n if not os.path.exists(dest):\n module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)\n else:\n f = open(dest, 'rb')\n contents = f.read()\n f.close()\n\n mre = re.compile(params['regexp'], re.MULTILINE)\n result = re.subn(mre, params['replace'], contents, 0)\n\n if result[1] > 0 and contents != result[0]:\n msg = '%s replacements made' % result[1]\n changed = True\n if module._diff:\n res_args['diff'] = {\n 'before_header': dest,\n 'before': contents,\n 'after_header': dest,\n 'after': result[0],\n }\n else:\n msg = ''\n changed = False\n\n if changed and not module.check_mode:\n if params['backup'] and os.path.exists(dest):\n res_args['backup_file'] = module.backup_local(dest)\n if params['follow'] and os.path.islink(dest):\n dest = os.path.realpath(dest)\n write_changes(module, result[0], dest)\n\n res_args['msg'], res_args['changed'] = check_file_attrs(module, changed, msg)\n module.exit_json(**res_args)\n\n# this is magic, see lib/ansible/module_common.py\nfrom ansible.module_utils.basic import *\n\nif __name__ == '__main__':\n main()\n", "path": "files/replace.py"}]}
| 2,222 | 477 |
gh_patches_debug_37579
|
rasdani/github-patches
|
git_diff
|
urllib3__urllib3-1186
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Running `tox -e gae` is broken on current master
Extracted from #1182:
```console
$ tox -e gae
GLOB sdist-make: /Users/alexwlchan/repos/urllib3/setup.py
gae inst-nodeps: /Users/alexwlchan/repos/urllib3/.tox/dist/urllib3-dev.zip
gae installed: appdirs==1.4.3,backports.ssl-match-hostname==3.5.0.1,certifi==2017.4.17,coverage==3.7.1,funcsigs==1.0.2,mock==1.3.0,nose==1.3.7,nose-exclude==0.4.1,NoseGAE==0.5.7,packaging==16.8,pbr==3.0.1,pkginfo==1.4.1,pluggy==0.3.1,psutil==4.3.1,py==1.4.33,pyparsing==2.2.0,PySocks==1.5.6,pytest==3.1.0,requests==2.14.2,six==1.10.0,tornado==4.2.1,tox==2.1.1,twine==1.5.0,urllib3===dev,virtualenv==15.1.0
gae runtests: PYTHONHASHSEED='2409600760'
gae runtests: commands[0] | nosetests -c /Users/alexwlchan/repos/urllib3/test/appengine/nose.cfg test/appengine
Traceback (most recent call last):
File ".tox/gae/bin/nosetests", line 11, in <module>
sys.exit(run_exit())
File "/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/core.py", line 121, in __init__
**extra_args)
File "/usr/local/Cellar/python/2.7.13/Frameworks/Python.framework/Versions/2.7/lib/python2.7/unittest/main.py", line 94, in __init__
self.parseArgs(argv)
File "/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/core.py", line 145, in parseArgs
self.config.configure(argv, doc=self.usage())
File "/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/config.py", line 346, in configure
self.plugins.configure(options, self)
File "/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/plugins/manager.py", line 284, in configure
cfg(options, config)
File "/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/plugins/manager.py", line 99, in __call__
return self.call(*arg, **kw)
File "/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/plugins/manager.py", line 167, in simple
result = meth(*arg, **kw)
File "/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nosegae.py", line 91, in configure
import dev_appserver
ImportError: No module named dev_appserver
ERROR: InvocationError: '/Users/alexwlchan/repos/urllib3/.tox/gae/bin/nosetests -c /Users/alexwlchan/repos/urllib3/test/appengine/nose.cfg test/appengine'
________________________________________________________________________________ summary _________________________________________________________________________________
ERROR: gae: commands failed
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `_travis/fetch_gae_sdk.py`
Content:
```
1 #!/usr/bin/env python
2
3 # Copyright 2015 Google Inc. All rights reserved.
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
16
17 # Retrieved from https://github.com/Google/oauth2client
18
19 """Fetch the most recent GAE SDK and decompress it in the current directory.
20
21 Usage:
22 fetch_gae_sdk.py [<dest_dir>]
23
24 Current releases are listed here:
25 https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured
26 """
27 from __future__ import print_function
28
29 import json
30 import os
31 import StringIO
32 import sys
33 import urllib2
34 import zipfile
35
36
37 _SDK_URL = (
38 'https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured')
39
40
41 def get_gae_versions():
42 try:
43 version_info_json = urllib2.urlopen(_SDK_URL).read()
44 except:
45 return {}
46 try:
47 version_info = json.loads(version_info_json)
48 except:
49 return {}
50 return version_info.get('items', {})
51
52
53 def _version_tuple(v):
54 version_string = os.path.splitext(v['name'])[0].rpartition('_')[2]
55 return tuple(int(x) for x in version_string.split('.'))
56
57
58 def get_sdk_urls(sdk_versions):
59 python_releases = [v for v in sdk_versions
60 if v['name'].startswith('featured/google_appengine')]
61 current_releases = sorted(python_releases, key=_version_tuple,
62 reverse=True)
63 return [release['mediaLink'] for release in current_releases]
64
65
66 def main(argv):
67 if len(argv) > 2:
68 print('Usage: {0} [<destination_dir>]'.format(argv[0]))
69 return 1
70 if len(argv) > 1:
71 dest_dir = argv[1]
72 else:
73 try:
74 dest_dir = os.path.dirname(os.environ['GAE_PYTHONPATH'])
75 except IndexError:
76 dest_dir = '.'
77 if not os.path.exists(dest_dir):
78 os.makedirs(dest_dir)
79
80 if os.path.exists(os.path.join(dest_dir, 'google_appengine')):
81 print('GAE SDK already installed at {0}, exiting.'.format(dest_dir))
82 return 0
83
84 sdk_versions = get_gae_versions()
85 if not sdk_versions:
86 print('Error fetching GAE SDK version info')
87 return 1
88 sdk_urls = get_sdk_urls(sdk_versions)
89 for sdk_url in sdk_urls:
90 try:
91 sdk_contents = StringIO.StringIO(urllib2.urlopen(sdk_url).read())
92 break
93 except:
94 pass
95 else:
96 print('Could not read SDK from any of ', sdk_urls)
97 return 1
98 sdk_contents.seek(0)
99 try:
100 with zipfile.ZipFile(sdk_contents) as zip_contents:
101 zip_contents.extractall(dest_dir)
102 except:
103 print('Error extracting SDK contents')
104 return 1
105
106
107 if __name__ == '__main__':
108 sys.exit(main(sys.argv[:]))
109
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/_travis/fetch_gae_sdk.py b/_travis/fetch_gae_sdk.py
deleted file mode 100644
--- a/_travis/fetch_gae_sdk.py
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2015 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Retrieved from https://github.com/Google/oauth2client
-
-"""Fetch the most recent GAE SDK and decompress it in the current directory.
-
-Usage:
- fetch_gae_sdk.py [<dest_dir>]
-
-Current releases are listed here:
- https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured
-"""
-from __future__ import print_function
-
-import json
-import os
-import StringIO
-import sys
-import urllib2
-import zipfile
-
-
-_SDK_URL = (
- 'https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured')
-
-
-def get_gae_versions():
- try:
- version_info_json = urllib2.urlopen(_SDK_URL).read()
- except:
- return {}
- try:
- version_info = json.loads(version_info_json)
- except:
- return {}
- return version_info.get('items', {})
-
-
-def _version_tuple(v):
- version_string = os.path.splitext(v['name'])[0].rpartition('_')[2]
- return tuple(int(x) for x in version_string.split('.'))
-
-
-def get_sdk_urls(sdk_versions):
- python_releases = [v for v in sdk_versions
- if v['name'].startswith('featured/google_appengine')]
- current_releases = sorted(python_releases, key=_version_tuple,
- reverse=True)
- return [release['mediaLink'] for release in current_releases]
-
-
-def main(argv):
- if len(argv) > 2:
- print('Usage: {0} [<destination_dir>]'.format(argv[0]))
- return 1
- if len(argv) > 1:
- dest_dir = argv[1]
- else:
- try:
- dest_dir = os.path.dirname(os.environ['GAE_PYTHONPATH'])
- except IndexError:
- dest_dir = '.'
- if not os.path.exists(dest_dir):
- os.makedirs(dest_dir)
-
- if os.path.exists(os.path.join(dest_dir, 'google_appengine')):
- print('GAE SDK already installed at {0}, exiting.'.format(dest_dir))
- return 0
-
- sdk_versions = get_gae_versions()
- if not sdk_versions:
- print('Error fetching GAE SDK version info')
- return 1
- sdk_urls = get_sdk_urls(sdk_versions)
- for sdk_url in sdk_urls:
- try:
- sdk_contents = StringIO.StringIO(urllib2.urlopen(sdk_url).read())
- break
- except:
- pass
- else:
- print('Could not read SDK from any of ', sdk_urls)
- return 1
- sdk_contents.seek(0)
- try:
- with zipfile.ZipFile(sdk_contents) as zip_contents:
- zip_contents.extractall(dest_dir)
- except:
- print('Error extracting SDK contents')
- return 1
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[:]))
|
{"golden_diff": "diff --git a/_travis/fetch_gae_sdk.py b/_travis/fetch_gae_sdk.py\ndeleted file mode 100644\n--- a/_travis/fetch_gae_sdk.py\n+++ /dev/null\n@@ -1,108 +0,0 @@\n-#!/usr/bin/env python\n-\n-# Copyright 2015 Google Inc. All rights reserved.\n-#\n-# Licensed under the Apache License, Version 2.0 (the \"License\");\n-# you may not use this file except in compliance with the License.\n-# You may obtain a copy of the License at\n-#\n-# http://www.apache.org/licenses/LICENSE-2.0\n-#\n-# Unless required by applicable law or agreed to in writing, software\n-# distributed under the License is distributed on an \"AS IS\" BASIS,\n-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n-# See the License for the specific language governing permissions and\n-# limitations under the License.\n-\n-# Retrieved from https://github.com/Google/oauth2client\n-\n-\"\"\"Fetch the most recent GAE SDK and decompress it in the current directory.\n-\n-Usage:\n- fetch_gae_sdk.py [<dest_dir>]\n-\n-Current releases are listed here:\n- https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured\n-\"\"\"\n-from __future__ import print_function\n-\n-import json\n-import os\n-import StringIO\n-import sys\n-import urllib2\n-import zipfile\n-\n-\n-_SDK_URL = (\n- 'https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured')\n-\n-\n-def get_gae_versions():\n- try:\n- version_info_json = urllib2.urlopen(_SDK_URL).read()\n- except:\n- return {}\n- try:\n- version_info = json.loads(version_info_json)\n- except:\n- return {}\n- return version_info.get('items', {})\n-\n-\n-def _version_tuple(v):\n- version_string = os.path.splitext(v['name'])[0].rpartition('_')[2]\n- return tuple(int(x) for x in version_string.split('.'))\n-\n-\n-def get_sdk_urls(sdk_versions):\n- python_releases = [v for v in sdk_versions\n- if v['name'].startswith('featured/google_appengine')]\n- current_releases = sorted(python_releases, key=_version_tuple,\n- reverse=True)\n- return [release['mediaLink'] for release in current_releases]\n-\n-\n-def main(argv):\n- if len(argv) > 2:\n- print('Usage: {0} [<destination_dir>]'.format(argv[0]))\n- return 1\n- if len(argv) > 1:\n- dest_dir = argv[1]\n- else:\n- try:\n- dest_dir = os.path.dirname(os.environ['GAE_PYTHONPATH'])\n- except IndexError:\n- dest_dir = '.'\n- if not os.path.exists(dest_dir):\n- os.makedirs(dest_dir)\n-\n- if os.path.exists(os.path.join(dest_dir, 'google_appengine')):\n- print('GAE SDK already installed at {0}, exiting.'.format(dest_dir))\n- return 0\n-\n- sdk_versions = get_gae_versions()\n- if not sdk_versions:\n- print('Error fetching GAE SDK version info')\n- return 1\n- sdk_urls = get_sdk_urls(sdk_versions)\n- for sdk_url in sdk_urls:\n- try:\n- sdk_contents = StringIO.StringIO(urllib2.urlopen(sdk_url).read())\n- break\n- except:\n- pass\n- else:\n- print('Could not read SDK from any of ', sdk_urls)\n- return 1\n- sdk_contents.seek(0)\n- try:\n- with zipfile.ZipFile(sdk_contents) as zip_contents:\n- zip_contents.extractall(dest_dir)\n- except:\n- print('Error extracting SDK contents')\n- return 1\n-\n-\n-if __name__ == '__main__':\n- sys.exit(main(sys.argv[:]))\n", "issue": "Running `tox -e gae` is broken on current master\nExtracted from #1182:\r\n\r\n```console\r\n$ tox -e gae\r\nGLOB sdist-make: /Users/alexwlchan/repos/urllib3/setup.py\r\ngae inst-nodeps: /Users/alexwlchan/repos/urllib3/.tox/dist/urllib3-dev.zip\r\ngae installed: appdirs==1.4.3,backports.ssl-match-hostname==3.5.0.1,certifi==2017.4.17,coverage==3.7.1,funcsigs==1.0.2,mock==1.3.0,nose==1.3.7,nose-exclude==0.4.1,NoseGAE==0.5.7,packaging==16.8,pbr==3.0.1,pkginfo==1.4.1,pluggy==0.3.1,psutil==4.3.1,py==1.4.33,pyparsing==2.2.0,PySocks==1.5.6,pytest==3.1.0,requests==2.14.2,six==1.10.0,tornado==4.2.1,tox==2.1.1,twine==1.5.0,urllib3===dev,virtualenv==15.1.0\r\ngae runtests: PYTHONHASHSEED='2409600760'\r\ngae runtests: commands[0] | nosetests -c /Users/alexwlchan/repos/urllib3/test/appengine/nose.cfg test/appengine\r\nTraceback (most recent call last):\r\n File \".tox/gae/bin/nosetests\", line 11, in <module>\r\n sys.exit(run_exit())\r\n File \"/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/core.py\", line 121, in __init__\r\n **extra_args)\r\n File \"/usr/local/Cellar/python/2.7.13/Frameworks/Python.framework/Versions/2.7/lib/python2.7/unittest/main.py\", line 94, in __init__\r\n self.parseArgs(argv)\r\n File \"/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/core.py\", line 145, in parseArgs\r\n self.config.configure(argv, doc=self.usage())\r\n File \"/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/config.py\", line 346, in configure\r\n self.plugins.configure(options, self)\r\n File \"/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/plugins/manager.py\", line 284, in configure\r\n cfg(options, config)\r\n File \"/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/plugins/manager.py\", line 99, in __call__\r\n return self.call(*arg, **kw)\r\n File \"/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nose/plugins/manager.py\", line 167, in simple\r\n result = meth(*arg, **kw)\r\n File \"/Users/alexwlchan/repos/urllib3/.tox/gae/lib/python2.7/site-packages/nosegae.py\", line 91, in configure\r\n import dev_appserver\r\nImportError: No module named dev_appserver\r\nERROR: InvocationError: '/Users/alexwlchan/repos/urllib3/.tox/gae/bin/nosetests -c /Users/alexwlchan/repos/urllib3/test/appengine/nose.cfg test/appengine'\r\n________________________________________________________________________________ summary _________________________________________________________________________________\r\nERROR: gae: commands failed\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\n\n# Copyright 2015 Google Inc. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Retrieved from https://github.com/Google/oauth2client\n\n\"\"\"Fetch the most recent GAE SDK and decompress it in the current directory.\n\nUsage:\n fetch_gae_sdk.py [<dest_dir>]\n\nCurrent releases are listed here:\n https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured\n\"\"\"\nfrom __future__ import print_function\n\nimport json\nimport os\nimport StringIO\nimport sys\nimport urllib2\nimport zipfile\n\n\n_SDK_URL = (\n 'https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured')\n\n\ndef get_gae_versions():\n try:\n version_info_json = urllib2.urlopen(_SDK_URL).read()\n except:\n return {}\n try:\n version_info = json.loads(version_info_json)\n except:\n return {}\n return version_info.get('items', {})\n\n\ndef _version_tuple(v):\n version_string = os.path.splitext(v['name'])[0].rpartition('_')[2]\n return tuple(int(x) for x in version_string.split('.'))\n\n\ndef get_sdk_urls(sdk_versions):\n python_releases = [v for v in sdk_versions\n if v['name'].startswith('featured/google_appengine')]\n current_releases = sorted(python_releases, key=_version_tuple,\n reverse=True)\n return [release['mediaLink'] for release in current_releases]\n\n\ndef main(argv):\n if len(argv) > 2:\n print('Usage: {0} [<destination_dir>]'.format(argv[0]))\n return 1\n if len(argv) > 1:\n dest_dir = argv[1]\n else:\n try:\n dest_dir = os.path.dirname(os.environ['GAE_PYTHONPATH'])\n except IndexError:\n dest_dir = '.'\n if not os.path.exists(dest_dir):\n os.makedirs(dest_dir)\n\n if os.path.exists(os.path.join(dest_dir, 'google_appengine')):\n print('GAE SDK already installed at {0}, exiting.'.format(dest_dir))\n return 0\n\n sdk_versions = get_gae_versions()\n if not sdk_versions:\n print('Error fetching GAE SDK version info')\n return 1\n sdk_urls = get_sdk_urls(sdk_versions)\n for sdk_url in sdk_urls:\n try:\n sdk_contents = StringIO.StringIO(urllib2.urlopen(sdk_url).read())\n break\n except:\n pass\n else:\n print('Could not read SDK from any of ', sdk_urls)\n return 1\n sdk_contents.seek(0)\n try:\n with zipfile.ZipFile(sdk_contents) as zip_contents:\n zip_contents.extractall(dest_dir)\n except:\n print('Error extracting SDK contents')\n return 1\n\n\nif __name__ == '__main__':\n sys.exit(main(sys.argv[:]))\n", "path": "_travis/fetch_gae_sdk.py"}], "after_files": [{"content": null, "path": "_travis/fetch_gae_sdk.py"}]}
| 2,102 | 892 |
gh_patches_debug_44517
|
rasdani/github-patches
|
git_diff
|
streamlink__streamlink-2205
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
BT sports plugin does not find playable streams
<!--
Thanks for reporting a plugin issue!
USE THE TEMPLATE. Otherwise your plugin issue may be rejected.
First, see the contribution guidelines:
https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink
Also check the list of open and closed plugin issues:
https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22
Please see the text preview to avoid unnecessary formatting errors.
-->
## Plugin Issue
<!-- Replace [ ] with [x] in order to check the box -->
- [x] This is a plugin issue and I have read the contribution guidelines.
### Description
<!-- Explain the plugin issue as thoroughly as you can. -->
The plugin doesn't find any playable streams.
### Reproduction steps / Explicit stream URLs to test
<!-- How can we reproduce this? Please note the exact steps below using the list format supplied. If you need more steps please add them. -->
1. https://sport.bt.com/btsportplayer/live-streaming-01363980987389?leId=1364293056880
2. https://sport.bt.com/btsportplayer/live-streaming-01363980987389?leId=1364293057384
3. ...
### Log output
<!--
TEXT LOG OUTPUT IS REQUIRED for a plugin issue!
Use the `--loglevel debug` parameter and avoid using parameters which suppress log output.
https://streamlink.github.io/cli.html#cmdoption-l
Make sure to **remove usernames and passwords**
You can copy the output to https://gist.github.com/ or paste it below.
-->
```
C:\Users\>streamlink --btsports-email EMAIL --btsports-password PASSWORD https://sport.bt.co
m/btsportplayer/live-streaming-01363980987389?leId=1364293057384 --loglevel debug
[cli][debug] OS: Windows 10
[cli][debug] Python: 3.5.2
[cli][debug] Streamlink: 0.14.2
[cli][debug] Requests(2.19.1), Socks(1.6.7), Websocket(0.48.0)
[cli][info] Found matching plugin btsports for URL https://sport.bt.com/btsportplayer/live-streaming-01363980987389?leId
=1364293057384
[cli][debug] Plugin specific arguments:
[cli][debug] [email protected] (email)
[cli][debug] --btsports-password=******** (password)
[plugin.btsports][debug] Logging in as [email protected]
[plugin.btsports][debug] Redirected to: https://home.bt.com/s/assets/pages/login-interstitial.html?TARGET=https%253A%2F%
2Fsport.bt.com%2Fbtsportplayer%2Flive-streaming-01363980987389%253FleId%253D1364293057384
error: No playable streams found on this URL: https://sport.bt.com/btsportplayer/live-streaming-01363980987389?leId=1364
293057384
```
### Additional comments, screenshots, etc.
the links are for Champions League replay matches
should give multiple options for streams.
[Love Streamlink? Please consider supporting our collective. Thanks!](https://opencollective.com/streamlink/donate)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/btsports.py`
Content:
```
1 import re
2 import time
3 from uuid import uuid4
4
5 from streamlink.compat import quote
6 from streamlink.plugin import Plugin, PluginArguments, PluginArgument
7 from streamlink.plugin.api import useragents
8 from streamlink.stream import HLSStream
9 from streamlink.utils import url_equal
10
11
12 class BTSports(Plugin):
13 url_re = re.compile(r"https?://sport.bt.com")
14
15 arguments = PluginArguments(
16 PluginArgument(
17 "email",
18 requires=["password"],
19 metavar="EMAIL",
20 required=True,
21 help="""
22 The email associated with your BT Sport account, required to access any
23 BT Sport stream.
24 """
25
26 ),
27 PluginArgument(
28 "password",
29 sensitive=True,
30 metavar="PASSWORD",
31 help="Your BT Sport account password."
32 )
33 )
34
35 content_re = re.compile(r"CONTENT_(\w+)\s*=\s*'(\w+)'")
36 saml_re = re.compile(r'''name="SAMLResponse" value="(.*?)"''', re.M | re.DOTALL)
37 api_url = "https://be.avs.bt.com/AVS/besc"
38 saml_url = "https://samlfed.bt.com/sportgetfedwebhls"
39 login_url = "https://signin1.bt.com/siteminderagent/forms/login.fcc"
40
41 def __init__(self, url):
42 super(BTSports, self).__init__(url)
43 self.session.http.headers = {"User-Agent": useragents.FIREFOX}
44
45 @classmethod
46 def can_handle_url(cls, url):
47 return cls.url_re.match(url) is not None
48
49 def login(self, username, password):
50 self.logger.debug("Logging in as {0}".format(username))
51
52 redirect_to = "https://home.bt.com/ss/Satellite/secure/loginforward?redirectURL={0}".format(quote(self.url))
53 data = {
54 "cookieExpp": "30",
55 "Switch": "yes",
56 "SMPostLoginUrl": "/appsyouraccount/secure/postlogin",
57 "loginforward": "https://home.bt.com/ss/Satellite/secure/loginforward",
58 "smauthreason": "0",
59 "TARGET": redirect_to,
60 "USER": username,
61 "PASSWORD": password}
62
63 res = self.session.http.post(self.login_url, data=data)
64
65 self.logger.debug("Redirected to: {0}".format(res.url))
66
67
68 if url_equal(res.url, self.url, ignore_scheme=True):
69 self.logger.debug("Login successful, getting SAML token")
70 res = self.session.http.get("https://samlfed.bt.com/sportgetfedwebhls?bt.cid={0}".format(self.acid()))
71 d = self.saml_re.search(res.text)
72 if d:
73 saml_data = d.group(1)
74 self.logger.debug("BT Sports federated login...")
75 res = self.session.http.post(self.api_url,
76 params={"action": "LoginBT", "channel": "WEBHLS", "bt.cid": self.acid},
77 data={"SAMLResponse": saml_data})
78 fed_json = self.session.http.json(res)
79 success = fed_json['resultCode'] == "OK"
80 if not success:
81 self.logger.error("Failed to login: {0} - {1}".format(fed_json['errorDescription'],
82 fed_json['message']))
83 return success
84 return False
85
86 def device_id(self):
87 device_id = self.cache.get("device_id") or str(uuid4())
88 self.cache.set("device_id", device_id)
89 return device_id
90
91 def acid(self):
92 acid = self.cache.get("acid") or "{cid}-B-{timestamp}".format(cid=self.device_id(), timestamp=int(time.time()))
93 self.cache.set("acid", acid)
94 return acid
95
96 def _get_cdn(self, channel_id, channel_type="LIVE"):
97 d = {"action": "GetCDN",
98 "type": channel_type,
99 "id": channel_id,
100 "channel": "WEBHLS",
101 "asJson": "Y",
102 "bt.cid": self.acid(),
103 "_": int(time.time())}
104
105 res = self.session.http.get(self.api_url, params=d, headers={"Accept": "application/json"})
106 return self.session.http.json(res)
107
108 def _get_streams(self):
109 if self.options.get("email") and self.options.get("password"):
110 if self.login(self.options.get("email"), self.options.get("password")):
111 self.logger.debug("Logged in and authenticated with BT Sports.")
112
113 res = self.session.http.get(self.url)
114 m = self.content_re.findall(res.text)
115 if m:
116 info = dict(m)
117 data = self._get_cdn(info.get("ID"), info.get("TYPE"))
118 if data['resultCode'] == 'OK':
119 return HLSStream.parse_variant_playlist(self.session, data['resultObj']['src'])
120 else:
121 self.logger.error("Failed to get stream with error: {0} - {1}".format(data['errorDescription'],
122 data['message']))
123 else:
124 self.logger.error("A username and password is required to use BT Sports")
125
126 __plugin__ = BTSports
127
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/streamlink/plugins/btsports.py b/src/streamlink/plugins/btsports.py
--- a/src/streamlink/plugins/btsports.py
+++ b/src/streamlink/plugins/btsports.py
@@ -1,12 +1,15 @@
-import re
import time
+
+import logging
+import re
from uuid import uuid4
from streamlink.compat import quote
from streamlink.plugin import Plugin, PluginArguments, PluginArgument
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream
-from streamlink.utils import url_equal
+
+log = logging.getLogger(__name__)
class BTSports(Plugin):
@@ -47,25 +50,23 @@
return cls.url_re.match(url) is not None
def login(self, username, password):
- self.logger.debug("Logging in as {0}".format(username))
+ log.debug("Logging in as {0}".format(username))
- redirect_to = "https://home.bt.com/ss/Satellite/secure/loginforward?redirectURL={0}".format(quote(self.url))
+ redirect_to = "https://home.bt.com/ss/Satellite/secure/loginforward?view=btsport&redirectURL={0}".format(quote(self.url))
data = {
"cookieExpp": "30",
"Switch": "yes",
"SMPostLoginUrl": "/appsyouraccount/secure/postlogin",
- "loginforward": "https://home.bt.com/ss/Satellite/secure/loginforward",
+ "loginforward": "https://home.bt.com/ss/Satellite/secure/loginforward?view=btsport",
"smauthreason": "0",
"TARGET": redirect_to,
"USER": username,
"PASSWORD": password}
-
res = self.session.http.post(self.login_url, data=data)
- self.logger.debug("Redirected to: {0}".format(res.url))
-
+ log.debug("Redirected to: {0}".format(res.url))
- if url_equal(res.url, self.url, ignore_scheme=True):
+ if "loginerror" not in res.text:
self.logger.debug("Login successful, getting SAML token")
res = self.session.http.get("https://samlfed.bt.com/sportgetfedwebhls?bt.cid={0}".format(self.acid()))
d = self.saml_re.search(res.text)
@@ -81,7 +82,8 @@
self.logger.error("Failed to login: {0} - {1}".format(fed_json['errorDescription'],
fed_json['message']))
return success
- return False
+ else:
+ return False
def device_id(self):
device_id = self.cache.get("device_id") or str(uuid4())
@@ -108,19 +110,23 @@
def _get_streams(self):
if self.options.get("email") and self.options.get("password"):
if self.login(self.options.get("email"), self.options.get("password")):
- self.logger.debug("Logged in and authenticated with BT Sports.")
+ log.debug("Logged in and authenticated with BT Sports.")
res = self.session.http.get(self.url)
m = self.content_re.findall(res.text)
if m:
info = dict(m)
data = self._get_cdn(info.get("ID"), info.get("TYPE"))
+ log.debug("CDN respsonse: {0}".format(data))
if data['resultCode'] == 'OK':
return HLSStream.parse_variant_playlist(self.session, data['resultObj']['src'])
else:
- self.logger.error("Failed to get stream with error: {0} - {1}".format(data['errorDescription'],
+ log.error("Failed to get stream with error: {0} - {1}".format(data['errorDescription'],
data['message']))
+ else:
+ log.error("Login failed.")
else:
- self.logger.error("A username and password is required to use BT Sports")
+ log.error("A username and password is required to use BT Sports")
+
__plugin__ = BTSports
|
{"golden_diff": "diff --git a/src/streamlink/plugins/btsports.py b/src/streamlink/plugins/btsports.py\n--- a/src/streamlink/plugins/btsports.py\n+++ b/src/streamlink/plugins/btsports.py\n@@ -1,12 +1,15 @@\n-import re\n import time\n+\n+import logging\n+import re\n from uuid import uuid4\n \n from streamlink.compat import quote\n from streamlink.plugin import Plugin, PluginArguments, PluginArgument\n from streamlink.plugin.api import useragents\n from streamlink.stream import HLSStream\n-from streamlink.utils import url_equal\n+\n+log = logging.getLogger(__name__)\n \n \n class BTSports(Plugin):\n@@ -47,25 +50,23 @@\n return cls.url_re.match(url) is not None\n \n def login(self, username, password):\n- self.logger.debug(\"Logging in as {0}\".format(username))\n+ log.debug(\"Logging in as {0}\".format(username))\n \n- redirect_to = \"https://home.bt.com/ss/Satellite/secure/loginforward?redirectURL={0}\".format(quote(self.url))\n+ redirect_to = \"https://home.bt.com/ss/Satellite/secure/loginforward?view=btsport&redirectURL={0}\".format(quote(self.url))\n data = {\n \"cookieExpp\": \"30\",\n \"Switch\": \"yes\",\n \"SMPostLoginUrl\": \"/appsyouraccount/secure/postlogin\",\n- \"loginforward\": \"https://home.bt.com/ss/Satellite/secure/loginforward\",\n+ \"loginforward\": \"https://home.bt.com/ss/Satellite/secure/loginforward?view=btsport\",\n \"smauthreason\": \"0\",\n \"TARGET\": redirect_to,\n \"USER\": username,\n \"PASSWORD\": password}\n-\n res = self.session.http.post(self.login_url, data=data)\n \n- self.logger.debug(\"Redirected to: {0}\".format(res.url))\n-\n+ log.debug(\"Redirected to: {0}\".format(res.url))\n \n- if url_equal(res.url, self.url, ignore_scheme=True):\n+ if \"loginerror\" not in res.text:\n self.logger.debug(\"Login successful, getting SAML token\")\n res = self.session.http.get(\"https://samlfed.bt.com/sportgetfedwebhls?bt.cid={0}\".format(self.acid()))\n d = self.saml_re.search(res.text)\n@@ -81,7 +82,8 @@\n self.logger.error(\"Failed to login: {0} - {1}\".format(fed_json['errorDescription'],\n fed_json['message']))\n return success\n- return False\n+ else:\n+ return False\n \n def device_id(self):\n device_id = self.cache.get(\"device_id\") or str(uuid4())\n@@ -108,19 +110,23 @@\n def _get_streams(self):\n if self.options.get(\"email\") and self.options.get(\"password\"):\n if self.login(self.options.get(\"email\"), self.options.get(\"password\")):\n- self.logger.debug(\"Logged in and authenticated with BT Sports.\")\n+ log.debug(\"Logged in and authenticated with BT Sports.\")\n \n res = self.session.http.get(self.url)\n m = self.content_re.findall(res.text)\n if m:\n info = dict(m)\n data = self._get_cdn(info.get(\"ID\"), info.get(\"TYPE\"))\n+ log.debug(\"CDN respsonse: {0}\".format(data))\n if data['resultCode'] == 'OK':\n return HLSStream.parse_variant_playlist(self.session, data['resultObj']['src'])\n else:\n- self.logger.error(\"Failed to get stream with error: {0} - {1}\".format(data['errorDescription'],\n+ log.error(\"Failed to get stream with error: {0} - {1}\".format(data['errorDescription'],\n data['message']))\n+ else:\n+ log.error(\"Login failed.\")\n else:\n- self.logger.error(\"A username and password is required to use BT Sports\")\n+ log.error(\"A username and password is required to use BT Sports\")\n+\n \n __plugin__ = BTSports\n", "issue": "BT sports plugin does not find playable streams\n<!--\r\nThanks for reporting a plugin issue!\r\nUSE THE TEMPLATE. Otherwise your plugin issue may be rejected.\r\n\r\nFirst, see the contribution guidelines:\r\nhttps://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink\r\n\r\nAlso check the list of open and closed plugin issues:\r\nhttps://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22\r\n\r\nPlease see the text preview to avoid unnecessary formatting errors.\r\n-->\r\n\r\n\r\n## Plugin Issue\r\n\r\n<!-- Replace [ ] with [x] in order to check the box -->\r\n- [x] This is a plugin issue and I have read the contribution guidelines.\r\n\r\n\r\n### Description\r\n\r\n<!-- Explain the plugin issue as thoroughly as you can. -->\r\nThe plugin doesn't find any playable streams.\r\n\r\n### Reproduction steps / Explicit stream URLs to test\r\n\r\n<!-- How can we reproduce this? Please note the exact steps below using the list format supplied. If you need more steps please add them. -->\r\n\r\n1. https://sport.bt.com/btsportplayer/live-streaming-01363980987389?leId=1364293056880\r\n2. https://sport.bt.com/btsportplayer/live-streaming-01363980987389?leId=1364293057384\r\n3. ...\r\n\r\n\r\n### Log output\r\n\r\n<!--\r\nTEXT LOG OUTPUT IS REQUIRED for a plugin issue!\r\nUse the `--loglevel debug` parameter and avoid using parameters which suppress log output.\r\nhttps://streamlink.github.io/cli.html#cmdoption-l\r\n\r\nMake sure to **remove usernames and passwords**\r\nYou can copy the output to https://gist.github.com/ or paste it below.\r\n-->\r\n\r\n```\r\nC:\\Users\\>streamlink --btsports-email EMAIL --btsports-password PASSWORD https://sport.bt.co\r\nm/btsportplayer/live-streaming-01363980987389?leId=1364293057384 --loglevel debug\r\n[cli][debug] OS: Windows 10\r\n[cli][debug] Python: 3.5.2\r\n[cli][debug] Streamlink: 0.14.2\r\n[cli][debug] Requests(2.19.1), Socks(1.6.7), Websocket(0.48.0)\r\n[cli][info] Found matching plugin btsports for URL https://sport.bt.com/btsportplayer/live-streaming-01363980987389?leId\r\n=1364293057384\r\n[cli][debug] Plugin specific arguments:\r\n[cli][debug] [email protected] (email)\r\n[cli][debug] --btsports-password=******** (password)\r\n[plugin.btsports][debug] Logging in as [email protected]\r\n[plugin.btsports][debug] Redirected to: https://home.bt.com/s/assets/pages/login-interstitial.html?TARGET=https%253A%2F%\r\n2Fsport.bt.com%2Fbtsportplayer%2Flive-streaming-01363980987389%253FleId%253D1364293057384\r\nerror: No playable streams found on this URL: https://sport.bt.com/btsportplayer/live-streaming-01363980987389?leId=1364\r\n293057384\r\n```\r\n\r\n\r\n### Additional comments, screenshots, etc.\r\nthe links are for Champions League replay matches\r\nshould give multiple options for streams.\r\n\r\n\r\n[Love Streamlink? Please consider supporting our collective. Thanks!](https://opencollective.com/streamlink/donate)\r\n\n", "before_files": [{"content": "import re\nimport time\nfrom uuid import uuid4\n\nfrom streamlink.compat import quote\nfrom streamlink.plugin import Plugin, PluginArguments, PluginArgument\nfrom streamlink.plugin.api import useragents\nfrom streamlink.stream import HLSStream\nfrom streamlink.utils import url_equal\n\n\nclass BTSports(Plugin):\n url_re = re.compile(r\"https?://sport.bt.com\")\n\n arguments = PluginArguments(\n PluginArgument(\n \"email\",\n requires=[\"password\"],\n metavar=\"EMAIL\",\n required=True,\n help=\"\"\"\n The email associated with your BT Sport account, required to access any\n BT Sport stream.\n \"\"\"\n\n ),\n PluginArgument(\n \"password\",\n sensitive=True,\n metavar=\"PASSWORD\",\n help=\"Your BT Sport account password.\"\n )\n )\n\n content_re = re.compile(r\"CONTENT_(\\w+)\\s*=\\s*'(\\w+)'\")\n saml_re = re.compile(r'''name=\"SAMLResponse\" value=\"(.*?)\"''', re.M | re.DOTALL)\n api_url = \"https://be.avs.bt.com/AVS/besc\"\n saml_url = \"https://samlfed.bt.com/sportgetfedwebhls\"\n login_url = \"https://signin1.bt.com/siteminderagent/forms/login.fcc\"\n\n def __init__(self, url):\n super(BTSports, self).__init__(url)\n self.session.http.headers = {\"User-Agent\": useragents.FIREFOX}\n\n @classmethod\n def can_handle_url(cls, url):\n return cls.url_re.match(url) is not None\n\n def login(self, username, password):\n self.logger.debug(\"Logging in as {0}\".format(username))\n\n redirect_to = \"https://home.bt.com/ss/Satellite/secure/loginforward?redirectURL={0}\".format(quote(self.url))\n data = {\n \"cookieExpp\": \"30\",\n \"Switch\": \"yes\",\n \"SMPostLoginUrl\": \"/appsyouraccount/secure/postlogin\",\n \"loginforward\": \"https://home.bt.com/ss/Satellite/secure/loginforward\",\n \"smauthreason\": \"0\",\n \"TARGET\": redirect_to,\n \"USER\": username,\n \"PASSWORD\": password}\n\n res = self.session.http.post(self.login_url, data=data)\n\n self.logger.debug(\"Redirected to: {0}\".format(res.url))\n\n\n if url_equal(res.url, self.url, ignore_scheme=True):\n self.logger.debug(\"Login successful, getting SAML token\")\n res = self.session.http.get(\"https://samlfed.bt.com/sportgetfedwebhls?bt.cid={0}\".format(self.acid()))\n d = self.saml_re.search(res.text)\n if d:\n saml_data = d.group(1)\n self.logger.debug(\"BT Sports federated login...\")\n res = self.session.http.post(self.api_url,\n params={\"action\": \"LoginBT\", \"channel\": \"WEBHLS\", \"bt.cid\": self.acid},\n data={\"SAMLResponse\": saml_data})\n fed_json = self.session.http.json(res)\n success = fed_json['resultCode'] == \"OK\"\n if not success:\n self.logger.error(\"Failed to login: {0} - {1}\".format(fed_json['errorDescription'],\n fed_json['message']))\n return success\n return False\n\n def device_id(self):\n device_id = self.cache.get(\"device_id\") or str(uuid4())\n self.cache.set(\"device_id\", device_id)\n return device_id\n\n def acid(self):\n acid = self.cache.get(\"acid\") or \"{cid}-B-{timestamp}\".format(cid=self.device_id(), timestamp=int(time.time()))\n self.cache.set(\"acid\", acid)\n return acid\n\n def _get_cdn(self, channel_id, channel_type=\"LIVE\"):\n d = {\"action\": \"GetCDN\",\n \"type\": channel_type,\n \"id\": channel_id,\n \"channel\": \"WEBHLS\",\n \"asJson\": \"Y\",\n \"bt.cid\": self.acid(),\n \"_\": int(time.time())}\n\n res = self.session.http.get(self.api_url, params=d, headers={\"Accept\": \"application/json\"})\n return self.session.http.json(res)\n\n def _get_streams(self):\n if self.options.get(\"email\") and self.options.get(\"password\"):\n if self.login(self.options.get(\"email\"), self.options.get(\"password\")):\n self.logger.debug(\"Logged in and authenticated with BT Sports.\")\n\n res = self.session.http.get(self.url)\n m = self.content_re.findall(res.text)\n if m:\n info = dict(m)\n data = self._get_cdn(info.get(\"ID\"), info.get(\"TYPE\"))\n if data['resultCode'] == 'OK':\n return HLSStream.parse_variant_playlist(self.session, data['resultObj']['src'])\n else:\n self.logger.error(\"Failed to get stream with error: {0} - {1}\".format(data['errorDescription'],\n data['message']))\n else:\n self.logger.error(\"A username and password is required to use BT Sports\")\n\n__plugin__ = BTSports\n", "path": "src/streamlink/plugins/btsports.py"}], "after_files": [{"content": "import time\n\nimport logging\nimport re\nfrom uuid import uuid4\n\nfrom streamlink.compat import quote\nfrom streamlink.plugin import Plugin, PluginArguments, PluginArgument\nfrom streamlink.plugin.api import useragents\nfrom streamlink.stream import HLSStream\n\nlog = logging.getLogger(__name__)\n\n\nclass BTSports(Plugin):\n url_re = re.compile(r\"https?://sport.bt.com\")\n\n arguments = PluginArguments(\n PluginArgument(\n \"email\",\n requires=[\"password\"],\n metavar=\"EMAIL\",\n required=True,\n help=\"\"\"\n The email associated with your BT Sport account, required to access any\n BT Sport stream.\n \"\"\"\n\n ),\n PluginArgument(\n \"password\",\n sensitive=True,\n metavar=\"PASSWORD\",\n help=\"Your BT Sport account password.\"\n )\n )\n\n content_re = re.compile(r\"CONTENT_(\\w+)\\s*=\\s*'(\\w+)'\")\n saml_re = re.compile(r'''name=\"SAMLResponse\" value=\"(.*?)\"''', re.M | re.DOTALL)\n api_url = \"https://be.avs.bt.com/AVS/besc\"\n saml_url = \"https://samlfed.bt.com/sportgetfedwebhls\"\n login_url = \"https://signin1.bt.com/siteminderagent/forms/login.fcc\"\n\n def __init__(self, url):\n super(BTSports, self).__init__(url)\n self.session.http.headers = {\"User-Agent\": useragents.FIREFOX}\n\n @classmethod\n def can_handle_url(cls, url):\n return cls.url_re.match(url) is not None\n\n def login(self, username, password):\n log.debug(\"Logging in as {0}\".format(username))\n\n redirect_to = \"https://home.bt.com/ss/Satellite/secure/loginforward?view=btsport&redirectURL={0}\".format(quote(self.url))\n data = {\n \"cookieExpp\": \"30\",\n \"Switch\": \"yes\",\n \"SMPostLoginUrl\": \"/appsyouraccount/secure/postlogin\",\n \"loginforward\": \"https://home.bt.com/ss/Satellite/secure/loginforward?view=btsport\",\n \"smauthreason\": \"0\",\n \"TARGET\": redirect_to,\n \"USER\": username,\n \"PASSWORD\": password}\n res = self.session.http.post(self.login_url, data=data)\n\n log.debug(\"Redirected to: {0}\".format(res.url))\n\n if \"loginerror\" not in res.text:\n self.logger.debug(\"Login successful, getting SAML token\")\n res = self.session.http.get(\"https://samlfed.bt.com/sportgetfedwebhls?bt.cid={0}\".format(self.acid()))\n d = self.saml_re.search(res.text)\n if d:\n saml_data = d.group(1)\n self.logger.debug(\"BT Sports federated login...\")\n res = self.session.http.post(self.api_url,\n params={\"action\": \"LoginBT\", \"channel\": \"WEBHLS\", \"bt.cid\": self.acid},\n data={\"SAMLResponse\": saml_data})\n fed_json = self.session.http.json(res)\n success = fed_json['resultCode'] == \"OK\"\n if not success:\n self.logger.error(\"Failed to login: {0} - {1}\".format(fed_json['errorDescription'],\n fed_json['message']))\n return success\n else:\n return False\n\n def device_id(self):\n device_id = self.cache.get(\"device_id\") or str(uuid4())\n self.cache.set(\"device_id\", device_id)\n return device_id\n\n def acid(self):\n acid = self.cache.get(\"acid\") or \"{cid}-B-{timestamp}\".format(cid=self.device_id(), timestamp=int(time.time()))\n self.cache.set(\"acid\", acid)\n return acid\n\n def _get_cdn(self, channel_id, channel_type=\"LIVE\"):\n d = {\"action\": \"GetCDN\",\n \"type\": channel_type,\n \"id\": channel_id,\n \"channel\": \"WEBHLS\",\n \"asJson\": \"Y\",\n \"bt.cid\": self.acid(),\n \"_\": int(time.time())}\n\n res = self.session.http.get(self.api_url, params=d, headers={\"Accept\": \"application/json\"})\n return self.session.http.json(res)\n\n def _get_streams(self):\n if self.options.get(\"email\") and self.options.get(\"password\"):\n if self.login(self.options.get(\"email\"), self.options.get(\"password\")):\n log.debug(\"Logged in and authenticated with BT Sports.\")\n\n res = self.session.http.get(self.url)\n m = self.content_re.findall(res.text)\n if m:\n info = dict(m)\n data = self._get_cdn(info.get(\"ID\"), info.get(\"TYPE\"))\n log.debug(\"CDN respsonse: {0}\".format(data))\n if data['resultCode'] == 'OK':\n return HLSStream.parse_variant_playlist(self.session, data['resultObj']['src'])\n else:\n log.error(\"Failed to get stream with error: {0} - {1}\".format(data['errorDescription'],\n data['message']))\n else:\n log.error(\"Login failed.\")\n else:\n log.error(\"A username and password is required to use BT Sports\")\n\n\n__plugin__ = BTSports\n", "path": "src/streamlink/plugins/btsports.py"}]}
| 2,516 | 892 |
gh_patches_debug_25948
|
rasdani/github-patches
|
git_diff
|
ytdl-org__youtube-dl-18336
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[azmedien] Information extractor is broken
### Make sure you are using the *latest* version: run `youtube-dl --version` and ensure your version is *2018.11.23*. If it's not, read [this FAQ entry](https://github.com/rg3/youtube-dl/blob/master/README.md#how-do-i-update-youtube-dl) and update. Issues with outdated version will be rejected.
- [x] I've **verified** and **I assure** that I'm running youtube-dl **2018.11.23**
### Before submitting an *issue* make sure you have:
- [x] At least skimmed through the [README](https://github.com/rg3/youtube-dl/blob/master/README.md), **most notably** the [FAQ](https://github.com/rg3/youtube-dl#faq) and [BUGS](https://github.com/rg3/youtube-dl#bugs) sections
- [x] [Searched](https://github.com/rg3/youtube-dl/search?type=Issues) the bugtracker for similar issues including closed ones
- [x] Checked that provided video/audio/playlist URLs (if any) are alive and playable in a browser
### What is the purpose of your *issue*?
- [x] Bug report (encountered problems with youtube-dl)
- [ ] Site support request (request for adding support for a new site)
- [ ] Feature request (request for a new functionality)
- [ ] Question
- [ ] Other
---
### If the purpose of this *issue* is a *bug report*, *site support request* or you are not completely sure provide the full verbose output as follows:
Add the `-v` flag to **your command line** you run youtube-dl with (`youtube-dl -v <your command line>`), copy the **whole** output and insert it here. It should look similar to one below (replace it with **your** log inserted between triple ```):
```
youtube-dl "https://www.telezueri.ch/talktaeglich/toni-brunner-vom-jungbauern-zum-svp-star-133756399" -v
[debug] System config: []
[debug] User config: ['--netrc', '--retries', '30', '--mark-watched']
[debug] Custom config: []
[debug] Command-line args: ['https://www.telezueri.ch/talktaeglich/toni-brunner-vom-jungbauern-zum-svp-star-133756399', '-v']
[debug] Encodings: locale UTF-8, fs utf-8, out UTF-8, pref UTF-8
[debug] youtube-dl version 2018.11.23
[debug] Python version 3.7.1 (CPython) - Linux-4.19.4-arch1-1-ARCH-x86_64-with-arch
[debug] exe versions: ffmpeg n4.1, ffprobe n4.1, rtmpdump 2.4
[debug] Proxy map: {}
[AZMedien] toni-brunner-vom-jungbauern-zum-svp-star-133756399: Downloading webpage
ERROR: Unable to extract api path; please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.
Traceback (most recent call last):
File "/usr/lib/python3.7/site-packages/youtube_dl/YoutubeDL.py", line 792, in extract_info
ie_result = ie.extract(url)
File "/usr/lib/python3.7/site-packages/youtube_dl/extractor/common.py", line 508, in extract
ie_result = self._real_extract(url)
File "/usr/lib/python3.7/site-packages/youtube_dl/extractor/azmedien.py", line 63, in _real_extract
webpage, 'api path')
File "/usr/lib/python3.7/site-packages/youtube_dl/extractor/common.py", line 983, in _search_regex
raise RegexNotFoundError('Unable to extract %s' % _name)
youtube_dl.utils.RegexNotFoundError: Unable to extract api path; please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.
```
---
### Description of your *issue*, suggested solution and other information
The information extractor `AZMedien` is currently broken, see the log output above.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `youtube_dl/extractor/azmedien.py`
Content:
```
1 # coding: utf-8
2 from __future__ import unicode_literals
3
4 import json
5 import re
6
7 from .common import InfoExtractor
8 from .kaltura import KalturaIE
9
10
11 class AZMedienIE(InfoExtractor):
12 IE_DESC = 'AZ Medien videos'
13 _VALID_URL = r'''(?x)
14 https?://
15 (?:www\.)?
16 (?P<host>
17 telezueri\.ch|
18 telebaern\.tv|
19 telem1\.ch
20 )/
21 [^/]+/
22 (?P<id>
23 [^/]+-(?P<article_id>\d+)
24 )
25 (?:
26 \#video=
27 (?P<kaltura_id>
28 [_0-9a-z]+
29 )
30 )?
31 '''
32
33 _TESTS = [{
34 'url': 'https://www.telezueri.ch/sonntalk/bundesrats-vakanzen-eu-rahmenabkommen-133214569',
35 'info_dict': {
36 'id': '1_anruz3wy',
37 'ext': 'mp4',
38 'title': 'Bundesrats-Vakanzen / EU-Rahmenabkommen',
39 'description': 'md5:dd9f96751ec9c35e409a698a328402f3',
40 'uploader_id': 'TVOnline',
41 'upload_date': '20180930',
42 'timestamp': 1538328802,
43 },
44 'params': {
45 'skip_download': True,
46 },
47 }, {
48 'url': 'https://www.telebaern.tv/telebaern-news/montag-1-oktober-2018-ganze-sendung-133531189#video=0_7xjo9lf1',
49 'only_matching': True
50 }]
51
52 _PARTNER_ID = '1719221'
53
54 def _real_extract(self, url):
55 mobj = re.match(self._VALID_URL, url)
56 video_id = mobj.group('id')
57 entry_id = mobj.group('kaltura_id')
58
59 if not entry_id:
60 webpage = self._download_webpage(url, video_id)
61 api_path = self._search_regex(
62 r'["\']apiPath["\']\s*:\s*["\']([^"^\']+)["\']',
63 webpage, 'api path')
64 api_url = 'https://www.%s%s' % (mobj.group('host'), api_path)
65 payload = {
66 'query': '''query VideoContext($articleId: ID!) {
67 article: node(id: $articleId) {
68 ... on Article {
69 mainAssetRelation {
70 asset {
71 ... on VideoAsset {
72 kalturaId
73 }
74 }
75 }
76 }
77 }
78 }''',
79 'variables': {'articleId': 'Article:%s' % mobj.group('article_id')},
80 }
81 json_data = self._download_json(
82 api_url, video_id, headers={
83 'Content-Type': 'application/json',
84 },
85 data=json.dumps(payload).encode())
86 entry_id = json_data['data']['article']['mainAssetRelation']['asset']['kalturaId']
87
88 return self.url_result(
89 'kaltura:%s:%s' % (self._PARTNER_ID, entry_id),
90 ie=KalturaIE.ie_key(), video_id=entry_id)
91
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/youtube_dl/extractor/azmedien.py b/youtube_dl/extractor/azmedien.py
--- a/youtube_dl/extractor/azmedien.py
+++ b/youtube_dl/extractor/azmedien.py
@@ -36,7 +36,6 @@
'id': '1_anruz3wy',
'ext': 'mp4',
'title': 'Bundesrats-Vakanzen / EU-Rahmenabkommen',
- 'description': 'md5:dd9f96751ec9c35e409a698a328402f3',
'uploader_id': 'TVOnline',
'upload_date': '20180930',
'timestamp': 1538328802,
@@ -53,15 +52,12 @@
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
+ host = mobj.group('host')
video_id = mobj.group('id')
entry_id = mobj.group('kaltura_id')
if not entry_id:
- webpage = self._download_webpage(url, video_id)
- api_path = self._search_regex(
- r'["\']apiPath["\']\s*:\s*["\']([^"^\']+)["\']',
- webpage, 'api path')
- api_url = 'https://www.%s%s' % (mobj.group('host'), api_path)
+ api_url = 'https://www.%s/api/pub/gql/%s' % (host, host.split('.')[0])
payload = {
'query': '''query VideoContext($articleId: ID!) {
article: node(id: $articleId) {
|
{"golden_diff": "diff --git a/youtube_dl/extractor/azmedien.py b/youtube_dl/extractor/azmedien.py\n--- a/youtube_dl/extractor/azmedien.py\n+++ b/youtube_dl/extractor/azmedien.py\n@@ -36,7 +36,6 @@\n 'id': '1_anruz3wy',\n 'ext': 'mp4',\n 'title': 'Bundesrats-Vakanzen / EU-Rahmenabkommen',\n- 'description': 'md5:dd9f96751ec9c35e409a698a328402f3',\n 'uploader_id': 'TVOnline',\n 'upload_date': '20180930',\n 'timestamp': 1538328802,\n@@ -53,15 +52,12 @@\n \n def _real_extract(self, url):\n mobj = re.match(self._VALID_URL, url)\n+ host = mobj.group('host')\n video_id = mobj.group('id')\n entry_id = mobj.group('kaltura_id')\n \n if not entry_id:\n- webpage = self._download_webpage(url, video_id)\n- api_path = self._search_regex(\n- r'[\"\\']apiPath[\"\\']\\s*:\\s*[\"\\']([^\"^\\']+)[\"\\']',\n- webpage, 'api path')\n- api_url = 'https://www.%s%s' % (mobj.group('host'), api_path)\n+ api_url = 'https://www.%s/api/pub/gql/%s' % (host, host.split('.')[0])\n payload = {\n 'query': '''query VideoContext($articleId: ID!) {\n article: node(id: $articleId) {\n", "issue": "[azmedien] Information extractor is broken\n### Make sure you are using the *latest* version: run `youtube-dl --version` and ensure your version is *2018.11.23*. If it's not, read [this FAQ entry](https://github.com/rg3/youtube-dl/blob/master/README.md#how-do-i-update-youtube-dl) and update. Issues with outdated version will be rejected.\r\n- [x] I've **verified** and **I assure** that I'm running youtube-dl **2018.11.23**\r\n\r\n### Before submitting an *issue* make sure you have:\r\n- [x] At least skimmed through the [README](https://github.com/rg3/youtube-dl/blob/master/README.md), **most notably** the [FAQ](https://github.com/rg3/youtube-dl#faq) and [BUGS](https://github.com/rg3/youtube-dl#bugs) sections\r\n- [x] [Searched](https://github.com/rg3/youtube-dl/search?type=Issues) the bugtracker for similar issues including closed ones\r\n- [x] Checked that provided video/audio/playlist URLs (if any) are alive and playable in a browser\r\n\r\n### What is the purpose of your *issue*?\r\n- [x] Bug report (encountered problems with youtube-dl)\r\n- [ ] Site support request (request for adding support for a new site)\r\n- [ ] Feature request (request for a new functionality)\r\n- [ ] Question\r\n- [ ] Other\r\n\r\n---\r\n\r\n### If the purpose of this *issue* is a *bug report*, *site support request* or you are not completely sure provide the full verbose output as follows:\r\n\r\nAdd the `-v` flag to **your command line** you run youtube-dl with (`youtube-dl -v <your command line>`), copy the **whole** output and insert it here. It should look similar to one below (replace it with **your** log inserted between triple ```):\r\n\r\n```\r\nyoutube-dl \"https://www.telezueri.ch/talktaeglich/toni-brunner-vom-jungbauern-zum-svp-star-133756399\" -v\r\n[debug] System config: []\r\n[debug] User config: ['--netrc', '--retries', '30', '--mark-watched']\r\n[debug] Custom config: []\r\n[debug] Command-line args: ['https://www.telezueri.ch/talktaeglich/toni-brunner-vom-jungbauern-zum-svp-star-133756399', '-v']\r\n[debug] Encodings: locale UTF-8, fs utf-8, out UTF-8, pref UTF-8\r\n[debug] youtube-dl version 2018.11.23\r\n[debug] Python version 3.7.1 (CPython) - Linux-4.19.4-arch1-1-ARCH-x86_64-with-arch\r\n[debug] exe versions: ffmpeg n4.1, ffprobe n4.1, rtmpdump 2.4\r\n[debug] Proxy map: {}\r\n[AZMedien] toni-brunner-vom-jungbauern-zum-svp-star-133756399: Downloading webpage\r\nERROR: Unable to extract api path; please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.7/site-packages/youtube_dl/YoutubeDL.py\", line 792, in extract_info\r\n ie_result = ie.extract(url)\r\n File \"/usr/lib/python3.7/site-packages/youtube_dl/extractor/common.py\", line 508, in extract\r\n ie_result = self._real_extract(url)\r\n File \"/usr/lib/python3.7/site-packages/youtube_dl/extractor/azmedien.py\", line 63, in _real_extract\r\n webpage, 'api path')\r\n File \"/usr/lib/python3.7/site-packages/youtube_dl/extractor/common.py\", line 983, in _search_regex\r\n raise RegexNotFoundError('Unable to extract %s' % _name)\r\nyoutube_dl.utils.RegexNotFoundError: Unable to extract api path; please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.\r\n```\r\n\r\n---\r\n\r\n### Description of your *issue*, suggested solution and other information\r\n\r\nThe information extractor `AZMedien` is currently broken, see the log output above.\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport json\nimport re\n\nfrom .common import InfoExtractor\nfrom .kaltura import KalturaIE\n\n\nclass AZMedienIE(InfoExtractor):\n IE_DESC = 'AZ Medien videos'\n _VALID_URL = r'''(?x)\n https?://\n (?:www\\.)?\n (?P<host>\n telezueri\\.ch|\n telebaern\\.tv|\n telem1\\.ch\n )/\n [^/]+/\n (?P<id>\n [^/]+-(?P<article_id>\\d+)\n )\n (?:\n \\#video=\n (?P<kaltura_id>\n [_0-9a-z]+\n )\n )?\n '''\n\n _TESTS = [{\n 'url': 'https://www.telezueri.ch/sonntalk/bundesrats-vakanzen-eu-rahmenabkommen-133214569',\n 'info_dict': {\n 'id': '1_anruz3wy',\n 'ext': 'mp4',\n 'title': 'Bundesrats-Vakanzen / EU-Rahmenabkommen',\n 'description': 'md5:dd9f96751ec9c35e409a698a328402f3',\n 'uploader_id': 'TVOnline',\n 'upload_date': '20180930',\n 'timestamp': 1538328802,\n },\n 'params': {\n 'skip_download': True,\n },\n }, {\n 'url': 'https://www.telebaern.tv/telebaern-news/montag-1-oktober-2018-ganze-sendung-133531189#video=0_7xjo9lf1',\n 'only_matching': True\n }]\n\n _PARTNER_ID = '1719221'\n\n def _real_extract(self, url):\n mobj = re.match(self._VALID_URL, url)\n video_id = mobj.group('id')\n entry_id = mobj.group('kaltura_id')\n\n if not entry_id:\n webpage = self._download_webpage(url, video_id)\n api_path = self._search_regex(\n r'[\"\\']apiPath[\"\\']\\s*:\\s*[\"\\']([^\"^\\']+)[\"\\']',\n webpage, 'api path')\n api_url = 'https://www.%s%s' % (mobj.group('host'), api_path)\n payload = {\n 'query': '''query VideoContext($articleId: ID!) {\n article: node(id: $articleId) {\n ... on Article {\n mainAssetRelation {\n asset {\n ... on VideoAsset {\n kalturaId\n }\n }\n }\n }\n }\n }''',\n 'variables': {'articleId': 'Article:%s' % mobj.group('article_id')},\n }\n json_data = self._download_json(\n api_url, video_id, headers={\n 'Content-Type': 'application/json',\n },\n data=json.dumps(payload).encode())\n entry_id = json_data['data']['article']['mainAssetRelation']['asset']['kalturaId']\n\n return self.url_result(\n 'kaltura:%s:%s' % (self._PARTNER_ID, entry_id),\n ie=KalturaIE.ie_key(), video_id=entry_id)\n", "path": "youtube_dl/extractor/azmedien.py"}], "after_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport json\nimport re\n\nfrom .common import InfoExtractor\nfrom .kaltura import KalturaIE\n\n\nclass AZMedienIE(InfoExtractor):\n IE_DESC = 'AZ Medien videos'\n _VALID_URL = r'''(?x)\n https?://\n (?:www\\.)?\n (?P<host>\n telezueri\\.ch|\n telebaern\\.tv|\n telem1\\.ch\n )/\n [^/]+/\n (?P<id>\n [^/]+-(?P<article_id>\\d+)\n )\n (?:\n \\#video=\n (?P<kaltura_id>\n [_0-9a-z]+\n )\n )?\n '''\n\n _TESTS = [{\n 'url': 'https://www.telezueri.ch/sonntalk/bundesrats-vakanzen-eu-rahmenabkommen-133214569',\n 'info_dict': {\n 'id': '1_anruz3wy',\n 'ext': 'mp4',\n 'title': 'Bundesrats-Vakanzen / EU-Rahmenabkommen',\n 'uploader_id': 'TVOnline',\n 'upload_date': '20180930',\n 'timestamp': 1538328802,\n },\n 'params': {\n 'skip_download': True,\n },\n }, {\n 'url': 'https://www.telebaern.tv/telebaern-news/montag-1-oktober-2018-ganze-sendung-133531189#video=0_7xjo9lf1',\n 'only_matching': True\n }]\n\n _PARTNER_ID = '1719221'\n\n def _real_extract(self, url):\n mobj = re.match(self._VALID_URL, url)\n host = mobj.group('host')\n video_id = mobj.group('id')\n entry_id = mobj.group('kaltura_id')\n\n if not entry_id:\n api_url = 'https://www.%s/api/pub/gql/%s' % (host, host.split('.')[0])\n payload = {\n 'query': '''query VideoContext($articleId: ID!) {\n article: node(id: $articleId) {\n ... on Article {\n mainAssetRelation {\n asset {\n ... on VideoAsset {\n kalturaId\n }\n }\n }\n }\n }\n }''',\n 'variables': {'articleId': 'Article:%s' % mobj.group('article_id')},\n }\n json_data = self._download_json(\n api_url, video_id, headers={\n 'Content-Type': 'application/json',\n },\n data=json.dumps(payload).encode())\n entry_id = json_data['data']['article']['mainAssetRelation']['asset']['kalturaId']\n\n return self.url_result(\n 'kaltura:%s:%s' % (self._PARTNER_ID, entry_id),\n ie=KalturaIE.ie_key(), video_id=entry_id)\n", "path": "youtube_dl/extractor/azmedien.py"}]}
| 2,273 | 406 |
gh_patches_debug_16614
|
rasdani/github-patches
|
git_diff
|
inventree__InvenTree-4492
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Email settings not configured
### Deployment Method
- [ ] Installer
- [ ] Docker Development
- [X] Docker Production
- [ ] Bare metal Development
- [ ] Bare metal Production
- [ ] Digital Ocean image
- [ ] Other (please provide a link `Steps to Reproduce`
### Describe the problem*
Hello,
I am having issues configuring the email settings to connect to our on-prem exchange server.
I have configured the .env file with the following variables (Note - exchange does not require authentication internally)
INVENTREE_EMAIL_HOST
INVENTREE_EMAIL_PORT
INVENTREE_EMAIL_SENDER
However, when running the docker container, the 'System Information' screen reports that 'Email settings not configured. Under 'Global Settings' > 'Login Settings' a prompt shows at the top saying 'Outgoing email has not been configured. Some login and sign-up features may not work correctly!'
Apologies if I have missed something obvious in the documentation but I seem to be going round in circles currently. Any help that you could offer would be greatly appreciated. Thank you
### Steps to Reproduce
1. Stop the contain with `docker compose down`
2. Edit .env file to include the email variables (as per https://docs.inventree.org/en/0.10.0/start/config/#email-settings)
3. Starting the container with `docker compose up -d`
Further steps (Apologies, these might be all guesses at this stage)
1. I have tried setting these within the configuration file (I believe saved here: `volume/inventree-data/config.yaml` to include host, port, sender, tls and ssl variables, however with no change.
2. Feeling like I am missing a step, I am running `docker compose run inventree-server invoke update`
3. I am now running 0.10.1 but with the same issue.
### Relevant log output
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `InvenTree/InvenTree/status.py`
Content:
```
1 """Provides system status functionality checks."""
2 # -*- coding: utf-8 -*-
3
4 import logging
5 from datetime import timedelta
6
7 from django.conf import settings
8 from django.utils import timezone
9 from django.utils.translation import gettext_lazy as _
10
11 from django_q.models import Success
12 from django_q.monitor import Stat
13
14 import InvenTree.ready
15
16 logger = logging.getLogger("inventree")
17
18
19 def is_worker_running(**kwargs):
20 """Return True if the background worker process is oprational."""
21 clusters = Stat.get_all()
22
23 if len(clusters) > 0:
24 # TODO - Introspect on any cluster information
25 return True
26
27 """
28 Sometimes Stat.get_all() returns [].
29 In this case we have the 'heartbeat' task running every 5 minutes.
30 Check to see if we have any successful result within the last 10 minutes
31 """
32
33 now = timezone.now()
34 past = now - timedelta(minutes=10)
35
36 results = Success.objects.filter(
37 started__gte=past
38 )
39
40 # If any results are returned, then the background worker is running!
41 return results.exists()
42
43
44 def is_email_configured():
45 """Check if email backend is configured.
46
47 NOTE: This does not check if the configuration is valid!
48 """
49 configured = True
50
51 if InvenTree.ready.isInTestMode():
52 return False
53
54 if InvenTree.ready.isImportingData():
55 return False
56
57 if not settings.EMAIL_HOST:
58 configured = False
59
60 # Display warning unless in test mode
61 if not settings.TESTING: # pragma: no cover
62 logger.debug("EMAIL_HOST is not configured")
63
64 if not settings.EMAIL_HOST_USER:
65 configured = False
66
67 # Display warning unless in test mode
68 if not settings.TESTING: # pragma: no cover
69 logger.debug("EMAIL_HOST_USER is not configured")
70
71 if not settings.EMAIL_HOST_PASSWORD:
72 configured = False
73
74 # Display warning unless in test mode
75 if not settings.TESTING: # pragma: no cover
76 logger.debug("EMAIL_HOST_PASSWORD is not configured")
77
78 return configured
79
80
81 def check_system_health(**kwargs):
82 """Check that the InvenTree system is running OK.
83
84 Returns True if all system checks pass.
85 """
86 result = True
87
88 if InvenTree.ready.isInTestMode():
89 # Do not perform further checks if we are running unit tests
90 return False
91
92 if InvenTree.ready.isImportingData():
93 # Do not perform further checks if we are importing data
94 return False
95
96 if not is_worker_running(**kwargs): # pragma: no cover
97 result = False
98 logger.warning(_("Background worker check failed"))
99
100 if not is_email_configured(): # pragma: no cover
101 result = False
102 logger.warning(_("Email backend not configured"))
103
104 if not result: # pragma: no cover
105 logger.warning(_("InvenTree system health checks failed"))
106
107 return result
108
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/InvenTree/InvenTree/status.py b/InvenTree/InvenTree/status.py
--- a/InvenTree/InvenTree/status.py
+++ b/InvenTree/InvenTree/status.py
@@ -61,19 +61,13 @@
if not settings.TESTING: # pragma: no cover
logger.debug("EMAIL_HOST is not configured")
- if not settings.EMAIL_HOST_USER:
- configured = False
-
- # Display warning unless in test mode
- if not settings.TESTING: # pragma: no cover
- logger.debug("EMAIL_HOST_USER is not configured")
+ # Display warning unless in test mode
+ if not settings.TESTING: # pragma: no cover
+ logger.debug("EMAIL_HOST_USER is not configured")
- if not settings.EMAIL_HOST_PASSWORD:
- configured = False
-
- # Display warning unless in test mode
- if not settings.TESTING: # pragma: no cover
- logger.debug("EMAIL_HOST_PASSWORD is not configured")
+ # Display warning unless in test mode
+ if not settings.TESTING: # pragma: no cover
+ logger.debug("EMAIL_HOST_PASSWORD is not configured")
return configured
|
{"golden_diff": "diff --git a/InvenTree/InvenTree/status.py b/InvenTree/InvenTree/status.py\n--- a/InvenTree/InvenTree/status.py\n+++ b/InvenTree/InvenTree/status.py\n@@ -61,19 +61,13 @@\n if not settings.TESTING: # pragma: no cover\n logger.debug(\"EMAIL_HOST is not configured\")\n \n- if not settings.EMAIL_HOST_USER:\n- configured = False\n-\n- # Display warning unless in test mode\n- if not settings.TESTING: # pragma: no cover\n- logger.debug(\"EMAIL_HOST_USER is not configured\")\n+ # Display warning unless in test mode\n+ if not settings.TESTING: # pragma: no cover\n+ logger.debug(\"EMAIL_HOST_USER is not configured\")\n \n- if not settings.EMAIL_HOST_PASSWORD:\n- configured = False\n-\n- # Display warning unless in test mode\n- if not settings.TESTING: # pragma: no cover\n- logger.debug(\"EMAIL_HOST_PASSWORD is not configured\")\n+ # Display warning unless in test mode\n+ if not settings.TESTING: # pragma: no cover\n+ logger.debug(\"EMAIL_HOST_PASSWORD is not configured\")\n \n return configured\n", "issue": "Email settings not configured\n### Deployment Method\n\n- [ ] Installer\n- [ ] Docker Development\n- [X] Docker Production\n- [ ] Bare metal Development\n- [ ] Bare metal Production\n- [ ] Digital Ocean image\n- [ ] Other (please provide a link `Steps to Reproduce`\n\n### Describe the problem*\n\nHello,\r\n\r\nI am having issues configuring the email settings to connect to our on-prem exchange server. \r\n\r\nI have configured the .env file with the following variables (Note - exchange does not require authentication internally)\r\n\r\nINVENTREE_EMAIL_HOST\r\nINVENTREE_EMAIL_PORT\r\nINVENTREE_EMAIL_SENDER\r\n\r\nHowever, when running the docker container, the 'System Information' screen reports that 'Email settings not configured. Under 'Global Settings' > 'Login Settings' a prompt shows at the top saying 'Outgoing email has not been configured. Some login and sign-up features may not work correctly!'\r\n\r\nApologies if I have missed something obvious in the documentation but I seem to be going round in circles currently. Any help that you could offer would be greatly appreciated. Thank you\n\n### Steps to Reproduce\n\n1. Stop the contain with `docker compose down` \r\n2. Edit .env file to include the email variables (as per https://docs.inventree.org/en/0.10.0/start/config/#email-settings)\r\n3. Starting the container with `docker compose up -d`\r\n\r\nFurther steps (Apologies, these might be all guesses at this stage)\r\n\r\n1. I have tried setting these within the configuration file (I believe saved here: `volume/inventree-data/config.yaml` to include host, port, sender, tls and ssl variables, however with no change.\r\n2. Feeling like I am missing a step, I am running `docker compose run inventree-server invoke update`\r\n3. I am now running 0.10.1 but with the same issue.\r\n\r\n\n\n### Relevant log output\n\n_No response_\n", "before_files": [{"content": "\"\"\"Provides system status functionality checks.\"\"\"\n# -*- coding: utf-8 -*-\n\nimport logging\nfrom datetime import timedelta\n\nfrom django.conf import settings\nfrom django.utils import timezone\nfrom django.utils.translation import gettext_lazy as _\n\nfrom django_q.models import Success\nfrom django_q.monitor import Stat\n\nimport InvenTree.ready\n\nlogger = logging.getLogger(\"inventree\")\n\n\ndef is_worker_running(**kwargs):\n \"\"\"Return True if the background worker process is oprational.\"\"\"\n clusters = Stat.get_all()\n\n if len(clusters) > 0:\n # TODO - Introspect on any cluster information\n return True\n\n \"\"\"\n Sometimes Stat.get_all() returns [].\n In this case we have the 'heartbeat' task running every 5 minutes.\n Check to see if we have any successful result within the last 10 minutes\n \"\"\"\n\n now = timezone.now()\n past = now - timedelta(minutes=10)\n\n results = Success.objects.filter(\n started__gte=past\n )\n\n # If any results are returned, then the background worker is running!\n return results.exists()\n\n\ndef is_email_configured():\n \"\"\"Check if email backend is configured.\n\n NOTE: This does not check if the configuration is valid!\n \"\"\"\n configured = True\n\n if InvenTree.ready.isInTestMode():\n return False\n\n if InvenTree.ready.isImportingData():\n return False\n\n if not settings.EMAIL_HOST:\n configured = False\n\n # Display warning unless in test mode\n if not settings.TESTING: # pragma: no cover\n logger.debug(\"EMAIL_HOST is not configured\")\n\n if not settings.EMAIL_HOST_USER:\n configured = False\n\n # Display warning unless in test mode\n if not settings.TESTING: # pragma: no cover\n logger.debug(\"EMAIL_HOST_USER is not configured\")\n\n if not settings.EMAIL_HOST_PASSWORD:\n configured = False\n\n # Display warning unless in test mode\n if not settings.TESTING: # pragma: no cover\n logger.debug(\"EMAIL_HOST_PASSWORD is not configured\")\n\n return configured\n\n\ndef check_system_health(**kwargs):\n \"\"\"Check that the InvenTree system is running OK.\n\n Returns True if all system checks pass.\n \"\"\"\n result = True\n\n if InvenTree.ready.isInTestMode():\n # Do not perform further checks if we are running unit tests\n return False\n\n if InvenTree.ready.isImportingData():\n # Do not perform further checks if we are importing data\n return False\n\n if not is_worker_running(**kwargs): # pragma: no cover\n result = False\n logger.warning(_(\"Background worker check failed\"))\n\n if not is_email_configured(): # pragma: no cover\n result = False\n logger.warning(_(\"Email backend not configured\"))\n\n if not result: # pragma: no cover\n logger.warning(_(\"InvenTree system health checks failed\"))\n\n return result\n", "path": "InvenTree/InvenTree/status.py"}], "after_files": [{"content": "\"\"\"Provides system status functionality checks.\"\"\"\n# -*- coding: utf-8 -*-\n\nimport logging\nfrom datetime import timedelta\n\nfrom django.conf import settings\nfrom django.utils import timezone\nfrom django.utils.translation import gettext_lazy as _\n\nfrom django_q.models import Success\nfrom django_q.monitor import Stat\n\nimport InvenTree.ready\n\nlogger = logging.getLogger(\"inventree\")\n\n\ndef is_worker_running(**kwargs):\n \"\"\"Return True if the background worker process is oprational.\"\"\"\n clusters = Stat.get_all()\n\n if len(clusters) > 0:\n # TODO - Introspect on any cluster information\n return True\n\n \"\"\"\n Sometimes Stat.get_all() returns [].\n In this case we have the 'heartbeat' task running every 5 minutes.\n Check to see if we have any successful result within the last 10 minutes\n \"\"\"\n\n now = timezone.now()\n past = now - timedelta(minutes=10)\n\n results = Success.objects.filter(\n started__gte=past\n )\n\n # If any results are returned, then the background worker is running!\n return results.exists()\n\n\ndef is_email_configured():\n \"\"\"Check if email backend is configured.\n\n NOTE: This does not check if the configuration is valid!\n \"\"\"\n configured = True\n\n if InvenTree.ready.isInTestMode():\n return False\n\n if InvenTree.ready.isImportingData():\n return False\n\n if not settings.EMAIL_HOST:\n configured = False\n\n # Display warning unless in test mode\n if not settings.TESTING: # pragma: no cover\n logger.debug(\"EMAIL_HOST is not configured\")\n\n # Display warning unless in test mode\n if not settings.TESTING: # pragma: no cover\n logger.debug(\"EMAIL_HOST_USER is not configured\")\n\n # Display warning unless in test mode\n if not settings.TESTING: # pragma: no cover\n logger.debug(\"EMAIL_HOST_PASSWORD is not configured\")\n\n return configured\n\n\ndef check_system_health(**kwargs):\n \"\"\"Check that the InvenTree system is running OK.\n\n Returns True if all system checks pass.\n \"\"\"\n result = True\n\n if InvenTree.ready.isInTestMode():\n # Do not perform further checks if we are running unit tests\n return False\n\n if InvenTree.ready.isImportingData():\n # Do not perform further checks if we are importing data\n return False\n\n if not is_worker_running(**kwargs): # pragma: no cover\n result = False\n logger.warning(_(\"Background worker check failed\"))\n\n if not is_email_configured(): # pragma: no cover\n result = False\n logger.warning(_(\"Email backend not configured\"))\n\n if not result: # pragma: no cover\n logger.warning(_(\"InvenTree system health checks failed\"))\n\n return result\n", "path": "InvenTree/InvenTree/status.py"}]}
| 1,531 | 277 |
gh_patches_debug_22815
|
rasdani/github-patches
|
git_diff
|
streamlink__streamlink-1129
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AdultSwim stream broken
### Checklist
- [x] This is a bug report.
- [ ] This is a feature request.
- [ ] This is a plugin (improvement) request.
- [x] I have read the contribution guidelines.
### Description
running `streamlink.exe http://www.adultswim.com/videos/streams best` does not open the AdultSwim stream.
### Expected / Actual behavior
Expected: open the AdultSwim stream
Actual behavior: `error: Unable to validate JSON: Unable to validate key 'streams': Key 'isLive' not found in {'description': 'Walk with Him all-day long.', 'email': '', 'archiveCollection': [], 'stream': 'At-rci-fT1SgqmQ2ZA5XtQ', 'id': 'black-jesus', 'pipVideoID': '', 'chat': '', 'doc_id': 'video_stream_shows_black_jesus_marathon', 'showLinks': [{'external': False, 'value': 'More Black Jesus', 'url': 'http://www.adultswim.com/videos/black-jesus/'}, {'external': False, 'value': 'Facebook', 'url': 'http://www.facebook.com/BlackJesus'}, {'external': True, 'value': 'Twitter', 'url': 'https://twitter.com/blackjesusshow'}], 'url': 'http://www.adultswim.com/videos/streams/black-jesus/', 'telephone': '', 'schedule': [], 'images': {'video': 'http://i.cdn.turner.com/adultswim/big/video/black-jesus-marathon/marathonStream_blackjesus.jpg'}, 'chatService': 'http://www.adultswim.com/utilities/api/v1/live/chat/black-jesus', 'type': 'marathon', 'archiveEpisodes': [], 'sponsor':{'link': '', 'title': '', 'imageUrl': ''}, 'title': 'Black Jesus', 'rating': 'TV-MA'}`
### Reproduction steps / Explicit stream URLs to test
look above
### Environment details
Operating system and version: Windows 7 64bit
Streamlink and Python version: Python 3.5.2/Streamlink 0.7.0
[Love Streamlink? Please consider supporting our collective. Thanks!](https://opencollective.com/streamlink/donate)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/adultswim.py`
Content:
```
1 import re
2
3 from streamlink.plugin import Plugin
4 from streamlink.plugin.api import StreamMapper
5 from streamlink.plugin.api import http, validate
6 from streamlink.plugin.api import useragents
7 from streamlink.stream import HDSStream
8 from streamlink.stream import HLSStream
9 from streamlink.stream import HTTPStream
10 from streamlink.utils import parse_json, parse_xml
11
12
13 class AdultSwim(Plugin):
14 API_URL = "http://www.adultswim.com/videos/api/v2/videos/{id}?fields=stream"
15 vod_api = " http://www.adultswim.com/videos/api/v0/assets"
16
17 url_re = re.compile(r"""https?://(?:www\.)?adultswim\.com/videos
18 (?:/(streams))?
19 (?:/([^/]+))?
20 (?:/([^/]+))?
21 """, re.VERBOSE)
22 _stream_data_re = re.compile(r"(?:__)?AS_INITIAL_DATA(?:__)? = (\{.*?});", re.M | re.DOTALL)
23
24 live_schema = validate.Schema({
25 u"streams": {
26 validate.text: {u"stream": validate.text,
27 u"isLive": bool,
28 u"archiveEpisodes": [{
29 u"id": validate.text,
30 u"slug": validate.text,
31 }]}}
32
33 })
34 vod_id_schema = validate.Schema({u"show": {u"sluggedVideo": {u"id": validate.text}}},
35 validate.transform(lambda x: x["show"]["sluggedVideo"]["id"]))
36 _api_schema = validate.Schema({
37 u'status': u'ok',
38 u'data': {u'stream': {
39 u'assets': [{u'url': validate.url()}]
40 }}
41 })
42 _vod_api_schema = validate.Schema(
43 validate.all(
44 validate.xml_findall(".//files/file"),
45 [validate.xml_element,
46 validate.transform(lambda v: {"bitrate": v.attrib.get("bitrate"), "url": v.text})
47 ]
48 )
49 )
50
51 @classmethod
52 def can_handle_url(cls, url):
53 match = AdultSwim.url_re.match(url)
54 return match is not None
55
56 def _make_hls_hds_stream(self, func, stream, *args, **kwargs):
57 return func(self.session, stream["url"], *args, **kwargs)
58
59 def _get_show_streams(self, stream_data, show, episode, platform="desktop"):
60 video_id = parse_json(stream_data.group(1), schema=self.vod_id_schema)
61 res = http.get(self.vod_api, params={"platform": platform, "id": video_id})
62
63 # create a unique list of the stream manifest URLs
64 streams = []
65 urldups = []
66 for stream in parse_xml(res.text, schema=self._vod_api_schema):
67 if stream["url"] not in urldups:
68 streams.append(stream)
69 urldups.append(stream["url"])
70
71 mapper = StreamMapper(lambda fmt, strm: strm["url"].endswith(fmt))
72 mapper.map(".m3u8", self._make_hls_hds_stream, HLSStream.parse_variant_playlist)
73 mapper.map(".f4m", self._make_hls_hds_stream, HDSStream.parse_manifest, is_akamai=True)
74 mapper.map(".mp4", lambda s: (s["bitrate"]+"k", HTTPStream(self.session, s["url"])))
75
76 for q, s in mapper(streams):
77 yield q, s
78
79 def _get_live_stream(self, stream_data, show, episode=None):
80 # parse the stream info as json
81 stream_info = parse_json(stream_data.group(1), schema=self.live_schema)
82 # get the stream ID
83 stream_id = None
84 show_info = stream_info[u"streams"][show]
85
86 if episode:
87 self.logger.debug("Loading replay of episode: {0}/{1}", show, episode)
88 for epi in show_info[u"archiveEpisodes"]:
89 if epi[u"slug"] == episode:
90 stream_id = epi[u"id"]
91 elif show_info["isLive"] or not len(show_info[u"archiveEpisodes"]):
92 self.logger.debug("Loading LIVE streams for: {0}", show)
93 stream_id = show_info[u"stream"]
94 else: # off-air
95 if len(show_info[u"archiveEpisodes"]):
96 epi = show_info[u"archiveEpisodes"][0]
97 self.logger.debug("Loading replay of episode: {0}/{1}", show, epi[u"slug"])
98 stream_id = epi[u"id"]
99 else:
100 self.logger.error("This stream is currently offline")
101 return
102
103
104 if stream_id:
105 api_url = self.API_URL.format(id=stream_id)
106
107 res = http.get(api_url, headers={"User-Agent": useragents.SAFARI_8})
108 stream_data = http.json(res, schema=self._api_schema)
109
110 mapper = StreamMapper(lambda fmt, surl: surl.endswith(fmt))
111 mapper.map(".m3u8", HLSStream.parse_variant_playlist, self.session)
112 mapper.map(".f4m", HDSStream.parse_manifest, self.session)
113
114 stream_urls = [asset[u"url"] for asset in stream_data[u'data'][u'stream'][u'assets']]
115 for q, s in mapper(stream_urls):
116 yield q, s
117
118 else:
119 self.logger.error("Couldn't find the stream ID for this stream: {0}".format(show))
120
121 def _get_streams(self):
122 # get the page
123 url_match = self.url_re.match(self.url)
124 live_stream, show_name, episode_name = url_match.groups()
125 if live_stream:
126 show_name = show_name or "live-stream"
127
128 res = http.get(self.url, headers={"User-Agent": useragents.SAFARI_8})
129 # find the big blob of stream info in the page
130 stream_data = self._stream_data_re.search(res.text)
131
132 if stream_data:
133 if live_stream:
134 streams = self._get_live_stream(stream_data, show_name, episode_name)
135 else:
136 self.logger.debug("Loading VOD streams for: {0}/{1}", show_name, episode_name)
137 streams = self._get_show_streams(stream_data, show_name, episode_name)
138
139 # De-dup the streams, some of the mobile streams overlap the desktop streams
140 dups = set()
141 for q, s in streams:
142 if hasattr(s, "args") and "url" in s.args:
143 if s.args["url"] not in dups:
144 yield q, s
145 dups.add(s.args["url"])
146 else:
147 yield q, s
148
149 else:
150 self.logger.error("Couldn't find the stream data for this stream: {0}".format(show_name))
151
152
153 __plugin__ = AdultSwim
154
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/streamlink/plugins/adultswim.py b/src/streamlink/plugins/adultswim.py
--- a/src/streamlink/plugins/adultswim.py
+++ b/src/streamlink/plugins/adultswim.py
@@ -24,7 +24,7 @@
live_schema = validate.Schema({
u"streams": {
validate.text: {u"stream": validate.text,
- u"isLive": bool,
+ validate.optional(u"isLive"): bool,
u"archiveEpisodes": [{
u"id": validate.text,
u"slug": validate.text,
@@ -88,7 +88,7 @@
for epi in show_info[u"archiveEpisodes"]:
if epi[u"slug"] == episode:
stream_id = epi[u"id"]
- elif show_info["isLive"] or not len(show_info[u"archiveEpisodes"]):
+ elif show_info.get("isLive") or not len(show_info[u"archiveEpisodes"]):
self.logger.debug("Loading LIVE streams for: {0}", show)
stream_id = show_info[u"stream"]
else: # off-air
|
{"golden_diff": "diff --git a/src/streamlink/plugins/adultswim.py b/src/streamlink/plugins/adultswim.py\n--- a/src/streamlink/plugins/adultswim.py\n+++ b/src/streamlink/plugins/adultswim.py\n@@ -24,7 +24,7 @@\n live_schema = validate.Schema({\n u\"streams\": {\n validate.text: {u\"stream\": validate.text,\n- u\"isLive\": bool,\n+ validate.optional(u\"isLive\"): bool,\n u\"archiveEpisodes\": [{\n u\"id\": validate.text,\n u\"slug\": validate.text,\n@@ -88,7 +88,7 @@\n for epi in show_info[u\"archiveEpisodes\"]:\n if epi[u\"slug\"] == episode:\n stream_id = epi[u\"id\"]\n- elif show_info[\"isLive\"] or not len(show_info[u\"archiveEpisodes\"]):\n+ elif show_info.get(\"isLive\") or not len(show_info[u\"archiveEpisodes\"]):\n self.logger.debug(\"Loading LIVE streams for: {0}\", show)\n stream_id = show_info[u\"stream\"]\n else: # off-air\n", "issue": "AdultSwim stream broken\n### Checklist\r\n\r\n- [x] This is a bug report.\r\n- [ ] This is a feature request.\r\n- [ ] This is a plugin (improvement) request.\r\n- [x] I have read the contribution guidelines.\r\n\r\n### Description\r\n\r\nrunning `streamlink.exe http://www.adultswim.com/videos/streams best` does not open the AdultSwim stream.\r\n\r\n### Expected / Actual behavior\r\n\r\nExpected: open the AdultSwim stream\r\n\r\nActual behavior: `error: Unable to validate JSON: Unable to validate key 'streams': Key 'isLive' not found in {'description': 'Walk with Him all-day long.', 'email': '', 'archiveCollection': [], 'stream': 'At-rci-fT1SgqmQ2ZA5XtQ', 'id': 'black-jesus', 'pipVideoID': '', 'chat': '', 'doc_id': 'video_stream_shows_black_jesus_marathon', 'showLinks': [{'external': False, 'value': 'More Black Jesus', 'url': 'http://www.adultswim.com/videos/black-jesus/'}, {'external': False, 'value': 'Facebook', 'url': 'http://www.facebook.com/BlackJesus'}, {'external': True, 'value': 'Twitter', 'url': 'https://twitter.com/blackjesusshow'}], 'url': 'http://www.adultswim.com/videos/streams/black-jesus/', 'telephone': '', 'schedule': [], 'images': {'video': 'http://i.cdn.turner.com/adultswim/big/video/black-jesus-marathon/marathonStream_blackjesus.jpg'}, 'chatService': 'http://www.adultswim.com/utilities/api/v1/live/chat/black-jesus', 'type': 'marathon', 'archiveEpisodes': [], 'sponsor':{'link': '', 'title': '', 'imageUrl': ''}, 'title': 'Black Jesus', 'rating': 'TV-MA'}`\r\n\r\n### Reproduction steps / Explicit stream URLs to test\r\n\r\nlook above\r\n\r\n### Environment details\r\n\r\nOperating system and version: Windows 7 64bit \r\nStreamlink and Python version: Python 3.5.2/Streamlink 0.7.0\r\n\r\n\r\n\r\n[Love Streamlink? Please consider supporting our collective. Thanks!](https://opencollective.com/streamlink/donate)\r\n\n", "before_files": [{"content": "import re\n\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import StreamMapper\nfrom streamlink.plugin.api import http, validate\nfrom streamlink.plugin.api import useragents\nfrom streamlink.stream import HDSStream\nfrom streamlink.stream import HLSStream\nfrom streamlink.stream import HTTPStream\nfrom streamlink.utils import parse_json, parse_xml\n\n\nclass AdultSwim(Plugin):\n API_URL = \"http://www.adultswim.com/videos/api/v2/videos/{id}?fields=stream\"\n vod_api = \" http://www.adultswim.com/videos/api/v0/assets\"\n\n url_re = re.compile(r\"\"\"https?://(?:www\\.)?adultswim\\.com/videos\n (?:/(streams))?\n (?:/([^/]+))?\n (?:/([^/]+))?\n \"\"\", re.VERBOSE)\n _stream_data_re = re.compile(r\"(?:__)?AS_INITIAL_DATA(?:__)? = (\\{.*?});\", re.M | re.DOTALL)\n\n live_schema = validate.Schema({\n u\"streams\": {\n validate.text: {u\"stream\": validate.text,\n u\"isLive\": bool,\n u\"archiveEpisodes\": [{\n u\"id\": validate.text,\n u\"slug\": validate.text,\n }]}}\n\n })\n vod_id_schema = validate.Schema({u\"show\": {u\"sluggedVideo\": {u\"id\": validate.text}}},\n validate.transform(lambda x: x[\"show\"][\"sluggedVideo\"][\"id\"]))\n _api_schema = validate.Schema({\n u'status': u'ok',\n u'data': {u'stream': {\n u'assets': [{u'url': validate.url()}]\n }}\n })\n _vod_api_schema = validate.Schema(\n validate.all(\n validate.xml_findall(\".//files/file\"),\n [validate.xml_element,\n validate.transform(lambda v: {\"bitrate\": v.attrib.get(\"bitrate\"), \"url\": v.text})\n ]\n )\n )\n\n @classmethod\n def can_handle_url(cls, url):\n match = AdultSwim.url_re.match(url)\n return match is not None\n\n def _make_hls_hds_stream(self, func, stream, *args, **kwargs):\n return func(self.session, stream[\"url\"], *args, **kwargs)\n\n def _get_show_streams(self, stream_data, show, episode, platform=\"desktop\"):\n video_id = parse_json(stream_data.group(1), schema=self.vod_id_schema)\n res = http.get(self.vod_api, params={\"platform\": platform, \"id\": video_id})\n\n # create a unique list of the stream manifest URLs\n streams = []\n urldups = []\n for stream in parse_xml(res.text, schema=self._vod_api_schema):\n if stream[\"url\"] not in urldups:\n streams.append(stream)\n urldups.append(stream[\"url\"])\n\n mapper = StreamMapper(lambda fmt, strm: strm[\"url\"].endswith(fmt))\n mapper.map(\".m3u8\", self._make_hls_hds_stream, HLSStream.parse_variant_playlist)\n mapper.map(\".f4m\", self._make_hls_hds_stream, HDSStream.parse_manifest, is_akamai=True)\n mapper.map(\".mp4\", lambda s: (s[\"bitrate\"]+\"k\", HTTPStream(self.session, s[\"url\"])))\n\n for q, s in mapper(streams):\n yield q, s\n\n def _get_live_stream(self, stream_data, show, episode=None):\n # parse the stream info as json\n stream_info = parse_json(stream_data.group(1), schema=self.live_schema)\n # get the stream ID\n stream_id = None\n show_info = stream_info[u\"streams\"][show]\n\n if episode:\n self.logger.debug(\"Loading replay of episode: {0}/{1}\", show, episode)\n for epi in show_info[u\"archiveEpisodes\"]:\n if epi[u\"slug\"] == episode:\n stream_id = epi[u\"id\"]\n elif show_info[\"isLive\"] or not len(show_info[u\"archiveEpisodes\"]):\n self.logger.debug(\"Loading LIVE streams for: {0}\", show)\n stream_id = show_info[u\"stream\"]\n else: # off-air\n if len(show_info[u\"archiveEpisodes\"]):\n epi = show_info[u\"archiveEpisodes\"][0]\n self.logger.debug(\"Loading replay of episode: {0}/{1}\", show, epi[u\"slug\"])\n stream_id = epi[u\"id\"]\n else:\n self.logger.error(\"This stream is currently offline\")\n return\n\n\n if stream_id:\n api_url = self.API_URL.format(id=stream_id)\n\n res = http.get(api_url, headers={\"User-Agent\": useragents.SAFARI_8})\n stream_data = http.json(res, schema=self._api_schema)\n\n mapper = StreamMapper(lambda fmt, surl: surl.endswith(fmt))\n mapper.map(\".m3u8\", HLSStream.parse_variant_playlist, self.session)\n mapper.map(\".f4m\", HDSStream.parse_manifest, self.session)\n\n stream_urls = [asset[u\"url\"] for asset in stream_data[u'data'][u'stream'][u'assets']]\n for q, s in mapper(stream_urls):\n yield q, s\n\n else:\n self.logger.error(\"Couldn't find the stream ID for this stream: {0}\".format(show))\n\n def _get_streams(self):\n # get the page\n url_match = self.url_re.match(self.url)\n live_stream, show_name, episode_name = url_match.groups()\n if live_stream:\n show_name = show_name or \"live-stream\"\n\n res = http.get(self.url, headers={\"User-Agent\": useragents.SAFARI_8})\n # find the big blob of stream info in the page\n stream_data = self._stream_data_re.search(res.text)\n\n if stream_data:\n if live_stream:\n streams = self._get_live_stream(stream_data, show_name, episode_name)\n else:\n self.logger.debug(\"Loading VOD streams for: {0}/{1}\", show_name, episode_name)\n streams = self._get_show_streams(stream_data, show_name, episode_name)\n\n # De-dup the streams, some of the mobile streams overlap the desktop streams\n dups = set()\n for q, s in streams:\n if hasattr(s, \"args\") and \"url\" in s.args:\n if s.args[\"url\"] not in dups:\n yield q, s\n dups.add(s.args[\"url\"])\n else:\n yield q, s\n\n else:\n self.logger.error(\"Couldn't find the stream data for this stream: {0}\".format(show_name))\n\n\n__plugin__ = AdultSwim\n", "path": "src/streamlink/plugins/adultswim.py"}], "after_files": [{"content": "import re\n\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import StreamMapper\nfrom streamlink.plugin.api import http, validate\nfrom streamlink.plugin.api import useragents\nfrom streamlink.stream import HDSStream\nfrom streamlink.stream import HLSStream\nfrom streamlink.stream import HTTPStream\nfrom streamlink.utils import parse_json, parse_xml\n\n\nclass AdultSwim(Plugin):\n API_URL = \"http://www.adultswim.com/videos/api/v2/videos/{id}?fields=stream\"\n vod_api = \" http://www.adultswim.com/videos/api/v0/assets\"\n\n url_re = re.compile(r\"\"\"https?://(?:www\\.)?adultswim\\.com/videos\n (?:/(streams))?\n (?:/([^/]+))?\n (?:/([^/]+))?\n \"\"\", re.VERBOSE)\n _stream_data_re = re.compile(r\"(?:__)?AS_INITIAL_DATA(?:__)? = (\\{.*?});\", re.M | re.DOTALL)\n\n live_schema = validate.Schema({\n u\"streams\": {\n validate.text: {u\"stream\": validate.text,\n validate.optional(u\"isLive\"): bool,\n u\"archiveEpisodes\": [{\n u\"id\": validate.text,\n u\"slug\": validate.text,\n }]}}\n\n })\n vod_id_schema = validate.Schema({u\"show\": {u\"sluggedVideo\": {u\"id\": validate.text}}},\n validate.transform(lambda x: x[\"show\"][\"sluggedVideo\"][\"id\"]))\n _api_schema = validate.Schema({\n u'status': u'ok',\n u'data': {u'stream': {\n u'assets': [{u'url': validate.url()}]\n }}\n })\n _vod_api_schema = validate.Schema(\n validate.all(\n validate.xml_findall(\".//files/file\"),\n [validate.xml_element,\n validate.transform(lambda v: {\"bitrate\": v.attrib.get(\"bitrate\"), \"url\": v.text})\n ]\n )\n )\n\n @classmethod\n def can_handle_url(cls, url):\n match = AdultSwim.url_re.match(url)\n return match is not None\n\n def _make_hls_hds_stream(self, func, stream, *args, **kwargs):\n return func(self.session, stream[\"url\"], *args, **kwargs)\n\n def _get_show_streams(self, stream_data, show, episode, platform=\"desktop\"):\n video_id = parse_json(stream_data.group(1), schema=self.vod_id_schema)\n res = http.get(self.vod_api, params={\"platform\": platform, \"id\": video_id})\n\n # create a unique list of the stream manifest URLs\n streams = []\n urldups = []\n for stream in parse_xml(res.text, schema=self._vod_api_schema):\n if stream[\"url\"] not in urldups:\n streams.append(stream)\n urldups.append(stream[\"url\"])\n\n mapper = StreamMapper(lambda fmt, strm: strm[\"url\"].endswith(fmt))\n mapper.map(\".m3u8\", self._make_hls_hds_stream, HLSStream.parse_variant_playlist)\n mapper.map(\".f4m\", self._make_hls_hds_stream, HDSStream.parse_manifest, is_akamai=True)\n mapper.map(\".mp4\", lambda s: (s[\"bitrate\"]+\"k\", HTTPStream(self.session, s[\"url\"])))\n\n for q, s in mapper(streams):\n yield q, s\n\n def _get_live_stream(self, stream_data, show, episode=None):\n # parse the stream info as json\n stream_info = parse_json(stream_data.group(1), schema=self.live_schema)\n # get the stream ID\n stream_id = None\n show_info = stream_info[u\"streams\"][show]\n\n if episode:\n self.logger.debug(\"Loading replay of episode: {0}/{1}\", show, episode)\n for epi in show_info[u\"archiveEpisodes\"]:\n if epi[u\"slug\"] == episode:\n stream_id = epi[u\"id\"]\n elif show_info.get(\"isLive\") or not len(show_info[u\"archiveEpisodes\"]):\n self.logger.debug(\"Loading LIVE streams for: {0}\", show)\n stream_id = show_info[u\"stream\"]\n else: # off-air\n if len(show_info[u\"archiveEpisodes\"]):\n epi = show_info[u\"archiveEpisodes\"][0]\n self.logger.debug(\"Loading replay of episode: {0}/{1}\", show, epi[u\"slug\"])\n stream_id = epi[u\"id\"]\n else:\n self.logger.error(\"This stream is currently offline\")\n return\n\n\n if stream_id:\n api_url = self.API_URL.format(id=stream_id)\n\n res = http.get(api_url, headers={\"User-Agent\": useragents.SAFARI_8})\n stream_data = http.json(res, schema=self._api_schema)\n\n mapper = StreamMapper(lambda fmt, surl: surl.endswith(fmt))\n mapper.map(\".m3u8\", HLSStream.parse_variant_playlist, self.session)\n mapper.map(\".f4m\", HDSStream.parse_manifest, self.session)\n\n stream_urls = [asset[u\"url\"] for asset in stream_data[u'data'][u'stream'][u'assets']]\n for q, s in mapper(stream_urls):\n yield q, s\n\n else:\n self.logger.error(\"Couldn't find the stream ID for this stream: {0}\".format(show))\n\n def _get_streams(self):\n # get the page\n url_match = self.url_re.match(self.url)\n live_stream, show_name, episode_name = url_match.groups()\n if live_stream:\n show_name = show_name or \"live-stream\"\n\n res = http.get(self.url, headers={\"User-Agent\": useragents.SAFARI_8})\n # find the big blob of stream info in the page\n stream_data = self._stream_data_re.search(res.text)\n\n if stream_data:\n if live_stream:\n streams = self._get_live_stream(stream_data, show_name, episode_name)\n else:\n self.logger.debug(\"Loading VOD streams for: {0}/{1}\", show_name, episode_name)\n streams = self._get_show_streams(stream_data, show_name, episode_name)\n\n # De-dup the streams, some of the mobile streams overlap the desktop streams\n dups = set()\n for q, s in streams:\n if hasattr(s, \"args\") and \"url\" in s.args:\n if s.args[\"url\"] not in dups:\n yield q, s\n dups.add(s.args[\"url\"])\n else:\n yield q, s\n\n else:\n self.logger.error(\"Couldn't find the stream data for this stream: {0}\".format(show_name))\n\n\n__plugin__ = AdultSwim\n", "path": "src/streamlink/plugins/adultswim.py"}]}
| 2,580 | 247 |
gh_patches_debug_1205
|
rasdani/github-patches
|
git_diff
|
mkdocs__mkdocs-2893
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support latest realise of Markdown library
I believe there has been some update to the `Markdown` library and how it internally records its version that is breaking things.
With a brand new environment and a fresh install of `mkdocs`, a `mkdocs build --strict --verbose` fails my project with this error:
```bash
DEBUG - Loading configuration file: /Users/sh/Projects/dataportalapiclient/mkdocs.yml
ERROR - Config value: 'markdown_extensions'. Error: module 'markdown' has no attribute 'version_info'
```
At this point, mkdocs has a dependency on `Markdown==3.4.1`, which was released [three days ago](https://github.com/Python-Markdown/markdown/tags).
After running a `pip install Markdown==3.3.7` to downgrade the version, rerunning the build is successful:
```bash
DEBUG - Loading configuration file: /Users/sh/Projects/dataportalapiclient/mkdocs.yml
...
DEBUG - mkdocstrings: Tearing handlers down
INFO - Documentation built in 3.45 seconds
```
I notice in [this commit from May 27th on the Markdown repository](https://github.com/Python-Markdown/markdown/commit/a767b2daaad78ba32d45a4f1dabb7c5e218f030a), the deprecated `version_info` info object was removed, and replaced with the `__version_info__` object, as per this table:
| Deprecated Object | Replacement Object |
|----------------------------------------|-------------------------------------|
| `markdown.version` | `markdown.__version__` |
| `markdown.version_info` | `markdown.__version_info__` |
| `markdown.util.etree` | `xml.etree.ElementTree` |
| `markdown.util.string_type` | `str` |
| `markdown.util.text_type` | `str` |
| `markdown.util.int2str` | `chr` |
| `markdown.util.iterrange` | `range` |
| `markdown.util.isBlockLevel` | `markdown.Markdown.is_block_level` |
| `markdown.util.Processor().markdown` | `markdown.util.Processor().md` |
| `markdown.util.Registry().__setitem__` | `markdown.util.Registry().register` |
| `markdown.util.Registry().__delitem__` |`markdown.util.Registry().deregister`|
| `markdown.util.Registry().add` | `markdown.util.Registry().register` |
Hopefully the fix is a simple change to this dunder object! Whether this repository is the right place for the packaged markdown extension or not, I'm unsure, I couldn't quite see where that config gets run either here or in the [Python Markdown library](https://github.com/Python-Markdown/markdown/).
If this isn't the place, I'd appreciate if you can please point me towards the right repo.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 from setuptools import setup
4 import re
5 import os
6 import sys
7
8 from mkdocs.commands.setup import babel_cmdclass
9
10 with open('README.md') as f:
11 long_description = f.read()
12
13
14 def get_version(package):
15 """Return package version as listed in `__version__` in `init.py`."""
16 init_py = open(os.path.join(package, '__init__.py')).read()
17 return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
18
19
20 def get_packages(package):
21 """Return root package and all sub-packages."""
22 return [dirpath
23 for dirpath, dirnames, filenames in os.walk(package)
24 if os.path.exists(os.path.join(dirpath, '__init__.py'))]
25
26
27 if sys.argv[-1] == 'publish':
28 if os.system("pip freeze | grep wheel"):
29 print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
30 sys.exit()
31 if os.system("pip freeze | grep twine"):
32 print("twine not installed.\nUse `pip install twine`.\nExiting.")
33 sys.exit()
34 if os.system("pip freeze | grep Babel"):
35 print("babel not installed.\nUse `pip install babel`.\nExiting.")
36 sys.exit()
37 for locale in os.listdir("mkdocs/themes/mkdocs/locales"):
38 os.system(f"python setup.py compile_catalog -t mkdocs -l {locale}")
39 os.system(f"python setup.py compile_catalog -t readthedocs -l {locale}")
40 os.system("python setup.py sdist bdist_wheel")
41 os.system("twine upload dist/*")
42 print("You probably want to also tag the version now:")
43 version = get_version("mkdocs")
44 print(f" git tag -a {version} -m 'version {version}'")
45 print(" git push --tags")
46 sys.exit()
47
48
49 setup(
50 name="mkdocs",
51 version=get_version("mkdocs"),
52 url='https://www.mkdocs.org',
53 project_urls={
54 'Source': 'https://github.com/mkdocs/mkdocs',
55 },
56 license='BSD',
57 description='Project documentation with Markdown.',
58 long_description=long_description,
59 long_description_content_type='text/markdown',
60 author='Tom Christie',
61 author_email='[email protected]', # SEE NOTE BELOW (*)
62 packages=get_packages("mkdocs"),
63 include_package_data=True,
64 install_requires=[
65 'click>=3.3',
66 'Jinja2>=2.10.2',
67 'Markdown>=3.2.1',
68 'PyYAML>=3.10',
69 'watchdog>=2.0',
70 'ghp-import>=1.0',
71 'pyyaml_env_tag>=0.1',
72 'importlib_metadata>=4.3',
73 'packaging>=20.5',
74 'mergedeep>=1.3.4'
75 ],
76 extras_require={"i18n": ['babel>=2.9.0']},
77 python_requires='>=3.6',
78 entry_points={
79 'console_scripts': [
80 'mkdocs = mkdocs.__main__:cli',
81 ],
82 'mkdocs.themes': [
83 'mkdocs = mkdocs.themes.mkdocs',
84 'readthedocs = mkdocs.themes.readthedocs',
85 ],
86 'mkdocs.plugins': [
87 'search = mkdocs.contrib.search:SearchPlugin',
88 ],
89 },
90 classifiers=[
91 'Development Status :: 5 - Production/Stable',
92 'Environment :: Console',
93 'Environment :: Web Environment',
94 'Intended Audience :: Developers',
95 'License :: OSI Approved :: BSD License',
96 'Operating System :: OS Independent',
97 'Programming Language :: Python',
98 'Programming Language :: Python :: 3',
99 'Programming Language :: Python :: 3.6',
100 'Programming Language :: Python :: 3.7',
101 'Programming Language :: Python :: 3.8',
102 'Programming Language :: Python :: 3.9',
103 'Programming Language :: Python :: 3.10',
104 'Programming Language :: Python :: 3 :: Only',
105 "Programming Language :: Python :: Implementation :: CPython",
106 "Programming Language :: Python :: Implementation :: PyPy",
107 'Topic :: Documentation',
108 'Topic :: Text Processing',
109 ],
110 zip_safe=False,
111 cmdclass=babel_cmdclass,
112 )
113
114 # (*) Please direct queries to the discussion group:
115 # https://groups.google.com/forum/#!forum/mkdocs
116
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -64,7 +64,7 @@
install_requires=[
'click>=3.3',
'Jinja2>=2.10.2',
- 'Markdown>=3.2.1',
+ 'Markdown>=3.2.1,<3.4',
'PyYAML>=3.10',
'watchdog>=2.0',
'ghp-import>=1.0',
|
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -64,7 +64,7 @@\n install_requires=[\n 'click>=3.3',\n 'Jinja2>=2.10.2',\n- 'Markdown>=3.2.1',\n+ 'Markdown>=3.2.1,<3.4',\n 'PyYAML>=3.10',\n 'watchdog>=2.0',\n 'ghp-import>=1.0',\n", "issue": "Support latest realise of Markdown library\nI believe there has been some update to the `Markdown` library and how it internally records its version that is breaking things.\r\n\r\nWith a brand new environment and a fresh install of `mkdocs`, a `mkdocs build --strict --verbose` fails my project with this error:\r\n\r\n```bash\r\nDEBUG - Loading configuration file: /Users/sh/Projects/dataportalapiclient/mkdocs.yml\r\nERROR - Config value: 'markdown_extensions'. Error: module 'markdown' has no attribute 'version_info'\r\n```\r\nAt this point, mkdocs has a dependency on `Markdown==3.4.1`, which was released [three days ago](https://github.com/Python-Markdown/markdown/tags). \r\n\r\nAfter running a `pip install Markdown==3.3.7` to downgrade the version, rerunning the build is successful:\r\n\r\n```bash\r\nDEBUG - Loading configuration file: /Users/sh/Projects/dataportalapiclient/mkdocs.yml\r\n...\r\nDEBUG - mkdocstrings: Tearing handlers down\r\nINFO - Documentation built in 3.45 seconds\r\n```\r\n\r\nI notice in [this commit from May 27th on the Markdown repository](https://github.com/Python-Markdown/markdown/commit/a767b2daaad78ba32d45a4f1dabb7c5e218f030a), the deprecated `version_info` info object was removed, and replaced with the `__version_info__` object, as per this table:\r\n\r\n| Deprecated Object | Replacement Object |\r\n|----------------------------------------|-------------------------------------|\r\n| `markdown.version` | `markdown.__version__` |\r\n| `markdown.version_info` | `markdown.__version_info__` |\r\n| `markdown.util.etree` | `xml.etree.ElementTree` |\r\n| `markdown.util.string_type` | `str` |\r\n| `markdown.util.text_type` | `str` |\r\n| `markdown.util.int2str` | `chr` |\r\n| `markdown.util.iterrange` | `range` |\r\n| `markdown.util.isBlockLevel` | `markdown.Markdown.is_block_level` |\r\n| `markdown.util.Processor().markdown` | `markdown.util.Processor().md` |\r\n| `markdown.util.Registry().__setitem__` | `markdown.util.Registry().register` |\r\n| `markdown.util.Registry().__delitem__` |`markdown.util.Registry().deregister`|\r\n| `markdown.util.Registry().add` | `markdown.util.Registry().register` |\r\n\r\nHopefully the fix is a simple change to this dunder object! Whether this repository is the right place for the packaged markdown extension or not, I'm unsure, I couldn't quite see where that config gets run either here or in the [Python Markdown library](https://github.com/Python-Markdown/markdown/).\r\n\r\n If this isn't the place, I'd appreciate if you can please point me towards the right repo.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\nfrom setuptools import setup\nimport re\nimport os\nimport sys\n\nfrom mkdocs.commands.setup import babel_cmdclass\n\nwith open('README.md') as f:\n long_description = f.read()\n\n\ndef get_version(package):\n \"\"\"Return package version as listed in `__version__` in `init.py`.\"\"\"\n init_py = open(os.path.join(package, '__init__.py')).read()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py).group(1)\n\n\ndef get_packages(package):\n \"\"\"Return root package and all sub-packages.\"\"\"\n return [dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n\nif sys.argv[-1] == 'publish':\n if os.system(\"pip freeze | grep wheel\"):\n print(\"wheel not installed.\\nUse `pip install wheel`.\\nExiting.\")\n sys.exit()\n if os.system(\"pip freeze | grep twine\"):\n print(\"twine not installed.\\nUse `pip install twine`.\\nExiting.\")\n sys.exit()\n if os.system(\"pip freeze | grep Babel\"):\n print(\"babel not installed.\\nUse `pip install babel`.\\nExiting.\")\n sys.exit()\n for locale in os.listdir(\"mkdocs/themes/mkdocs/locales\"):\n os.system(f\"python setup.py compile_catalog -t mkdocs -l {locale}\")\n os.system(f\"python setup.py compile_catalog -t readthedocs -l {locale}\")\n os.system(\"python setup.py sdist bdist_wheel\")\n os.system(\"twine upload dist/*\")\n print(\"You probably want to also tag the version now:\")\n version = get_version(\"mkdocs\")\n print(f\" git tag -a {version} -m 'version {version}'\")\n print(\" git push --tags\")\n sys.exit()\n\n\nsetup(\n name=\"mkdocs\",\n version=get_version(\"mkdocs\"),\n url='https://www.mkdocs.org',\n project_urls={\n 'Source': 'https://github.com/mkdocs/mkdocs',\n },\n license='BSD',\n description='Project documentation with Markdown.',\n long_description=long_description,\n long_description_content_type='text/markdown',\n author='Tom Christie',\n author_email='[email protected]', # SEE NOTE BELOW (*)\n packages=get_packages(\"mkdocs\"),\n include_package_data=True,\n install_requires=[\n 'click>=3.3',\n 'Jinja2>=2.10.2',\n 'Markdown>=3.2.1',\n 'PyYAML>=3.10',\n 'watchdog>=2.0',\n 'ghp-import>=1.0',\n 'pyyaml_env_tag>=0.1',\n 'importlib_metadata>=4.3',\n 'packaging>=20.5',\n 'mergedeep>=1.3.4'\n ],\n extras_require={\"i18n\": ['babel>=2.9.0']},\n python_requires='>=3.6',\n entry_points={\n 'console_scripts': [\n 'mkdocs = mkdocs.__main__:cli',\n ],\n 'mkdocs.themes': [\n 'mkdocs = mkdocs.themes.mkdocs',\n 'readthedocs = mkdocs.themes.readthedocs',\n ],\n 'mkdocs.plugins': [\n 'search = mkdocs.contrib.search:SearchPlugin',\n ],\n },\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10',\n 'Programming Language :: Python :: 3 :: Only',\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n 'Topic :: Documentation',\n 'Topic :: Text Processing',\n ],\n zip_safe=False,\n cmdclass=babel_cmdclass,\n)\n\n# (*) Please direct queries to the discussion group:\n# https://groups.google.com/forum/#!forum/mkdocs\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\nfrom setuptools import setup\nimport re\nimport os\nimport sys\n\nfrom mkdocs.commands.setup import babel_cmdclass\n\nwith open('README.md') as f:\n long_description = f.read()\n\n\ndef get_version(package):\n \"\"\"Return package version as listed in `__version__` in `init.py`.\"\"\"\n init_py = open(os.path.join(package, '__init__.py')).read()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py).group(1)\n\n\ndef get_packages(package):\n \"\"\"Return root package and all sub-packages.\"\"\"\n return [dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n\nif sys.argv[-1] == 'publish':\n if os.system(\"pip freeze | grep wheel\"):\n print(\"wheel not installed.\\nUse `pip install wheel`.\\nExiting.\")\n sys.exit()\n if os.system(\"pip freeze | grep twine\"):\n print(\"twine not installed.\\nUse `pip install twine`.\\nExiting.\")\n sys.exit()\n if os.system(\"pip freeze | grep Babel\"):\n print(\"babel not installed.\\nUse `pip install babel`.\\nExiting.\")\n sys.exit()\n for locale in os.listdir(\"mkdocs/themes/mkdocs/locales\"):\n os.system(f\"python setup.py compile_catalog -t mkdocs -l {locale}\")\n os.system(f\"python setup.py compile_catalog -t readthedocs -l {locale}\")\n os.system(\"python setup.py sdist bdist_wheel\")\n os.system(\"twine upload dist/*\")\n print(\"You probably want to also tag the version now:\")\n version = get_version(\"mkdocs\")\n print(f\" git tag -a {version} -m 'version {version}'\")\n print(\" git push --tags\")\n sys.exit()\n\n\nsetup(\n name=\"mkdocs\",\n version=get_version(\"mkdocs\"),\n url='https://www.mkdocs.org',\n project_urls={\n 'Source': 'https://github.com/mkdocs/mkdocs',\n },\n license='BSD',\n description='Project documentation with Markdown.',\n long_description=long_description,\n long_description_content_type='text/markdown',\n author='Tom Christie',\n author_email='[email protected]', # SEE NOTE BELOW (*)\n packages=get_packages(\"mkdocs\"),\n include_package_data=True,\n install_requires=[\n 'click>=3.3',\n 'Jinja2>=2.10.2',\n 'Markdown>=3.2.1,<3.4',\n 'PyYAML>=3.10',\n 'watchdog>=2.0',\n 'ghp-import>=1.0',\n 'pyyaml_env_tag>=0.1',\n 'importlib_metadata>=4.3',\n 'packaging>=20.5',\n 'mergedeep>=1.3.4'\n ],\n extras_require={\"i18n\": ['babel>=2.9.0']},\n python_requires='>=3.6',\n entry_points={\n 'console_scripts': [\n 'mkdocs = mkdocs.__main__:cli',\n ],\n 'mkdocs.themes': [\n 'mkdocs = mkdocs.themes.mkdocs',\n 'readthedocs = mkdocs.themes.readthedocs',\n ],\n 'mkdocs.plugins': [\n 'search = mkdocs.contrib.search:SearchPlugin',\n ],\n },\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10',\n 'Programming Language :: Python :: 3 :: Only',\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n 'Topic :: Documentation',\n 'Topic :: Text Processing',\n ],\n zip_safe=False,\n cmdclass=babel_cmdclass,\n)\n\n# (*) Please direct queries to the discussion group:\n# https://groups.google.com/forum/#!forum/mkdocs\n", "path": "setup.py"}]}
| 2,122 | 113 |
gh_patches_debug_4837
|
rasdani/github-patches
|
git_diff
|
kivy__python-for-android-2800
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Python 3.10 cffi build fails
<!--
The issue tracker is a tool to address bugs NOT a support platform.
Please use the Discord community or Stack Overflow for support questions,
more information at https://github.com/kivy/python-for-android#support
-->
### Checklist
- [ x] the issue is indeed a bug and not a support request
- [ x] issue doesn't already exist: https://github.com/kivy/python-for-android/issues
- [ x] I have a short, runnable example that reproduces the issue
- [x ] I reproduced the problem with the latest development version (`p4a.branch = develop`)
- [ x] I used the grave accent (aka backticks) to format code or logs when appropriated
### Versions
- Python: 2.10.6 (OS)
- OS: Ubuntu 22.04.2
- Kivy: 2.2.0
- Cython: 0.29.33
- OpenJDK:
### Description
p4a v2023.5.21 build of cffi fails
### buildozer.spec
Spec file:
```
requirements = python3,kivy, cffi
```
### Logs
```
/home/bobf/.buildozer/android/platform/android-ndk-r25b/toolchains/llvm/prebuilt/linux-x86_64/bin/clang -target aarch64-linux-android21 -fomit-frame-pointer -march=armv8-a -fPIC -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -target aarch64-linux-android21 -fomit-frame-pointer -march=armv8-a -fPIC -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/other_builds/libffi/arm64-v8a__ndk_target_21/libffi/include -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/other_builds/python3/arm64-v8a__ndk_target_21/python3/Include -DANDROID -I/home/bobf/.buildozer/android/platform/android-ndk-r25b/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/python-installs/apptest/arm64-v8a/include/python3.1 -fPIC -DUSE__THREAD -DHAVE_SYNC_SYNCHRONIZE -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/other_builds/hostpython3/desktop/hostpython3/Include -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/other_builds/hostpython3/desktop/hostpython3/native-build -c c/_cffi_backend.c -o build/temp.linux-x86_64-3.10/c/_cffi_backend.o
c/_cffi_backend.c:407:23: error: expression is not assignable
Py_REFCNT(ct) = 43;
~~~~~~~~~~~~~ ^
c/_cffi_backend.c:410:23: error: expression is not assignable
Py_REFCNT(ct) = 0;
~~~~~~~~~~~~~ ^
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pythonforandroid/recipes/cffi/__init__.py`
Content:
```
1 import os
2 from pythonforandroid.recipe import CompiledComponentsPythonRecipe
3
4
5 class CffiRecipe(CompiledComponentsPythonRecipe):
6 """
7 Extra system dependencies: autoconf, automake and libtool.
8 """
9 name = 'cffi'
10 version = '1.13.2'
11 url = 'https://pypi.python.org/packages/source/c/cffi/cffi-{version}.tar.gz'
12
13 depends = ['setuptools', 'pycparser', 'libffi']
14
15 patches = ['disable-pkg-config.patch']
16
17 # call_hostpython_via_targetpython = False
18 install_in_hostpython = True
19
20 def get_hostrecipe_env(self, arch=None):
21 # fixes missing ffi.h on some host systems (e.g. gentoo)
22 env = super().get_hostrecipe_env(arch)
23 libffi = self.get_recipe('libffi', self.ctx)
24 includes = libffi.get_include_dirs(arch)
25 env['FFI_INC'] = ",".join(includes)
26 return env
27
28 def get_recipe_env(self, arch=None):
29 env = super().get_recipe_env(arch)
30 libffi = self.get_recipe('libffi', self.ctx)
31 includes = libffi.get_include_dirs(arch)
32 env['CFLAGS'] = ' -I'.join([env.get('CFLAGS', '')] + includes)
33 env['CFLAGS'] += ' -I{}'.format(self.ctx.python_recipe.include_root(arch.arch))
34 env['LDFLAGS'] = (env.get('CFLAGS', '') + ' -L' +
35 self.ctx.get_libs_dir(arch.arch))
36 env['LDFLAGS'] += ' -L{}'.format(os.path.join(self.ctx.bootstrap.build_dir, 'libs', arch.arch))
37 # required for libc and libdl
38 env['LDFLAGS'] += ' -L{}'.format(arch.ndk_lib_dir_versioned)
39 env['PYTHONPATH'] = ':'.join([
40 self.ctx.get_site_packages_dir(arch),
41 env['BUILDLIB_PATH'],
42 ])
43 env['LDFLAGS'] += ' -L{}'.format(self.ctx.python_recipe.link_root(arch.arch))
44 env['LDFLAGS'] += ' -lpython{}'.format(self.ctx.python_recipe.link_version)
45 return env
46
47
48 recipe = CffiRecipe()
49
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pythonforandroid/recipes/cffi/__init__.py b/pythonforandroid/recipes/cffi/__init__.py
--- a/pythonforandroid/recipes/cffi/__init__.py
+++ b/pythonforandroid/recipes/cffi/__init__.py
@@ -7,7 +7,7 @@
Extra system dependencies: autoconf, automake and libtool.
"""
name = 'cffi'
- version = '1.13.2'
+ version = '1.15.1'
url = 'https://pypi.python.org/packages/source/c/cffi/cffi-{version}.tar.gz'
depends = ['setuptools', 'pycparser', 'libffi']
|
{"golden_diff": "diff --git a/pythonforandroid/recipes/cffi/__init__.py b/pythonforandroid/recipes/cffi/__init__.py\n--- a/pythonforandroid/recipes/cffi/__init__.py\n+++ b/pythonforandroid/recipes/cffi/__init__.py\n@@ -7,7 +7,7 @@\n Extra system dependencies: autoconf, automake and libtool.\n \"\"\"\n name = 'cffi'\n- version = '1.13.2'\n+ version = '1.15.1'\n url = 'https://pypi.python.org/packages/source/c/cffi/cffi-{version}.tar.gz'\n \n depends = ['setuptools', 'pycparser', 'libffi']\n", "issue": "Python 3.10 cffi build fails\n<!--\r\nThe issue tracker is a tool to address bugs NOT a support platform.\r\nPlease use the Discord community or Stack Overflow for support questions,\r\nmore information at https://github.com/kivy/python-for-android#support\r\n-->\r\n\r\n### Checklist\r\n\r\n- [ x] the issue is indeed a bug and not a support request\r\n- [ x] issue doesn't already exist: https://github.com/kivy/python-for-android/issues\r\n- [ x] I have a short, runnable example that reproduces the issue\r\n- [x ] I reproduced the problem with the latest development version (`p4a.branch = develop`)\r\n- [ x] I used the grave accent (aka backticks) to format code or logs when appropriated\r\n\r\n### Versions\r\n\r\n- Python: 2.10.6 (OS)\r\n- OS: Ubuntu 22.04.2\r\n- Kivy: 2.2.0\r\n- Cython: 0.29.33\r\n- OpenJDK:\r\n\r\n### Description\r\n\r\np4a v2023.5.21 build of cffi fails\r\n\r\n### buildozer.spec\r\n\r\n\r\nSpec file:\r\n```\r\nrequirements = python3,kivy, cffi\r\n```\r\n\r\n### Logs\r\n\r\n```\r\n/home/bobf/.buildozer/android/platform/android-ndk-r25b/toolchains/llvm/prebuilt/linux-x86_64/bin/clang -target aarch64-linux-android21 -fomit-frame-pointer -march=armv8-a -fPIC -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -target aarch64-linux-android21 -fomit-frame-pointer -march=armv8-a -fPIC -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/other_builds/libffi/arm64-v8a__ndk_target_21/libffi/include -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/other_builds/python3/arm64-v8a__ndk_target_21/python3/Include -DANDROID -I/home/bobf/.buildozer/android/platform/android-ndk-r25b/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/python-installs/apptest/arm64-v8a/include/python3.1 -fPIC -DUSE__THREAD -DHAVE_SYNC_SYNCHRONIZE -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/other_builds/hostpython3/desktop/hostpython3/Include -I/home/bobf/ex/hello/.buildozer/android/platform/build-arm64-v8a/build/other_builds/hostpython3/desktop/hostpython3/native-build -c c/_cffi_backend.c -o build/temp.linux-x86_64-3.10/c/_cffi_backend.o\r\nc/_cffi_backend.c:407:23: error: expression is not assignable\r\n Py_REFCNT(ct) = 43;\r\n ~~~~~~~~~~~~~ ^\r\nc/_cffi_backend.c:410:23: error: expression is not assignable\r\n Py_REFCNT(ct) = 0;\r\n ~~~~~~~~~~~~~ ^\r\n\r\n```\r\n\n", "before_files": [{"content": "import os\nfrom pythonforandroid.recipe import CompiledComponentsPythonRecipe\n\n\nclass CffiRecipe(CompiledComponentsPythonRecipe):\n \"\"\"\n Extra system dependencies: autoconf, automake and libtool.\n \"\"\"\n name = 'cffi'\n version = '1.13.2'\n url = 'https://pypi.python.org/packages/source/c/cffi/cffi-{version}.tar.gz'\n\n depends = ['setuptools', 'pycparser', 'libffi']\n\n patches = ['disable-pkg-config.patch']\n\n # call_hostpython_via_targetpython = False\n install_in_hostpython = True\n\n def get_hostrecipe_env(self, arch=None):\n # fixes missing ffi.h on some host systems (e.g. gentoo)\n env = super().get_hostrecipe_env(arch)\n libffi = self.get_recipe('libffi', self.ctx)\n includes = libffi.get_include_dirs(arch)\n env['FFI_INC'] = \",\".join(includes)\n return env\n\n def get_recipe_env(self, arch=None):\n env = super().get_recipe_env(arch)\n libffi = self.get_recipe('libffi', self.ctx)\n includes = libffi.get_include_dirs(arch)\n env['CFLAGS'] = ' -I'.join([env.get('CFLAGS', '')] + includes)\n env['CFLAGS'] += ' -I{}'.format(self.ctx.python_recipe.include_root(arch.arch))\n env['LDFLAGS'] = (env.get('CFLAGS', '') + ' -L' +\n self.ctx.get_libs_dir(arch.arch))\n env['LDFLAGS'] += ' -L{}'.format(os.path.join(self.ctx.bootstrap.build_dir, 'libs', arch.arch))\n # required for libc and libdl\n env['LDFLAGS'] += ' -L{}'.format(arch.ndk_lib_dir_versioned)\n env['PYTHONPATH'] = ':'.join([\n self.ctx.get_site_packages_dir(arch),\n env['BUILDLIB_PATH'],\n ])\n env['LDFLAGS'] += ' -L{}'.format(self.ctx.python_recipe.link_root(arch.arch))\n env['LDFLAGS'] += ' -lpython{}'.format(self.ctx.python_recipe.link_version)\n return env\n\n\nrecipe = CffiRecipe()\n", "path": "pythonforandroid/recipes/cffi/__init__.py"}], "after_files": [{"content": "import os\nfrom pythonforandroid.recipe import CompiledComponentsPythonRecipe\n\n\nclass CffiRecipe(CompiledComponentsPythonRecipe):\n \"\"\"\n Extra system dependencies: autoconf, automake and libtool.\n \"\"\"\n name = 'cffi'\n version = '1.15.1'\n url = 'https://pypi.python.org/packages/source/c/cffi/cffi-{version}.tar.gz'\n\n depends = ['setuptools', 'pycparser', 'libffi']\n\n patches = ['disable-pkg-config.patch']\n\n # call_hostpython_via_targetpython = False\n install_in_hostpython = True\n\n def get_hostrecipe_env(self, arch=None):\n # fixes missing ffi.h on some host systems (e.g. gentoo)\n env = super().get_hostrecipe_env(arch)\n libffi = self.get_recipe('libffi', self.ctx)\n includes = libffi.get_include_dirs(arch)\n env['FFI_INC'] = \",\".join(includes)\n return env\n\n def get_recipe_env(self, arch=None):\n env = super().get_recipe_env(arch)\n libffi = self.get_recipe('libffi', self.ctx)\n includes = libffi.get_include_dirs(arch)\n env['CFLAGS'] = ' -I'.join([env.get('CFLAGS', '')] + includes)\n env['CFLAGS'] += ' -I{}'.format(self.ctx.python_recipe.include_root(arch.arch))\n env['LDFLAGS'] = (env.get('CFLAGS', '') + ' -L' +\n self.ctx.get_libs_dir(arch.arch))\n env['LDFLAGS'] += ' -L{}'.format(os.path.join(self.ctx.bootstrap.build_dir, 'libs', arch.arch))\n # required for libc and libdl\n env['LDFLAGS'] += ' -L{}'.format(arch.ndk_lib_dir_versioned)\n env['PYTHONPATH'] = ':'.join([\n self.ctx.get_site_packages_dir(arch),\n env['BUILDLIB_PATH'],\n ])\n env['LDFLAGS'] += ' -L{}'.format(self.ctx.python_recipe.link_root(arch.arch))\n env['LDFLAGS'] += ' -lpython{}'.format(self.ctx.python_recipe.link_version)\n return env\n\n\nrecipe = CffiRecipe()\n", "path": "pythonforandroid/recipes/cffi/__init__.py"}]}
| 1,595 | 153 |
gh_patches_debug_34459
|
rasdani/github-patches
|
git_diff
|
elastic__apm-agent-python-1833
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[META 773] Service environment correlation
See meta issue for the description and details:
- Meta issue: https://github.com/elastic/apm/issues/773
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `elasticapm/handlers/structlog.py`
Content:
```
1 # Copyright (c) 2019, Elasticsearch BV
2 # All rights reserved.
3 #
4 # Redistribution and use in source and binary forms, with or without
5 # modification, are permitted provided that the following conditions are met:
6 #
7 # * Redistributions of source code must retain the above copyright notice, this
8 # list of conditions and the following disclaimer.
9 #
10 # * Redistributions in binary form must reproduce the above copyright notice,
11 # this list of conditions and the following disclaimer in the documentation
12 # and/or other materials provided with the distribution.
13 #
14 # * Neither the name of the copyright holder nor the names of its
15 # contributors may be used to endorse or promote products derived from
16 # this software without specific prior written permission.
17 #
18 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
22 # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23 # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24 # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27
28
29 from __future__ import absolute_import
30
31 from elasticapm import get_client
32 from elasticapm.traces import execution_context
33
34
35 def structlog_processor(logger, method_name, event_dict):
36 """
37 Add three new entries to the event_dict for any processed events:
38
39 * transaction.id
40 * trace.id
41 * span.id
42
43 Only adds non-None IDs.
44
45 :param logger:
46 Unused (logger instance in structlog)
47 :param method_name:
48 Unused (wrapped method_name)
49 :param event_dict:
50 Event dictionary for the event we're processing
51 :return:
52 `event_dict`, with three new entries.
53 """
54 transaction = execution_context.get_transaction()
55 if transaction:
56 event_dict["transaction.id"] = transaction.id
57 client = get_client()
58 if client:
59 event_dict["service.name"] = client.config.service_name
60 if transaction and transaction.trace_parent:
61 event_dict["trace.id"] = transaction.trace_parent.trace_id
62 span = execution_context.get_span()
63 if span and span.id:
64 event_dict["span.id"] = span.id
65 return event_dict
66
```
Path: `elasticapm/handlers/logging.py`
Content:
```
1 # BSD 3-Clause License
2 #
3 # Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
4 # Copyright (c) 2019, Elasticsearch BV
5 # All rights reserved.
6 #
7 # Redistribution and use in source and binary forms, with or without
8 # modification, are permitted provided that the following conditions are met:
9 #
10 # * Redistributions of source code must retain the above copyright notice, this
11 # list of conditions and the following disclaimer.
12 #
13 # * Redistributions in binary form must reproduce the above copyright notice,
14 # this list of conditions and the following disclaimer in the documentation
15 # and/or other materials provided with the distribution.
16 #
17 # * Neither the name of the copyright holder nor the names of its
18 # contributors may be used to endorse or promote products derived from
19 # this software without specific prior written permission.
20 #
21 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23 # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24 # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25 # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26 # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30
31
32 from __future__ import absolute_import
33
34 import logging
35 import sys
36 import traceback
37 import warnings
38
39 import wrapt
40
41 from elasticapm import get_client
42 from elasticapm.base import Client
43 from elasticapm.traces import execution_context
44 from elasticapm.utils.stacks import iter_stack_frames
45
46
47 class LoggingHandler(logging.Handler):
48 def __init__(self, *args, **kwargs):
49 self.client = None
50 if "client" in kwargs:
51 self.client = kwargs.pop("client")
52 elif len(args) > 0:
53 arg = args[0]
54 if isinstance(arg, Client):
55 self.client = arg
56
57 if not self.client:
58 client_cls = kwargs.pop("client_cls", None)
59 if client_cls:
60 self.client = client_cls(*args, **kwargs)
61 else:
62 # In 6.0, this should raise a ValueError
63 warnings.warn(
64 "LoggingHandler requires a Client instance. No Client was "
65 "received. This will result in an error starting in v6.0 "
66 "of the agent",
67 PendingDeprecationWarning,
68 )
69 self.client = Client(*args, **kwargs)
70 logging.Handler.__init__(self, level=kwargs.get("level", logging.NOTSET))
71
72 def emit(self, record):
73 self.format(record)
74
75 # Avoid typical config issues by overriding loggers behavior
76 if record.name.startswith(("elasticapm.errors",)):
77 sys.stderr.write(record.getMessage() + "\n")
78 return
79
80 try:
81 return self._emit(record)
82 except Exception:
83 sys.stderr.write("Top level ElasticAPM exception caught - failed creating log record.\n")
84 sys.stderr.write(record.getMessage() + "\n")
85 sys.stderr.write(traceback.format_exc() + "\n")
86
87 try:
88 self.client.capture("Exception")
89 except Exception:
90 pass
91
92 def _emit(self, record, **kwargs):
93 data = {}
94
95 for k, v in record.__dict__.items():
96 if "." not in k and k not in ("culprit",):
97 continue
98 data[k] = v
99
100 stack = getattr(record, "stack", None)
101 if stack is True:
102 stack = iter_stack_frames(config=self.client.config)
103
104 if stack:
105 frames = []
106 started = False
107 last_mod = ""
108 for item in stack:
109 if isinstance(item, (list, tuple)):
110 frame, lineno = item
111 else:
112 frame, lineno = item, item.f_lineno
113
114 if not started:
115 f_globals = getattr(frame, "f_globals", {})
116 module_name = f_globals.get("__name__", "")
117 if last_mod.startswith("logging") and not module_name.startswith("logging"):
118 started = True
119 else:
120 last_mod = module_name
121 continue
122 frames.append((frame, lineno))
123 stack = frames
124
125 custom = getattr(record, "data", {})
126 # Add in all of the data from the record that we aren't already capturing
127 for k in record.__dict__.keys():
128 if k in (
129 "stack",
130 "name",
131 "args",
132 "msg",
133 "levelno",
134 "exc_text",
135 "exc_info",
136 "data",
137 "created",
138 "levelname",
139 "msecs",
140 "relativeCreated",
141 ):
142 continue
143 if k.startswith("_"):
144 continue
145 custom[k] = record.__dict__[k]
146
147 # If there's no exception being processed,
148 # exc_info may be a 3-tuple of None
149 # http://docs.python.org/library/sys.html#sys.exc_info
150 if record.exc_info and all(record.exc_info):
151 handler = self.client.get_handler("elasticapm.events.Exception")
152 exception = handler.capture(self.client, exc_info=record.exc_info)
153 else:
154 exception = None
155
156 return self.client.capture(
157 "Message",
158 param_message={"message": str(record.msg), "params": record.args},
159 stack=stack,
160 custom=custom,
161 exception=exception,
162 level=record.levelno,
163 logger_name=record.name,
164 **kwargs,
165 )
166
167
168 class LoggingFilter(logging.Filter):
169 """
170 This filter doesn't actually do any "filtering" -- rather, it just adds
171 three new attributes to any "filtered" LogRecord objects:
172
173 * elasticapm_transaction_id
174 * elasticapm_trace_id
175 * elasticapm_span_id
176 * elasticapm_service_name
177
178 These attributes can then be incorporated into your handlers and formatters,
179 so that you can tie log messages to transactions in elasticsearch.
180
181 This filter also adds these fields to a dictionary attribute,
182 `elasticapm_labels`, using the official tracing fields names as documented
183 here: https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html
184
185 Note that if you're using Python 3.2+, by default we will add a
186 LogRecordFactory to your root logger which will add these attributes
187 automatically.
188 """
189
190 def filter(self, record):
191 """
192 Add elasticapm attributes to `record`.
193 """
194 _add_attributes_to_log_record(record)
195 return True
196
197
198 @wrapt.decorator
199 def log_record_factory(wrapped, instance, args, kwargs):
200 """
201 Decorator, designed to wrap the python log record factory (fetched by
202 logging.getLogRecordFactory), adding the same custom attributes as in
203 the LoggingFilter provided above.
204
205 :return:
206 LogRecord object, with custom attributes for APM tracing fields
207 """
208 record = wrapped(*args, **kwargs)
209 return _add_attributes_to_log_record(record)
210
211
212 def _add_attributes_to_log_record(record):
213 """
214 Add custom attributes for APM tracing fields to a LogRecord object
215
216 :param record: LogRecord object
217 :return: Updated LogRecord object with new APM tracing fields
218 """
219 transaction = execution_context.get_transaction()
220
221 transaction_id = transaction.id if transaction else None
222 record.elasticapm_transaction_id = transaction_id
223
224 trace_id = transaction.trace_parent.trace_id if transaction and transaction.trace_parent else None
225 record.elasticapm_trace_id = trace_id
226
227 span = execution_context.get_span()
228 span_id = span.id if span else None
229 record.elasticapm_span_id = span_id
230
231 client = get_client()
232 service_name = client.config.service_name if client else None
233 record.elasticapm_service_name = service_name
234
235 record.elasticapm_labels = {
236 "transaction.id": transaction_id,
237 "trace.id": trace_id,
238 "span.id": span_id,
239 "service.name": service_name,
240 }
241
242 return record
243
244
245 class Formatter(logging.Formatter):
246 """
247 Custom formatter to automatically append the elasticapm format string,
248 as well as ensure that LogRecord objects actually have the required fields
249 (so as to avoid errors which could occur for logs before we override the
250 LogRecordFactory):
251
252 formatstring = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
253 formatstring = formatstring + " | elasticapm " \
254 "transaction.id=%(elasticapm_transaction_id)s " \
255 "trace.id=%(elasticapm_trace_id)s " \
256 "span.id=%(elasticapm_span_id)s"
257 """
258
259 def __init__(self, fmt=None, datefmt=None, style="%"):
260 if fmt is None:
261 fmt = "%(message)s"
262 fmt = (
263 fmt + " | elasticapm "
264 "transaction.id=%(elasticapm_transaction_id)s "
265 "trace.id=%(elasticapm_trace_id)s "
266 "span.id=%(elasticapm_span_id)s"
267 )
268 super(Formatter, self).__init__(fmt=fmt, datefmt=datefmt, style=style)
269
270 def format(self, record):
271 if not hasattr(record, "elasticapm_transaction_id"):
272 record.elasticapm_transaction_id = None
273 record.elasticapm_trace_id = None
274 record.elasticapm_span_id = None
275 record.elasticapm_service_name = None
276 return super(Formatter, self).format(record=record)
277
278 def formatTime(self, record, datefmt=None):
279 if not hasattr(record, "elasticapm_transaction_id"):
280 record.elasticapm_transaction_id = None
281 record.elasticapm_trace_id = None
282 record.elasticapm_span_id = None
283 record.elasticapm_service_name = None
284 return super(Formatter, self).formatTime(record=record, datefmt=datefmt)
285
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/elasticapm/handlers/logging.py b/elasticapm/handlers/logging.py
--- a/elasticapm/handlers/logging.py
+++ b/elasticapm/handlers/logging.py
@@ -232,11 +232,15 @@
service_name = client.config.service_name if client else None
record.elasticapm_service_name = service_name
+ service_environment = client.config.environment if client else None
+ record.elasticapm_service_environment = service_environment
+
record.elasticapm_labels = {
"transaction.id": transaction_id,
"trace.id": trace_id,
"span.id": span_id,
"service.name": service_name,
+ "service.environment": service_environment,
}
return record
@@ -273,6 +277,7 @@
record.elasticapm_trace_id = None
record.elasticapm_span_id = None
record.elasticapm_service_name = None
+ record.elasticapm_service_environment = None
return super(Formatter, self).format(record=record)
def formatTime(self, record, datefmt=None):
@@ -281,4 +286,5 @@
record.elasticapm_trace_id = None
record.elasticapm_span_id = None
record.elasticapm_service_name = None
+ record.elasticapm_service_environment = None
return super(Formatter, self).formatTime(record=record, datefmt=datefmt)
diff --git a/elasticapm/handlers/structlog.py b/elasticapm/handlers/structlog.py
--- a/elasticapm/handlers/structlog.py
+++ b/elasticapm/handlers/structlog.py
@@ -57,6 +57,7 @@
client = get_client()
if client:
event_dict["service.name"] = client.config.service_name
+ event_dict["service.environment"] = client.config.environment
if transaction and transaction.trace_parent:
event_dict["trace.id"] = transaction.trace_parent.trace_id
span = execution_context.get_span()
|
{"golden_diff": "diff --git a/elasticapm/handlers/logging.py b/elasticapm/handlers/logging.py\n--- a/elasticapm/handlers/logging.py\n+++ b/elasticapm/handlers/logging.py\n@@ -232,11 +232,15 @@\n service_name = client.config.service_name if client else None\n record.elasticapm_service_name = service_name\n \n+ service_environment = client.config.environment if client else None\n+ record.elasticapm_service_environment = service_environment\n+\n record.elasticapm_labels = {\n \"transaction.id\": transaction_id,\n \"trace.id\": trace_id,\n \"span.id\": span_id,\n \"service.name\": service_name,\n+ \"service.environment\": service_environment,\n }\n \n return record\n@@ -273,6 +277,7 @@\n record.elasticapm_trace_id = None\n record.elasticapm_span_id = None\n record.elasticapm_service_name = None\n+ record.elasticapm_service_environment = None\n return super(Formatter, self).format(record=record)\n \n def formatTime(self, record, datefmt=None):\n@@ -281,4 +286,5 @@\n record.elasticapm_trace_id = None\n record.elasticapm_span_id = None\n record.elasticapm_service_name = None\n+ record.elasticapm_service_environment = None\n return super(Formatter, self).formatTime(record=record, datefmt=datefmt)\ndiff --git a/elasticapm/handlers/structlog.py b/elasticapm/handlers/structlog.py\n--- a/elasticapm/handlers/structlog.py\n+++ b/elasticapm/handlers/structlog.py\n@@ -57,6 +57,7 @@\n client = get_client()\n if client:\n event_dict[\"service.name\"] = client.config.service_name\n+ event_dict[\"service.environment\"] = client.config.environment\n if transaction and transaction.trace_parent:\n event_dict[\"trace.id\"] = transaction.trace_parent.trace_id\n span = execution_context.get_span()\n", "issue": "[META 773] Service environment correlation\nSee meta issue for the description and details:\r\n- Meta issue: https://github.com/elastic/apm/issues/773\r\n\n", "before_files": [{"content": "# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n\nfrom __future__ import absolute_import\n\nfrom elasticapm import get_client\nfrom elasticapm.traces import execution_context\n\n\ndef structlog_processor(logger, method_name, event_dict):\n \"\"\"\n Add three new entries to the event_dict for any processed events:\n\n * transaction.id\n * trace.id\n * span.id\n\n Only adds non-None IDs.\n\n :param logger:\n Unused (logger instance in structlog)\n :param method_name:\n Unused (wrapped method_name)\n :param event_dict:\n Event dictionary for the event we're processing\n :return:\n `event_dict`, with three new entries.\n \"\"\"\n transaction = execution_context.get_transaction()\n if transaction:\n event_dict[\"transaction.id\"] = transaction.id\n client = get_client()\n if client:\n event_dict[\"service.name\"] = client.config.service_name\n if transaction and transaction.trace_parent:\n event_dict[\"trace.id\"] = transaction.trace_parent.trace_id\n span = execution_context.get_span()\n if span and span.id:\n event_dict[\"span.id\"] = span.id\n return event_dict\n", "path": "elasticapm/handlers/structlog.py"}, {"content": "# BSD 3-Clause License\n#\n# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details\n# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n\nfrom __future__ import absolute_import\n\nimport logging\nimport sys\nimport traceback\nimport warnings\n\nimport wrapt\n\nfrom elasticapm import get_client\nfrom elasticapm.base import Client\nfrom elasticapm.traces import execution_context\nfrom elasticapm.utils.stacks import iter_stack_frames\n\n\nclass LoggingHandler(logging.Handler):\n def __init__(self, *args, **kwargs):\n self.client = None\n if \"client\" in kwargs:\n self.client = kwargs.pop(\"client\")\n elif len(args) > 0:\n arg = args[0]\n if isinstance(arg, Client):\n self.client = arg\n\n if not self.client:\n client_cls = kwargs.pop(\"client_cls\", None)\n if client_cls:\n self.client = client_cls(*args, **kwargs)\n else:\n # In 6.0, this should raise a ValueError\n warnings.warn(\n \"LoggingHandler requires a Client instance. No Client was \"\n \"received. This will result in an error starting in v6.0 \"\n \"of the agent\",\n PendingDeprecationWarning,\n )\n self.client = Client(*args, **kwargs)\n logging.Handler.__init__(self, level=kwargs.get(\"level\", logging.NOTSET))\n\n def emit(self, record):\n self.format(record)\n\n # Avoid typical config issues by overriding loggers behavior\n if record.name.startswith((\"elasticapm.errors\",)):\n sys.stderr.write(record.getMessage() + \"\\n\")\n return\n\n try:\n return self._emit(record)\n except Exception:\n sys.stderr.write(\"Top level ElasticAPM exception caught - failed creating log record.\\n\")\n sys.stderr.write(record.getMessage() + \"\\n\")\n sys.stderr.write(traceback.format_exc() + \"\\n\")\n\n try:\n self.client.capture(\"Exception\")\n except Exception:\n pass\n\n def _emit(self, record, **kwargs):\n data = {}\n\n for k, v in record.__dict__.items():\n if \".\" not in k and k not in (\"culprit\",):\n continue\n data[k] = v\n\n stack = getattr(record, \"stack\", None)\n if stack is True:\n stack = iter_stack_frames(config=self.client.config)\n\n if stack:\n frames = []\n started = False\n last_mod = \"\"\n for item in stack:\n if isinstance(item, (list, tuple)):\n frame, lineno = item\n else:\n frame, lineno = item, item.f_lineno\n\n if not started:\n f_globals = getattr(frame, \"f_globals\", {})\n module_name = f_globals.get(\"__name__\", \"\")\n if last_mod.startswith(\"logging\") and not module_name.startswith(\"logging\"):\n started = True\n else:\n last_mod = module_name\n continue\n frames.append((frame, lineno))\n stack = frames\n\n custom = getattr(record, \"data\", {})\n # Add in all of the data from the record that we aren't already capturing\n for k in record.__dict__.keys():\n if k in (\n \"stack\",\n \"name\",\n \"args\",\n \"msg\",\n \"levelno\",\n \"exc_text\",\n \"exc_info\",\n \"data\",\n \"created\",\n \"levelname\",\n \"msecs\",\n \"relativeCreated\",\n ):\n continue\n if k.startswith(\"_\"):\n continue\n custom[k] = record.__dict__[k]\n\n # If there's no exception being processed,\n # exc_info may be a 3-tuple of None\n # http://docs.python.org/library/sys.html#sys.exc_info\n if record.exc_info and all(record.exc_info):\n handler = self.client.get_handler(\"elasticapm.events.Exception\")\n exception = handler.capture(self.client, exc_info=record.exc_info)\n else:\n exception = None\n\n return self.client.capture(\n \"Message\",\n param_message={\"message\": str(record.msg), \"params\": record.args},\n stack=stack,\n custom=custom,\n exception=exception,\n level=record.levelno,\n logger_name=record.name,\n **kwargs,\n )\n\n\nclass LoggingFilter(logging.Filter):\n \"\"\"\n This filter doesn't actually do any \"filtering\" -- rather, it just adds\n three new attributes to any \"filtered\" LogRecord objects:\n\n * elasticapm_transaction_id\n * elasticapm_trace_id\n * elasticapm_span_id\n * elasticapm_service_name\n\n These attributes can then be incorporated into your handlers and formatters,\n so that you can tie log messages to transactions in elasticsearch.\n\n This filter also adds these fields to a dictionary attribute,\n `elasticapm_labels`, using the official tracing fields names as documented\n here: https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html\n\n Note that if you're using Python 3.2+, by default we will add a\n LogRecordFactory to your root logger which will add these attributes\n automatically.\n \"\"\"\n\n def filter(self, record):\n \"\"\"\n Add elasticapm attributes to `record`.\n \"\"\"\n _add_attributes_to_log_record(record)\n return True\n\n\[email protected]\ndef log_record_factory(wrapped, instance, args, kwargs):\n \"\"\"\n Decorator, designed to wrap the python log record factory (fetched by\n logging.getLogRecordFactory), adding the same custom attributes as in\n the LoggingFilter provided above.\n\n :return:\n LogRecord object, with custom attributes for APM tracing fields\n \"\"\"\n record = wrapped(*args, **kwargs)\n return _add_attributes_to_log_record(record)\n\n\ndef _add_attributes_to_log_record(record):\n \"\"\"\n Add custom attributes for APM tracing fields to a LogRecord object\n\n :param record: LogRecord object\n :return: Updated LogRecord object with new APM tracing fields\n \"\"\"\n transaction = execution_context.get_transaction()\n\n transaction_id = transaction.id if transaction else None\n record.elasticapm_transaction_id = transaction_id\n\n trace_id = transaction.trace_parent.trace_id if transaction and transaction.trace_parent else None\n record.elasticapm_trace_id = trace_id\n\n span = execution_context.get_span()\n span_id = span.id if span else None\n record.elasticapm_span_id = span_id\n\n client = get_client()\n service_name = client.config.service_name if client else None\n record.elasticapm_service_name = service_name\n\n record.elasticapm_labels = {\n \"transaction.id\": transaction_id,\n \"trace.id\": trace_id,\n \"span.id\": span_id,\n \"service.name\": service_name,\n }\n\n return record\n\n\nclass Formatter(logging.Formatter):\n \"\"\"\n Custom formatter to automatically append the elasticapm format string,\n as well as ensure that LogRecord objects actually have the required fields\n (so as to avoid errors which could occur for logs before we override the\n LogRecordFactory):\n\n formatstring = \"%(asctime)s - %(name)s - %(levelname)s - %(message)s\"\n formatstring = formatstring + \" | elasticapm \" \\\n \"transaction.id=%(elasticapm_transaction_id)s \" \\\n \"trace.id=%(elasticapm_trace_id)s \" \\\n \"span.id=%(elasticapm_span_id)s\"\n \"\"\"\n\n def __init__(self, fmt=None, datefmt=None, style=\"%\"):\n if fmt is None:\n fmt = \"%(message)s\"\n fmt = (\n fmt + \" | elasticapm \"\n \"transaction.id=%(elasticapm_transaction_id)s \"\n \"trace.id=%(elasticapm_trace_id)s \"\n \"span.id=%(elasticapm_span_id)s\"\n )\n super(Formatter, self).__init__(fmt=fmt, datefmt=datefmt, style=style)\n\n def format(self, record):\n if not hasattr(record, \"elasticapm_transaction_id\"):\n record.elasticapm_transaction_id = None\n record.elasticapm_trace_id = None\n record.elasticapm_span_id = None\n record.elasticapm_service_name = None\n return super(Formatter, self).format(record=record)\n\n def formatTime(self, record, datefmt=None):\n if not hasattr(record, \"elasticapm_transaction_id\"):\n record.elasticapm_transaction_id = None\n record.elasticapm_trace_id = None\n record.elasticapm_span_id = None\n record.elasticapm_service_name = None\n return super(Formatter, self).formatTime(record=record, datefmt=datefmt)\n", "path": "elasticapm/handlers/logging.py"}], "after_files": [{"content": "# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n\nfrom __future__ import absolute_import\n\nfrom elasticapm import get_client\nfrom elasticapm.traces import execution_context\n\n\ndef structlog_processor(logger, method_name, event_dict):\n \"\"\"\n Add three new entries to the event_dict for any processed events:\n\n * transaction.id\n * trace.id\n * span.id\n\n Only adds non-None IDs.\n\n :param logger:\n Unused (logger instance in structlog)\n :param method_name:\n Unused (wrapped method_name)\n :param event_dict:\n Event dictionary for the event we're processing\n :return:\n `event_dict`, with three new entries.\n \"\"\"\n transaction = execution_context.get_transaction()\n if transaction:\n event_dict[\"transaction.id\"] = transaction.id\n client = get_client()\n if client:\n event_dict[\"service.name\"] = client.config.service_name\n event_dict[\"service.environment\"] = client.config.environment\n if transaction and transaction.trace_parent:\n event_dict[\"trace.id\"] = transaction.trace_parent.trace_id\n span = execution_context.get_span()\n if span and span.id:\n event_dict[\"span.id\"] = span.id\n return event_dict\n", "path": "elasticapm/handlers/structlog.py"}, {"content": "# BSD 3-Clause License\n#\n# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details\n# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n\nfrom __future__ import absolute_import\n\nimport logging\nimport sys\nimport traceback\nimport warnings\n\nimport wrapt\n\nfrom elasticapm import get_client\nfrom elasticapm.base import Client\nfrom elasticapm.traces import execution_context\nfrom elasticapm.utils.stacks import iter_stack_frames\n\n\nclass LoggingHandler(logging.Handler):\n def __init__(self, *args, **kwargs):\n self.client = None\n if \"client\" in kwargs:\n self.client = kwargs.pop(\"client\")\n elif len(args) > 0:\n arg = args[0]\n if isinstance(arg, Client):\n self.client = arg\n\n if not self.client:\n client_cls = kwargs.pop(\"client_cls\", None)\n if client_cls:\n self.client = client_cls(*args, **kwargs)\n else:\n # In 6.0, this should raise a ValueError\n warnings.warn(\n \"LoggingHandler requires a Client instance. No Client was \"\n \"received. This will result in an error starting in v6.0 \"\n \"of the agent\",\n PendingDeprecationWarning,\n )\n self.client = Client(*args, **kwargs)\n logging.Handler.__init__(self, level=kwargs.get(\"level\", logging.NOTSET))\n\n def emit(self, record):\n self.format(record)\n\n # Avoid typical config issues by overriding loggers behavior\n if record.name.startswith((\"elasticapm.errors\",)):\n sys.stderr.write(record.getMessage() + \"\\n\")\n return\n\n try:\n return self._emit(record)\n except Exception:\n sys.stderr.write(\"Top level ElasticAPM exception caught - failed creating log record.\\n\")\n sys.stderr.write(record.getMessage() + \"\\n\")\n sys.stderr.write(traceback.format_exc() + \"\\n\")\n\n try:\n self.client.capture(\"Exception\")\n except Exception:\n pass\n\n def _emit(self, record, **kwargs):\n data = {}\n\n for k, v in record.__dict__.items():\n if \".\" not in k and k not in (\"culprit\",):\n continue\n data[k] = v\n\n stack = getattr(record, \"stack\", None)\n if stack is True:\n stack = iter_stack_frames(config=self.client.config)\n\n if stack:\n frames = []\n started = False\n last_mod = \"\"\n for item in stack:\n if isinstance(item, (list, tuple)):\n frame, lineno = item\n else:\n frame, lineno = item, item.f_lineno\n\n if not started:\n f_globals = getattr(frame, \"f_globals\", {})\n module_name = f_globals.get(\"__name__\", \"\")\n if last_mod.startswith(\"logging\") and not module_name.startswith(\"logging\"):\n started = True\n else:\n last_mod = module_name\n continue\n frames.append((frame, lineno))\n stack = frames\n\n custom = getattr(record, \"data\", {})\n # Add in all of the data from the record that we aren't already capturing\n for k in record.__dict__.keys():\n if k in (\n \"stack\",\n \"name\",\n \"args\",\n \"msg\",\n \"levelno\",\n \"exc_text\",\n \"exc_info\",\n \"data\",\n \"created\",\n \"levelname\",\n \"msecs\",\n \"relativeCreated\",\n ):\n continue\n if k.startswith(\"_\"):\n continue\n custom[k] = record.__dict__[k]\n\n # If there's no exception being processed,\n # exc_info may be a 3-tuple of None\n # http://docs.python.org/library/sys.html#sys.exc_info\n if record.exc_info and all(record.exc_info):\n handler = self.client.get_handler(\"elasticapm.events.Exception\")\n exception = handler.capture(self.client, exc_info=record.exc_info)\n else:\n exception = None\n\n return self.client.capture(\n \"Message\",\n param_message={\"message\": str(record.msg), \"params\": record.args},\n stack=stack,\n custom=custom,\n exception=exception,\n level=record.levelno,\n logger_name=record.name,\n **kwargs,\n )\n\n\nclass LoggingFilter(logging.Filter):\n \"\"\"\n This filter doesn't actually do any \"filtering\" -- rather, it just adds\n three new attributes to any \"filtered\" LogRecord objects:\n\n * elasticapm_transaction_id\n * elasticapm_trace_id\n * elasticapm_span_id\n * elasticapm_service_name\n\n These attributes can then be incorporated into your handlers and formatters,\n so that you can tie log messages to transactions in elasticsearch.\n\n This filter also adds these fields to a dictionary attribute,\n `elasticapm_labels`, using the official tracing fields names as documented\n here: https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html\n\n Note that if you're using Python 3.2+, by default we will add a\n LogRecordFactory to your root logger which will add these attributes\n automatically.\n \"\"\"\n\n def filter(self, record):\n \"\"\"\n Add elasticapm attributes to `record`.\n \"\"\"\n _add_attributes_to_log_record(record)\n return True\n\n\[email protected]\ndef log_record_factory(wrapped, instance, args, kwargs):\n \"\"\"\n Decorator, designed to wrap the python log record factory (fetched by\n logging.getLogRecordFactory), adding the same custom attributes as in\n the LoggingFilter provided above.\n\n :return:\n LogRecord object, with custom attributes for APM tracing fields\n \"\"\"\n record = wrapped(*args, **kwargs)\n return _add_attributes_to_log_record(record)\n\n\ndef _add_attributes_to_log_record(record):\n \"\"\"\n Add custom attributes for APM tracing fields to a LogRecord object\n\n :param record: LogRecord object\n :return: Updated LogRecord object with new APM tracing fields\n \"\"\"\n transaction = execution_context.get_transaction()\n\n transaction_id = transaction.id if transaction else None\n record.elasticapm_transaction_id = transaction_id\n\n trace_id = transaction.trace_parent.trace_id if transaction and transaction.trace_parent else None\n record.elasticapm_trace_id = trace_id\n\n span = execution_context.get_span()\n span_id = span.id if span else None\n record.elasticapm_span_id = span_id\n\n client = get_client()\n service_name = client.config.service_name if client else None\n record.elasticapm_service_name = service_name\n\n service_environment = client.config.environment if client else None\n record.elasticapm_service_environment = service_environment\n\n record.elasticapm_labels = {\n \"transaction.id\": transaction_id,\n \"trace.id\": trace_id,\n \"span.id\": span_id,\n \"service.name\": service_name,\n \"service.environment\": service_environment,\n }\n\n return record\n\n\nclass Formatter(logging.Formatter):\n \"\"\"\n Custom formatter to automatically append the elasticapm format string,\n as well as ensure that LogRecord objects actually have the required fields\n (so as to avoid errors which could occur for logs before we override the\n LogRecordFactory):\n\n formatstring = \"%(asctime)s - %(name)s - %(levelname)s - %(message)s\"\n formatstring = formatstring + \" | elasticapm \" \\\n \"transaction.id=%(elasticapm_transaction_id)s \" \\\n \"trace.id=%(elasticapm_trace_id)s \" \\\n \"span.id=%(elasticapm_span_id)s\"\n \"\"\"\n\n def __init__(self, fmt=None, datefmt=None, style=\"%\"):\n if fmt is None:\n fmt = \"%(message)s\"\n fmt = (\n fmt + \" | elasticapm \"\n \"transaction.id=%(elasticapm_transaction_id)s \"\n \"trace.id=%(elasticapm_trace_id)s \"\n \"span.id=%(elasticapm_span_id)s\"\n )\n super(Formatter, self).__init__(fmt=fmt, datefmt=datefmt, style=style)\n\n def format(self, record):\n if not hasattr(record, \"elasticapm_transaction_id\"):\n record.elasticapm_transaction_id = None\n record.elasticapm_trace_id = None\n record.elasticapm_span_id = None\n record.elasticapm_service_name = None\n record.elasticapm_service_environment = None\n return super(Formatter, self).format(record=record)\n\n def formatTime(self, record, datefmt=None):\n if not hasattr(record, \"elasticapm_transaction_id\"):\n record.elasticapm_transaction_id = None\n record.elasticapm_trace_id = None\n record.elasticapm_span_id = None\n record.elasticapm_service_name = None\n record.elasticapm_service_environment = None\n return super(Formatter, self).formatTime(record=record, datefmt=datefmt)\n", "path": "elasticapm/handlers/logging.py"}]}
| 3,956 | 470 |
gh_patches_debug_25476
|
rasdani/github-patches
|
git_diff
|
AnalogJ__lexicon-476
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
TLDExtract Private Domains for Dynamic DNS Providers
Hello all,
I'm currently putting together the plugin for dynu.com (listed in NYI proiders).
As Dynu also acts as Dynamic DNS provider with several toplevel domains as base for the dynamic domain (`yourhost.dynu.net`, `yourhost.freeddns.org`..., also, wildcards), I had some trouble putting together the plugin.
As an example, I'm making up `mydomain.dynu.net` as my target dynamic dns hostname.
Now, the `tldextract` package used to determine the part of the domain that belongs to the toplevel will spit out `net` as the toplevel, `dynu` as the TLD and then drops `mydomain` in further processing as seen [in client.py](../blob/master/lexicon/client.py#L43).
In turn, finding the right domain from the list of dns entries in `_authenticate` is not possible by default (as `self.domain` is set to `dynu.net`).
I discovered two workarounds for this:
1. use `--delegated "mydomain.dynu.net"` to explicitly target the subdomain
2. change the code [in client.py](../blob/master/lexicon/client.py#L41) to this:
```python
extract = tldextract.TLDExtract(include_psl_private_domains=True)
# Process domain, strip subdomain
domain_parts = extract(
self.config.resolve('lexicon:domain'))
runtime_config['domain'] = '{0}.{1}'.format(
domain_parts.domain, domain_parts.suffix)
```
The latter is taken from [the tldextract README](https://github.com/john-kurkowski/tldextract#public-vs-private-domains).
And because Dynu probably isn't the only Dynamic DNS provider using subdomains for their users, I guess this should be the default solution.
There's a catch however that is still in ongoing development [tldextract#144](https://github.com/john-kurkowski/tldextract/pull/144):
The list of TLDs is cached on first load of the extension, so if the config is not set to `include_psl_private_domains` before the package is first initialized, it won't work. So either an update has to be triggered manually, or, lexicon should be installed and used from a virtualenv in the first place.
Since I'm already making use of method 2 in my dev enviroment, I could open a PR right away, but I'm not 100% sure on side effects for other plugins, hence my hesitation.
Thanks and best,
Chris
edit// whitespace in codeblock, typos, grammar
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lexicon/client.py`
Content:
```
1 """Main module of Lexicon. Defines the Client class, that holds all Lexicon logic."""
2 from __future__ import absolute_import
3 import importlib
4
5 import tldextract
6
7 from lexicon import discovery
8 from lexicon.config import (
9 ConfigResolver, DictConfigSource,
10 legacy_config_resolver, non_interactive_config_resolver,
11 )
12
13
14 class ProviderNotAvailableError(Exception):
15 """
16 Custom exception to raise when a provider is not available,
17 typically because some optional dependencies are missing
18 """
19
20
21 class Client(object): # pylint: disable=useless-object-inheritance,too-few-public-methods
22 """This is the Lexicon client, that will execute all the logic."""
23
24 def __init__(self, config=None):
25 if not config:
26 # If there is not config specified, we load a non-interactive configuration.
27 self.config = non_interactive_config_resolver()
28 elif not isinstance(config, ConfigResolver):
29 # If config is not a ConfigResolver, we are in a legacy situation.
30 # We protect this part of the Client API.
31 self.config = legacy_config_resolver(config)
32 else:
33 self.config = config
34
35 # Validate configuration
36 self._validate_config()
37
38 runtime_config = {}
39
40 # Process domain, strip subdomain
41 domain_parts = tldextract.extract(
42 self.config.resolve('lexicon:domain'))
43 runtime_config['domain'] = '{0}.{1}'.format(
44 domain_parts.domain, domain_parts.suffix)
45
46 if self.config.resolve('lexicon:delegated'):
47 # handle delegated domain
48 delegated = self.config.resolve('lexicon:delegated').rstrip('.')
49 if delegated != runtime_config.get('domain'):
50 # convert to relative name
51 if delegated.endswith(runtime_config.get('domain')):
52 delegated = delegated[:-len(runtime_config.get('domain'))]
53 delegated = delegated.rstrip('.')
54 # update domain
55 runtime_config['domain'] = '{0}.{1}'.format(
56 delegated, runtime_config.get('domain'))
57
58 self.action = self.config.resolve('lexicon:action')
59 self.provider_name = (self.config.resolve('lexicon:provider_name')
60 or self.config.resolve('lexicon:provider'))
61
62 self.config.add_config_source(DictConfigSource(runtime_config), 0)
63
64 provider_module = importlib.import_module(
65 'lexicon.providers.' + self.provider_name)
66 provider_class = getattr(provider_module, 'Provider')
67 self.provider = provider_class(self.config)
68
69 def execute(self):
70 """Execute provided configuration in class constructor to the DNS records"""
71 self.provider.authenticate()
72 identifier = self.config.resolve('lexicon:identifier')
73 record_type = self.config.resolve('lexicon:type')
74 name = self.config.resolve('lexicon:name')
75 content = self.config.resolve('lexicon:content')
76
77 if self.action == 'create':
78 return self.provider.create_record(record_type, name, content)
79
80 if self.action == 'list':
81 return self.provider.list_records(record_type, name, content)
82
83 if self.action == 'update':
84 return self.provider.update_record(identifier, record_type, name, content)
85
86 if self.action == 'delete':
87 return self.provider.delete_record(identifier, record_type, name, content)
88
89 raise ValueError('Invalid action statement: {0}'.format(self.action))
90
91 def _validate_config(self):
92 provider_name = self.config.resolve('lexicon:provider_name')
93 if not self.config.resolve('lexicon:provider_name'):
94 raise AttributeError('provider_name')
95
96 try:
97 available = discovery.find_providers()[self.config.resolve('lexicon:provider_name')]
98 except KeyError:
99 raise ProviderNotAvailableError('This provider ({0}) is not supported by Lexicon.'
100 .format(provider_name))
101 else:
102 if not available:
103 raise ProviderNotAvailableError(
104 'This provider ({0}) has required dependencies that are missing. '
105 'Please install lexicon[{0}] first.'.format(provider_name))
106
107 if not self.config.resolve('lexicon:action'):
108 raise AttributeError('action')
109 if not self.config.resolve('lexicon:domain'):
110 raise AttributeError('domain')
111 if not self.config.resolve('lexicon:type'):
112 raise AttributeError('type')
113
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/lexicon/client.py b/lexicon/client.py
--- a/lexicon/client.py
+++ b/lexicon/client.py
@@ -1,6 +1,7 @@
"""Main module of Lexicon. Defines the Client class, that holds all Lexicon logic."""
from __future__ import absolute_import
import importlib
+import os
import tldextract
@@ -10,6 +11,9 @@
legacy_config_resolver, non_interactive_config_resolver,
)
+TLDEXTRACT_CACHE_FILE_DEFAULT = os.path.join('~', '.lexicon_tld_set')
+TLDEXTRACT_CACHE_FILE = os.path.expanduser(os.environ.get("LEXICON_TLDEXTRACT_CACHE",
+ TLDEXTRACT_CACHE_FILE_DEFAULT))
class ProviderNotAvailableError(Exception):
"""
@@ -38,7 +42,9 @@
runtime_config = {}
# Process domain, strip subdomain
- domain_parts = tldextract.extract(
+ domain_extractor = tldextract.TLDExtract(cache_file=TLDEXTRACT_CACHE_FILE,
+ include_psl_private_domains=True)
+ domain_parts = domain_extractor(
self.config.resolve('lexicon:domain'))
runtime_config['domain'] = '{0}.{1}'.format(
domain_parts.domain, domain_parts.suffix)
|
{"golden_diff": "diff --git a/lexicon/client.py b/lexicon/client.py\n--- a/lexicon/client.py\n+++ b/lexicon/client.py\n@@ -1,6 +1,7 @@\n \"\"\"Main module of Lexicon. Defines the Client class, that holds all Lexicon logic.\"\"\"\n from __future__ import absolute_import\n import importlib\n+import os\n \n import tldextract\n \n@@ -10,6 +11,9 @@\n legacy_config_resolver, non_interactive_config_resolver,\n )\n \n+TLDEXTRACT_CACHE_FILE_DEFAULT = os.path.join('~', '.lexicon_tld_set')\n+TLDEXTRACT_CACHE_FILE = os.path.expanduser(os.environ.get(\"LEXICON_TLDEXTRACT_CACHE\",\n+ TLDEXTRACT_CACHE_FILE_DEFAULT))\n \n class ProviderNotAvailableError(Exception):\n \"\"\"\n@@ -38,7 +42,9 @@\n runtime_config = {}\n \n # Process domain, strip subdomain\n- domain_parts = tldextract.extract(\n+ domain_extractor = tldextract.TLDExtract(cache_file=TLDEXTRACT_CACHE_FILE,\n+ include_psl_private_domains=True)\n+ domain_parts = domain_extractor(\n self.config.resolve('lexicon:domain'))\n runtime_config['domain'] = '{0}.{1}'.format(\n domain_parts.domain, domain_parts.suffix)\n", "issue": "TLDExtract Private Domains for Dynamic DNS Providers\nHello all,\r\n\r\nI'm currently putting together the plugin for dynu.com (listed in NYI proiders).\r\nAs Dynu also acts as Dynamic DNS provider with several toplevel domains as base for the dynamic domain (`yourhost.dynu.net`, `yourhost.freeddns.org`..., also, wildcards), I had some trouble putting together the plugin.\r\n\r\nAs an example, I'm making up `mydomain.dynu.net` as my target dynamic dns hostname.\r\n\r\nNow, the `tldextract` package used to determine the part of the domain that belongs to the toplevel will spit out `net` as the toplevel, `dynu` as the TLD and then drops `mydomain` in further processing as seen [in client.py](../blob/master/lexicon/client.py#L43).\r\n\r\nIn turn, finding the right domain from the list of dns entries in `_authenticate` is not possible by default (as `self.domain` is set to `dynu.net`).\r\n\r\nI discovered two workarounds for this:\r\n\r\n1. use `--delegated \"mydomain.dynu.net\"` to explicitly target the subdomain\r\n2. change the code [in client.py](../blob/master/lexicon/client.py#L41) to this:\r\n\r\n```python\r\nextract = tldextract.TLDExtract(include_psl_private_domains=True)\r\n\r\n# Process domain, strip subdomain\r\ndomain_parts = extract(\r\n self.config.resolve('lexicon:domain'))\r\nruntime_config['domain'] = '{0}.{1}'.format(\r\n domain_parts.domain, domain_parts.suffix)\r\n```\r\n\r\nThe latter is taken from [the tldextract README](https://github.com/john-kurkowski/tldextract#public-vs-private-domains).\r\nAnd because Dynu probably isn't the only Dynamic DNS provider using subdomains for their users, I guess this should be the default solution.\r\nThere's a catch however that is still in ongoing development [tldextract#144](https://github.com/john-kurkowski/tldextract/pull/144):\r\nThe list of TLDs is cached on first load of the extension, so if the config is not set to `include_psl_private_domains` before the package is first initialized, it won't work. So either an update has to be triggered manually, or, lexicon should be installed and used from a virtualenv in the first place.\r\n\r\nSince I'm already making use of method 2 in my dev enviroment, I could open a PR right away, but I'm not 100% sure on side effects for other plugins, hence my hesitation.\r\n\r\nThanks and best,\r\nChris\r\n\r\nedit// whitespace in codeblock, typos, grammar\n", "before_files": [{"content": "\"\"\"Main module of Lexicon. Defines the Client class, that holds all Lexicon logic.\"\"\"\nfrom __future__ import absolute_import\nimport importlib\n\nimport tldextract\n\nfrom lexicon import discovery\nfrom lexicon.config import (\n ConfigResolver, DictConfigSource,\n legacy_config_resolver, non_interactive_config_resolver,\n)\n\n\nclass ProviderNotAvailableError(Exception):\n \"\"\"\n Custom exception to raise when a provider is not available,\n typically because some optional dependencies are missing\n \"\"\"\n\n\nclass Client(object): # pylint: disable=useless-object-inheritance,too-few-public-methods\n \"\"\"This is the Lexicon client, that will execute all the logic.\"\"\"\n\n def __init__(self, config=None):\n if not config:\n # If there is not config specified, we load a non-interactive configuration.\n self.config = non_interactive_config_resolver()\n elif not isinstance(config, ConfigResolver):\n # If config is not a ConfigResolver, we are in a legacy situation.\n # We protect this part of the Client API.\n self.config = legacy_config_resolver(config)\n else:\n self.config = config\n\n # Validate configuration\n self._validate_config()\n\n runtime_config = {}\n\n # Process domain, strip subdomain\n domain_parts = tldextract.extract(\n self.config.resolve('lexicon:domain'))\n runtime_config['domain'] = '{0}.{1}'.format(\n domain_parts.domain, domain_parts.suffix)\n\n if self.config.resolve('lexicon:delegated'):\n # handle delegated domain\n delegated = self.config.resolve('lexicon:delegated').rstrip('.')\n if delegated != runtime_config.get('domain'):\n # convert to relative name\n if delegated.endswith(runtime_config.get('domain')):\n delegated = delegated[:-len(runtime_config.get('domain'))]\n delegated = delegated.rstrip('.')\n # update domain\n runtime_config['domain'] = '{0}.{1}'.format(\n delegated, runtime_config.get('domain'))\n\n self.action = self.config.resolve('lexicon:action')\n self.provider_name = (self.config.resolve('lexicon:provider_name')\n or self.config.resolve('lexicon:provider'))\n\n self.config.add_config_source(DictConfigSource(runtime_config), 0)\n\n provider_module = importlib.import_module(\n 'lexicon.providers.' + self.provider_name)\n provider_class = getattr(provider_module, 'Provider')\n self.provider = provider_class(self.config)\n\n def execute(self):\n \"\"\"Execute provided configuration in class constructor to the DNS records\"\"\"\n self.provider.authenticate()\n identifier = self.config.resolve('lexicon:identifier')\n record_type = self.config.resolve('lexicon:type')\n name = self.config.resolve('lexicon:name')\n content = self.config.resolve('lexicon:content')\n\n if self.action == 'create':\n return self.provider.create_record(record_type, name, content)\n\n if self.action == 'list':\n return self.provider.list_records(record_type, name, content)\n\n if self.action == 'update':\n return self.provider.update_record(identifier, record_type, name, content)\n\n if self.action == 'delete':\n return self.provider.delete_record(identifier, record_type, name, content)\n\n raise ValueError('Invalid action statement: {0}'.format(self.action))\n\n def _validate_config(self):\n provider_name = self.config.resolve('lexicon:provider_name')\n if not self.config.resolve('lexicon:provider_name'):\n raise AttributeError('provider_name')\n\n try:\n available = discovery.find_providers()[self.config.resolve('lexicon:provider_name')]\n except KeyError:\n raise ProviderNotAvailableError('This provider ({0}) is not supported by Lexicon.'\n .format(provider_name))\n else:\n if not available:\n raise ProviderNotAvailableError(\n 'This provider ({0}) has required dependencies that are missing. '\n 'Please install lexicon[{0}] first.'.format(provider_name))\n\n if not self.config.resolve('lexicon:action'):\n raise AttributeError('action')\n if not self.config.resolve('lexicon:domain'):\n raise AttributeError('domain')\n if not self.config.resolve('lexicon:type'):\n raise AttributeError('type')\n", "path": "lexicon/client.py"}], "after_files": [{"content": "\"\"\"Main module of Lexicon. Defines the Client class, that holds all Lexicon logic.\"\"\"\nfrom __future__ import absolute_import\nimport importlib\nimport os\n\nimport tldextract\n\nfrom lexicon import discovery\nfrom lexicon.config import (\n ConfigResolver, DictConfigSource,\n legacy_config_resolver, non_interactive_config_resolver,\n)\n\nTLDEXTRACT_CACHE_FILE_DEFAULT = os.path.join('~', '.lexicon_tld_set')\nTLDEXTRACT_CACHE_FILE = os.path.expanduser(os.environ.get(\"LEXICON_TLDEXTRACT_CACHE\",\n TLDEXTRACT_CACHE_FILE_DEFAULT))\n\nclass ProviderNotAvailableError(Exception):\n \"\"\"\n Custom exception to raise when a provider is not available,\n typically because some optional dependencies are missing\n \"\"\"\n\n\nclass Client(object): # pylint: disable=useless-object-inheritance,too-few-public-methods\n \"\"\"This is the Lexicon client, that will execute all the logic.\"\"\"\n\n def __init__(self, config=None):\n if not config:\n # If there is not config specified, we load a non-interactive configuration.\n self.config = non_interactive_config_resolver()\n elif not isinstance(config, ConfigResolver):\n # If config is not a ConfigResolver, we are in a legacy situation.\n # We protect this part of the Client API.\n self.config = legacy_config_resolver(config)\n else:\n self.config = config\n\n # Validate configuration\n self._validate_config()\n\n runtime_config = {}\n\n # Process domain, strip subdomain\n domain_extractor = tldextract.TLDExtract(cache_file=TLDEXTRACT_CACHE_FILE,\n include_psl_private_domains=True)\n domain_parts = domain_extractor(\n self.config.resolve('lexicon:domain'))\n runtime_config['domain'] = '{0}.{1}'.format(\n domain_parts.domain, domain_parts.suffix)\n\n if self.config.resolve('lexicon:delegated'):\n # handle delegated domain\n delegated = self.config.resolve('lexicon:delegated').rstrip('.')\n if delegated != runtime_config.get('domain'):\n # convert to relative name\n if delegated.endswith(runtime_config.get('domain')):\n delegated = delegated[:-len(runtime_config.get('domain'))]\n delegated = delegated.rstrip('.')\n # update domain\n runtime_config['domain'] = '{0}.{1}'.format(\n delegated, runtime_config.get('domain'))\n\n self.action = self.config.resolve('lexicon:action')\n self.provider_name = (self.config.resolve('lexicon:provider_name')\n or self.config.resolve('lexicon:provider'))\n\n self.config.add_config_source(DictConfigSource(runtime_config), 0)\n\n provider_module = importlib.import_module(\n 'lexicon.providers.' + self.provider_name)\n provider_class = getattr(provider_module, 'Provider')\n self.provider = provider_class(self.config)\n\n def execute(self):\n \"\"\"Execute provided configuration in class constructor to the DNS records\"\"\"\n self.provider.authenticate()\n identifier = self.config.resolve('lexicon:identifier')\n record_type = self.config.resolve('lexicon:type')\n name = self.config.resolve('lexicon:name')\n content = self.config.resolve('lexicon:content')\n\n if self.action == 'create':\n return self.provider.create_record(record_type, name, content)\n\n if self.action == 'list':\n return self.provider.list_records(record_type, name, content)\n\n if self.action == 'update':\n return self.provider.update_record(identifier, record_type, name, content)\n\n if self.action == 'delete':\n return self.provider.delete_record(identifier, record_type, name, content)\n\n raise ValueError('Invalid action statement: {0}'.format(self.action))\n\n def _validate_config(self):\n provider_name = self.config.resolve('lexicon:provider_name')\n if not self.config.resolve('lexicon:provider_name'):\n raise AttributeError('provider_name')\n\n try:\n available = discovery.find_providers()[self.config.resolve('lexicon:provider_name')]\n except KeyError:\n raise ProviderNotAvailableError('This provider ({0}) is not supported by Lexicon.'\n .format(provider_name))\n else:\n if not available:\n raise ProviderNotAvailableError(\n 'This provider ({0}) has required dependencies that are missing. '\n 'Please install lexicon[{0}] first.'.format(provider_name))\n\n if not self.config.resolve('lexicon:action'):\n raise AttributeError('action')\n if not self.config.resolve('lexicon:domain'):\n raise AttributeError('domain')\n if not self.config.resolve('lexicon:type'):\n raise AttributeError('type')\n", "path": "lexicon/client.py"}]}
| 1,956 | 277 |
gh_patches_debug_39381
|
rasdani/github-patches
|
git_diff
|
hpcaitech__ColossalAI-3838
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[tensor] fix some unittests
[tensor] fix some unittests
[tensor] fix some unittests
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `colossalai/tensor/d_tensor/sharding_spec.py`
Content:
```
1 from copy import deepcopy
2 from typing import Dict, List
3
4 from ..utils import merge_same_dim_mesh_list
5 from .misc import ShardingOutOfIndexError
6
7 __all__ = ['DimSpec', 'ShardingException', 'ShardingSpec']
8
9 ALLGATHER_COST = 20
10 SHARD_COST = 5
11 STEP_PENALTY = 6
12 NAN = 'nan'
13
14
15 class DimSpec:
16 '''
17 Sharding spec for single dimension of the sharded tensor describe the sharding dimension of
18 logical device mesh and give a method to compute the difference between them.
19 This class is used internally in ShardingSpec.
20
21 Argument:
22 shard_list(List[int]): if shard_list is None, the dim spec will be 'R' type.
23 Otherwise, the element in shard_list means the data will be sharded in that dimension.
24 '''
25
26 def __init__(self, shard_list):
27 self.is_replica = len(shard_list) == 0
28 self.shard_list = shard_list
29 self.build_difference_2d_dict()
30
31 def __eq__(self, other):
32 return str(self) == str(other)
33
34 def __repr__(self):
35 if self.is_replica:
36 return 'R'
37 target = 'S'
38 for dim in self.shard_list:
39 target += str(dim)
40 return target
41
42 def _convert_str_to_shard_list(self, str_spec):
43 '''
44 Conver str_spec into shard_list.
45
46 Argument:
47 str_spec(str): dim spec in str type.
48 '''
49
50 if str_spec == 'R':
51 return []
52 if str_spec == 'S0':
53 return [0]
54 if str_spec == 'S1':
55 return [1]
56 if str_spec == 'S01':
57 return [0, 1]
58
59 def build_difference_2d_dict(self):
60 '''
61 Build a difference maping for 2D device mesh case. It will be used to
62 compute the difference between DimSpec pairs.
63 '''
64
65 source_spec_list = ['R', 'S0', 'S1', 'S01']
66 target_spec_list = ['R', 'S0', 'S1', 'S01']
67 difference_dict = {}
68 for source_spec in source_spec_list:
69 for target_spec in target_spec_list:
70 legal_sharding_dims = []
71 spec_pair = (deepcopy(source_spec), deepcopy(target_spec))
72 source_shard_list = self._convert_str_to_shard_list(source_spec)
73 target_shard_list = self._convert_str_to_shard_list(target_spec)
74
75 # source same as target
76 if source_shard_list == target_shard_list:
77 difference = 0
78
79 # all_gather(source) -> target
80 elif len(source_shard_list
81 ) == len(target_shard_list) + 1 and source_shard_list[:-1] == target_shard_list:
82 difference = ALLGATHER_COST
83
84 # shard(source) -> target
85 elif len(source_shard_list) == len(
86 target_shard_list) - 1 and source_shard_list == target_shard_list[:-1] and target_shard_list[
87 -1] not in source_shard_list:
88 difference = SHARD_COST
89
90 # S1 -> S0 or S0 -> S1
91 elif len(source_shard_list) == len(target_shard_list):
92 # source -> R -> target
93 difference = ALLGATHER_COST + STEP_PENALTY + SHARD_COST
94
95 # R -> S01
96 elif len(source_shard_list) == len(target_shard_list) - 2:
97 difference = SHARD_COST + STEP_PENALTY + SHARD_COST
98
99 # S01 -> R
100 elif len(source_shard_list) == len(target_shard_list) + 2:
101 difference = ALLGATHER_COST + STEP_PENALTY + ALLGATHER_COST
102
103 # S1 -> S01
104 elif len(source_shard_list) == len(target_shard_list) - 1:
105 difference = ALLGATHER_COST + STEP_PENALTY + SHARD_COST + STEP_PENALTY + SHARD_COST
106
107 # S01 -> S1
108 elif len(source_shard_list) == len(target_shard_list) + 1:
109 difference = ALLGATHER_COST + STEP_PENALTY + ALLGATHER_COST + STEP_PENALTY + SHARD_COST
110
111 else:
112 difference = NAN
113 difference_dict[spec_pair] = difference
114
115 self.difference_dict = difference_dict
116
117 def dim_diff(self, other):
118 '''
119 The difference between two _DimSpec.
120
121 Argument:
122 other(_DimSpec): the dim spec to compare with.
123
124 Return:
125 difference(int): the difference between two _DimSpec.
126
127 Example:
128 dim_spec = _DimSpec([0])
129 other_dim_spec = _DimSpec([0, 1])
130 print(dim_spec.difference(other_dim_spec))
131
132 Output:
133 5
134 '''
135 difference = self.difference_dict[(str(self), str(other))]
136 return difference
137
138
139 class ShardingSpec:
140 '''
141 Sharding spec describes how to shard a tensor with dim_size dimensions. The sharding sequence looks like
142 [R, R, S0, S1], which means
143
144 Argument:
145 dim_partition_dict(Dict[int, List[int]], optional): The key is the dimension of tensor to be sharded,
146 and the value of the key describe which logical axis will be sharded in that dimension.
147 sharding_sequence(List[DimSpec], optional): A straight view of ShardingSpec looks like [R, R, S0, S1].
148 '''
149
150 def __init__(self,
151 dim_size: int,
152 dim_partition_dict: Dict[int, List[int]] = None,
153 sharding_sequence: List[DimSpec] = None):
154 self.dims = dim_size
155 self.dim_partition_dict = dim_partition_dict
156 self.sharding_sequence = sharding_sequence
157 if self.sharding_sequence is None:
158 assert self.dim_partition_dict is not None, f'dim_partition_dict should not be None, if sharding_sequence is NoneType object.'
159 self.dim_partition_dict = merge_same_dim_mesh_list(dim_size=self.dims,
160 dim_partition_dict=self.dim_partition_dict)
161 self.sharding_sequence = self.convert_dict_to_shard_sequence()
162
163 elif self.dim_partition_dict is None:
164 assert self.sharding_sequence is not None, f'sharding_sequence should not be None, if dim_partition_dict is NoneType object.'
165 self.dim_partition_dict = self.convert_shard_sequence_to_dict()
166
167 self._sanity_check()
168
169 def _sanity_check(self):
170 if len(self.sharding_sequence) > self.dims:
171 raise ShardingOutOfIndexError(
172 f'sharding_sequence should have {self.dims} elements, but got index {len(self.sharding_sequence)}.')
173
174 if list(self.dim_partition_dict.keys()) and max(list(self.dim_partition_dict.keys())) >= self.dims:
175 raise ShardingOutOfIndexError(
176 f'the key of dim_partition_dict should be less than {self.dims}, but got {max(list(self.dim_partition_dict.keys()))}.'
177 )
178
179 def __repr__(self):
180 res_list = ["ShardingSpec:"]
181 res_list.append(f"\n\tshard_sequence: " + ",".join(str(dimspec) for dimspec in self.sharding_sequence))
182 return ' '.join(res_list)
183
184 def convert_dict_to_shard_sequence(self):
185 '''
186 Convert dim_partition_dict into list of DimSpec, and assign it to sharding_sequence.
187 '''
188 sharding_sequence = [DimSpec([])] * self.dims
189 for dim, shard_list in self.dim_partition_dict.items():
190 sharding_sequence[dim] = DimSpec(shard_list)
191 return sharding_sequence
192
193 def convert_shard_sequence_to_dict(self):
194 '''
195 Convert sharding_sequence into dim_partition_dict.
196 '''
197 new_dim_partition_dict = {}
198 for index, dim_spec in enumerate(self.sharding_sequence):
199 if not dim_spec.is_replica:
200 if index not in new_dim_partition_dict:
201 new_dim_partition_dict[index] = []
202 new_dim_partition_dict[index].extend(dim_spec.shard_list)
203 return new_dim_partition_dict
204
205 def spec_diff(self, other):
206 '''
207 This function is a naive version of difference computation. It just simply accumulates difference every dimension between the
208 pair of sharding sequence.
209
210 Example:
211 dim_partition_dict = {0: [0, 1]}
212 # DistSpec:
213 # shard_sequence: S01,R,R
214 # device_mesh_shape: (4, 4)
215 sharding_spec = ShardingSpec(device_mesh, entire_shape, dim_partition_dict)
216 dim_partition_dict_to_compare = {0: [0], 1: [1]}
217 # DistSpec:
218 # shard_sequence: S0,S1,R
219 # device_mesh_shape: (4, 4)
220 sharding_spec_to_compare = ShardingSpec(device_mesh, entire_shape, dim_partition_dict_to_compare)
221 print(sharding_spec.sharding_sequence_difference(sharding_spec_to_compare))
222
223 Output:
224 25
225
226 Argument:
227 other(ShardingSpec): The ShardingSpec to compared with.
228
229 Return:
230 difference(int): Difference between two ShardingSpec.
231 '''
232 assert len(self.sharding_sequence) == len(
233 other.sharding_sequence), f'Cannot compare difference for two sharding specs with different length.'
234 difference = 0
235 for orig_dim_spec, other_dim_spec in zip(self.sharding_sequence, other.sharding_sequence):
236 difference += orig_dim_spec.dim_diff(other_dim_spec)
237 return difference
238
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/colossalai/tensor/d_tensor/sharding_spec.py b/colossalai/tensor/d_tensor/sharding_spec.py
--- a/colossalai/tensor/d_tensor/sharding_spec.py
+++ b/colossalai/tensor/d_tensor/sharding_spec.py
@@ -116,21 +116,21 @@
def dim_diff(self, other):
'''
- The difference between two _DimSpec.
+ The difference between two DimSpec.
Argument:
- other(_DimSpec): the dim spec to compare with.
+ other(DimSpec): the dim spec to compare with.
Return:
difference(int): the difference between two _DimSpec.
Example:
- dim_spec = _DimSpec([0])
- other_dim_spec = _DimSpec([0, 1])
+ ```python
+ dim_spec = DimSpec([0])
+ other_dim_spec = DimSpec([0, 1])
print(dim_spec.difference(other_dim_spec))
-
- Output:
- 5
+ # output: 5
+ ```
'''
difference = self.difference_dict[(str(self), str(other))]
return difference
@@ -142,9 +142,13 @@
[R, R, S0, S1], which means
Argument:
- dim_partition_dict(Dict[int, List[int]], optional): The key is the dimension of tensor to be sharded,
- and the value of the key describe which logical axis will be sharded in that dimension.
- sharding_sequence(List[DimSpec], optional): A straight view of ShardingSpec looks like [R, R, S0, S1].
+ dim_size (int): The number of dimensions of the tensor to be sharded.
+ dim_partition_dict (Dict[int, List[int]], optional): The key is the dimension of tensor to be sharded,
+ and the value of the key describe which logical axis will be sharded in that dimension. Defaults to None.
+ E.g. {0: [0, 1]} means the first dimension of the tensor will be sharded in logical axis 0 and 1.
+ sharding_sequence (List[DimSpec], optional): A straight view of ShardingSpec looks like [R, R, S0, S1].
+ Generally, users should specify either dim_partition_dict or sharding_sequence.
+ If both are given, users must ensure that they are consistent with each other. Defaults to None.
'''
def __init__(self,
@@ -208,6 +212,7 @@
pair of sharding sequence.
Example:
+ ```python
dim_partition_dict = {0: [0, 1]}
# DistSpec:
# shard_sequence: S01,R,R
@@ -219,10 +224,8 @@
# device_mesh_shape: (4, 4)
sharding_spec_to_compare = ShardingSpec(device_mesh, entire_shape, dim_partition_dict_to_compare)
print(sharding_spec.sharding_sequence_difference(sharding_spec_to_compare))
-
- Output:
- 25
-
+ # output: 25
+ ```
Argument:
other(ShardingSpec): The ShardingSpec to compared with.
|
{"golden_diff": "diff --git a/colossalai/tensor/d_tensor/sharding_spec.py b/colossalai/tensor/d_tensor/sharding_spec.py\n--- a/colossalai/tensor/d_tensor/sharding_spec.py\n+++ b/colossalai/tensor/d_tensor/sharding_spec.py\n@@ -116,21 +116,21 @@\n \n def dim_diff(self, other):\n '''\n- The difference between two _DimSpec.\n+ The difference between two DimSpec.\n \n Argument:\n- other(_DimSpec): the dim spec to compare with.\n+ other(DimSpec): the dim spec to compare with.\n \n Return:\n difference(int): the difference between two _DimSpec.\n \n Example:\n- dim_spec = _DimSpec([0])\n- other_dim_spec = _DimSpec([0, 1])\n+ ```python\n+ dim_spec = DimSpec([0])\n+ other_dim_spec = DimSpec([0, 1])\n print(dim_spec.difference(other_dim_spec))\n-\n- Output:\n- 5\n+ # output: 5\n+ ```\n '''\n difference = self.difference_dict[(str(self), str(other))]\n return difference\n@@ -142,9 +142,13 @@\n [R, R, S0, S1], which means\n \n Argument:\n- dim_partition_dict(Dict[int, List[int]], optional): The key is the dimension of tensor to be sharded,\n- and the value of the key describe which logical axis will be sharded in that dimension.\n- sharding_sequence(List[DimSpec], optional): A straight view of ShardingSpec looks like [R, R, S0, S1].\n+ dim_size (int): The number of dimensions of the tensor to be sharded.\n+ dim_partition_dict (Dict[int, List[int]], optional): The key is the dimension of tensor to be sharded,\n+ and the value of the key describe which logical axis will be sharded in that dimension. Defaults to None.\n+ E.g. {0: [0, 1]} means the first dimension of the tensor will be sharded in logical axis 0 and 1.\n+ sharding_sequence (List[DimSpec], optional): A straight view of ShardingSpec looks like [R, R, S0, S1].\n+ Generally, users should specify either dim_partition_dict or sharding_sequence.\n+ If both are given, users must ensure that they are consistent with each other. Defaults to None.\n '''\n \n def __init__(self,\n@@ -208,6 +212,7 @@\n pair of sharding sequence.\n \n Example:\n+ ```python\n dim_partition_dict = {0: [0, 1]}\n # DistSpec:\n # shard_sequence: S01,R,R\n@@ -219,10 +224,8 @@\n # device_mesh_shape: (4, 4)\n sharding_spec_to_compare = ShardingSpec(device_mesh, entire_shape, dim_partition_dict_to_compare)\n print(sharding_spec.sharding_sequence_difference(sharding_spec_to_compare))\n-\n- Output:\n- 25\n-\n+ # output: 25\n+ ```\n Argument:\n other(ShardingSpec): The ShardingSpec to compared with.\n", "issue": "[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n", "before_files": [{"content": "from copy import deepcopy\nfrom typing import Dict, List\n\nfrom ..utils import merge_same_dim_mesh_list\nfrom .misc import ShardingOutOfIndexError\n\n__all__ = ['DimSpec', 'ShardingException', 'ShardingSpec']\n\nALLGATHER_COST = 20\nSHARD_COST = 5\nSTEP_PENALTY = 6\nNAN = 'nan'\n\n\nclass DimSpec:\n '''\n Sharding spec for single dimension of the sharded tensor describe the sharding dimension of\n logical device mesh and give a method to compute the difference between them.\n This class is used internally in ShardingSpec.\n\n Argument:\n shard_list(List[int]): if shard_list is None, the dim spec will be 'R' type.\n Otherwise, the element in shard_list means the data will be sharded in that dimension.\n '''\n\n def __init__(self, shard_list):\n self.is_replica = len(shard_list) == 0\n self.shard_list = shard_list\n self.build_difference_2d_dict()\n\n def __eq__(self, other):\n return str(self) == str(other)\n\n def __repr__(self):\n if self.is_replica:\n return 'R'\n target = 'S'\n for dim in self.shard_list:\n target += str(dim)\n return target\n\n def _convert_str_to_shard_list(self, str_spec):\n '''\n Conver str_spec into shard_list.\n\n Argument:\n str_spec(str): dim spec in str type.\n '''\n\n if str_spec == 'R':\n return []\n if str_spec == 'S0':\n return [0]\n if str_spec == 'S1':\n return [1]\n if str_spec == 'S01':\n return [0, 1]\n\n def build_difference_2d_dict(self):\n '''\n Build a difference maping for 2D device mesh case. It will be used to\n compute the difference between DimSpec pairs.\n '''\n\n source_spec_list = ['R', 'S0', 'S1', 'S01']\n target_spec_list = ['R', 'S0', 'S1', 'S01']\n difference_dict = {}\n for source_spec in source_spec_list:\n for target_spec in target_spec_list:\n legal_sharding_dims = []\n spec_pair = (deepcopy(source_spec), deepcopy(target_spec))\n source_shard_list = self._convert_str_to_shard_list(source_spec)\n target_shard_list = self._convert_str_to_shard_list(target_spec)\n\n # source same as target\n if source_shard_list == target_shard_list:\n difference = 0\n\n # all_gather(source) -> target\n elif len(source_shard_list\n ) == len(target_shard_list) + 1 and source_shard_list[:-1] == target_shard_list:\n difference = ALLGATHER_COST\n\n # shard(source) -> target\n elif len(source_shard_list) == len(\n target_shard_list) - 1 and source_shard_list == target_shard_list[:-1] and target_shard_list[\n -1] not in source_shard_list:\n difference = SHARD_COST\n\n # S1 -> S0 or S0 -> S1\n elif len(source_shard_list) == len(target_shard_list):\n # source -> R -> target\n difference = ALLGATHER_COST + STEP_PENALTY + SHARD_COST\n\n # R -> S01\n elif len(source_shard_list) == len(target_shard_list) - 2:\n difference = SHARD_COST + STEP_PENALTY + SHARD_COST\n\n # S01 -> R\n elif len(source_shard_list) == len(target_shard_list) + 2:\n difference = ALLGATHER_COST + STEP_PENALTY + ALLGATHER_COST\n\n # S1 -> S01\n elif len(source_shard_list) == len(target_shard_list) - 1:\n difference = ALLGATHER_COST + STEP_PENALTY + SHARD_COST + STEP_PENALTY + SHARD_COST\n\n # S01 -> S1\n elif len(source_shard_list) == len(target_shard_list) + 1:\n difference = ALLGATHER_COST + STEP_PENALTY + ALLGATHER_COST + STEP_PENALTY + SHARD_COST\n\n else:\n difference = NAN\n difference_dict[spec_pair] = difference\n\n self.difference_dict = difference_dict\n\n def dim_diff(self, other):\n '''\n The difference between two _DimSpec.\n\n Argument:\n other(_DimSpec): the dim spec to compare with.\n\n Return:\n difference(int): the difference between two _DimSpec.\n\n Example:\n dim_spec = _DimSpec([0])\n other_dim_spec = _DimSpec([0, 1])\n print(dim_spec.difference(other_dim_spec))\n\n Output:\n 5\n '''\n difference = self.difference_dict[(str(self), str(other))]\n return difference\n\n\nclass ShardingSpec:\n '''\n Sharding spec describes how to shard a tensor with dim_size dimensions. The sharding sequence looks like\n [R, R, S0, S1], which means\n\n Argument:\n dim_partition_dict(Dict[int, List[int]], optional): The key is the dimension of tensor to be sharded,\n and the value of the key describe which logical axis will be sharded in that dimension.\n sharding_sequence(List[DimSpec], optional): A straight view of ShardingSpec looks like [R, R, S0, S1].\n '''\n\n def __init__(self,\n dim_size: int,\n dim_partition_dict: Dict[int, List[int]] = None,\n sharding_sequence: List[DimSpec] = None):\n self.dims = dim_size\n self.dim_partition_dict = dim_partition_dict\n self.sharding_sequence = sharding_sequence\n if self.sharding_sequence is None:\n assert self.dim_partition_dict is not None, f'dim_partition_dict should not be None, if sharding_sequence is NoneType object.'\n self.dim_partition_dict = merge_same_dim_mesh_list(dim_size=self.dims,\n dim_partition_dict=self.dim_partition_dict)\n self.sharding_sequence = self.convert_dict_to_shard_sequence()\n\n elif self.dim_partition_dict is None:\n assert self.sharding_sequence is not None, f'sharding_sequence should not be None, if dim_partition_dict is NoneType object.'\n self.dim_partition_dict = self.convert_shard_sequence_to_dict()\n\n self._sanity_check()\n\n def _sanity_check(self):\n if len(self.sharding_sequence) > self.dims:\n raise ShardingOutOfIndexError(\n f'sharding_sequence should have {self.dims} elements, but got index {len(self.sharding_sequence)}.')\n\n if list(self.dim_partition_dict.keys()) and max(list(self.dim_partition_dict.keys())) >= self.dims:\n raise ShardingOutOfIndexError(\n f'the key of dim_partition_dict should be less than {self.dims}, but got {max(list(self.dim_partition_dict.keys()))}.'\n )\n\n def __repr__(self):\n res_list = [\"ShardingSpec:\"]\n res_list.append(f\"\\n\\tshard_sequence: \" + \",\".join(str(dimspec) for dimspec in self.sharding_sequence))\n return ' '.join(res_list)\n\n def convert_dict_to_shard_sequence(self):\n '''\n Convert dim_partition_dict into list of DimSpec, and assign it to sharding_sequence.\n '''\n sharding_sequence = [DimSpec([])] * self.dims\n for dim, shard_list in self.dim_partition_dict.items():\n sharding_sequence[dim] = DimSpec(shard_list)\n return sharding_sequence\n\n def convert_shard_sequence_to_dict(self):\n '''\n Convert sharding_sequence into dim_partition_dict.\n '''\n new_dim_partition_dict = {}\n for index, dim_spec in enumerate(self.sharding_sequence):\n if not dim_spec.is_replica:\n if index not in new_dim_partition_dict:\n new_dim_partition_dict[index] = []\n new_dim_partition_dict[index].extend(dim_spec.shard_list)\n return new_dim_partition_dict\n\n def spec_diff(self, other):\n '''\n This function is a naive version of difference computation. It just simply accumulates difference every dimension between the\n pair of sharding sequence.\n\n Example:\n dim_partition_dict = {0: [0, 1]}\n # DistSpec:\n # shard_sequence: S01,R,R\n # device_mesh_shape: (4, 4)\n sharding_spec = ShardingSpec(device_mesh, entire_shape, dim_partition_dict)\n dim_partition_dict_to_compare = {0: [0], 1: [1]}\n # DistSpec:\n # shard_sequence: S0,S1,R\n # device_mesh_shape: (4, 4)\n sharding_spec_to_compare = ShardingSpec(device_mesh, entire_shape, dim_partition_dict_to_compare)\n print(sharding_spec.sharding_sequence_difference(sharding_spec_to_compare))\n\n Output:\n 25\n\n Argument:\n other(ShardingSpec): The ShardingSpec to compared with.\n\n Return:\n difference(int): Difference between two ShardingSpec.\n '''\n assert len(self.sharding_sequence) == len(\n other.sharding_sequence), f'Cannot compare difference for two sharding specs with different length.'\n difference = 0\n for orig_dim_spec, other_dim_spec in zip(self.sharding_sequence, other.sharding_sequence):\n difference += orig_dim_spec.dim_diff(other_dim_spec)\n return difference\n", "path": "colossalai/tensor/d_tensor/sharding_spec.py"}], "after_files": [{"content": "from copy import deepcopy\nfrom typing import Dict, List\n\nfrom ..utils import merge_same_dim_mesh_list\nfrom .misc import ShardingOutOfIndexError\n\n__all__ = ['DimSpec', 'ShardingException', 'ShardingSpec']\n\nALLGATHER_COST = 20\nSHARD_COST = 5\nSTEP_PENALTY = 6\nNAN = 'nan'\n\n\nclass DimSpec:\n '''\n Sharding spec for single dimension of the sharded tensor describe the sharding dimension of\n logical device mesh and give a method to compute the difference between them.\n This class is used internally in ShardingSpec.\n\n Argument:\n shard_list(List[int]): if shard_list is None, the dim spec will be 'R' type.\n Otherwise, the element in shard_list means the data will be sharded in that dimension.\n '''\n\n def __init__(self, shard_list):\n self.is_replica = len(shard_list) == 0\n self.shard_list = shard_list\n self.build_difference_2d_dict()\n\n def __eq__(self, other):\n return str(self) == str(other)\n\n def __repr__(self):\n if self.is_replica:\n return 'R'\n target = 'S'\n for dim in self.shard_list:\n target += str(dim)\n return target\n\n def _convert_str_to_shard_list(self, str_spec):\n '''\n Conver str_spec into shard_list.\n\n Argument:\n str_spec(str): dim spec in str type.\n '''\n\n if str_spec == 'R':\n return []\n if str_spec == 'S0':\n return [0]\n if str_spec == 'S1':\n return [1]\n if str_spec == 'S01':\n return [0, 1]\n\n def build_difference_2d_dict(self):\n '''\n Build a difference maping for 2D device mesh case. It will be used to\n compute the difference between DimSpec pairs.\n '''\n\n source_spec_list = ['R', 'S0', 'S1', 'S01']\n target_spec_list = ['R', 'S0', 'S1', 'S01']\n difference_dict = {}\n for source_spec in source_spec_list:\n for target_spec in target_spec_list:\n legal_sharding_dims = []\n spec_pair = (deepcopy(source_spec), deepcopy(target_spec))\n source_shard_list = self._convert_str_to_shard_list(source_spec)\n target_shard_list = self._convert_str_to_shard_list(target_spec)\n\n # source same as target\n if source_shard_list == target_shard_list:\n difference = 0\n\n # all_gather(source) -> target\n elif len(source_shard_list\n ) == len(target_shard_list) + 1 and source_shard_list[:-1] == target_shard_list:\n difference = ALLGATHER_COST\n\n # shard(source) -> target\n elif len(source_shard_list) == len(\n target_shard_list) - 1 and source_shard_list == target_shard_list[:-1] and target_shard_list[\n -1] not in source_shard_list:\n difference = SHARD_COST\n\n # S1 -> S0 or S0 -> S1\n elif len(source_shard_list) == len(target_shard_list):\n # source -> R -> target\n difference = ALLGATHER_COST + STEP_PENALTY + SHARD_COST\n\n # R -> S01\n elif len(source_shard_list) == len(target_shard_list) - 2:\n difference = SHARD_COST + STEP_PENALTY + SHARD_COST\n\n # S01 -> R\n elif len(source_shard_list) == len(target_shard_list) + 2:\n difference = ALLGATHER_COST + STEP_PENALTY + ALLGATHER_COST\n\n # S1 -> S01\n elif len(source_shard_list) == len(target_shard_list) - 1:\n difference = ALLGATHER_COST + STEP_PENALTY + SHARD_COST + STEP_PENALTY + SHARD_COST\n\n # S01 -> S1\n elif len(source_shard_list) == len(target_shard_list) + 1:\n difference = ALLGATHER_COST + STEP_PENALTY + ALLGATHER_COST + STEP_PENALTY + SHARD_COST\n\n else:\n difference = NAN\n difference_dict[spec_pair] = difference\n\n self.difference_dict = difference_dict\n\n def dim_diff(self, other):\n '''\n The difference between two DimSpec.\n\n Argument:\n other(DimSpec): the dim spec to compare with.\n\n Return:\n difference(int): the difference between two _DimSpec.\n\n Example:\n ```python\n dim_spec = DimSpec([0])\n other_dim_spec = DimSpec([0, 1])\n print(dim_spec.difference(other_dim_spec))\n # output: 5\n ```\n '''\n difference = self.difference_dict[(str(self), str(other))]\n return difference\n\n\nclass ShardingSpec:\n '''\n Sharding spec describes how to shard a tensor with dim_size dimensions. The sharding sequence looks like\n [R, R, S0, S1], which means\n\n Argument:\n dim_size (int): The number of dimensions of the tensor to be sharded.\n dim_partition_dict (Dict[int, List[int]], optional): The key is the dimension of tensor to be sharded,\n and the value of the key describe which logical axis will be sharded in that dimension. Defaults to None.\n E.g. {0: [0, 1]} means the first dimension of the tensor will be sharded in logical axis 0 and 1.\n sharding_sequence (List[DimSpec], optional): A straight view of ShardingSpec looks like [R, R, S0, S1].\n Generally, users should specify either dim_partition_dict or sharding_sequence.\n If both are given, users must ensure that they are consistent with each other. Defaults to None.\n '''\n\n def __init__(self,\n dim_size: int,\n dim_partition_dict: Dict[int, List[int]] = None,\n sharding_sequence: List[DimSpec] = None):\n self.dims = dim_size\n self.dim_partition_dict = dim_partition_dict\n self.sharding_sequence = sharding_sequence\n if self.sharding_sequence is None:\n assert self.dim_partition_dict is not None, f'dim_partition_dict should not be None, if sharding_sequence is NoneType object.'\n self.dim_partition_dict = merge_same_dim_mesh_list(dim_size=self.dims,\n dim_partition_dict=self.dim_partition_dict)\n self.sharding_sequence = self.convert_dict_to_shard_sequence()\n\n elif self.dim_partition_dict is None:\n assert self.sharding_sequence is not None, f'sharding_sequence should not be None, if dim_partition_dict is NoneType object.'\n self.dim_partition_dict = self.convert_shard_sequence_to_dict()\n\n self._sanity_check()\n\n def _sanity_check(self):\n if len(self.sharding_sequence) > self.dims:\n raise ShardingOutOfIndexError(\n f'sharding_sequence should have {self.dims} elements, but got index {len(self.sharding_sequence)}.')\n\n if list(self.dim_partition_dict.keys()) and max(list(self.dim_partition_dict.keys())) >= self.dims:\n raise ShardingOutOfIndexError(\n f'the key of dim_partition_dict should be less than {self.dims}, but got {max(list(self.dim_partition_dict.keys()))}.'\n )\n\n def __repr__(self):\n res_list = [\"ShardingSpec:\"]\n res_list.append(f\"\\n\\tshard_sequence: \" + \",\".join(str(dimspec) for dimspec in self.sharding_sequence))\n return ' '.join(res_list)\n\n def convert_dict_to_shard_sequence(self):\n '''\n Convert dim_partition_dict into list of DimSpec, and assign it to sharding_sequence.\n '''\n sharding_sequence = [DimSpec([])] * self.dims\n for dim, shard_list in self.dim_partition_dict.items():\n sharding_sequence[dim] = DimSpec(shard_list)\n return sharding_sequence\n\n def convert_shard_sequence_to_dict(self):\n '''\n Convert sharding_sequence into dim_partition_dict.\n '''\n new_dim_partition_dict = {}\n for index, dim_spec in enumerate(self.sharding_sequence):\n if not dim_spec.is_replica:\n if index not in new_dim_partition_dict:\n new_dim_partition_dict[index] = []\n new_dim_partition_dict[index].extend(dim_spec.shard_list)\n return new_dim_partition_dict\n\n def spec_diff(self, other):\n '''\n This function is a naive version of difference computation. It just simply accumulates difference every dimension between the\n pair of sharding sequence.\n\n Example:\n ```python\n dim_partition_dict = {0: [0, 1]}\n # DistSpec:\n # shard_sequence: S01,R,R\n # device_mesh_shape: (4, 4)\n sharding_spec = ShardingSpec(device_mesh, entire_shape, dim_partition_dict)\n dim_partition_dict_to_compare = {0: [0], 1: [1]}\n # DistSpec:\n # shard_sequence: S0,S1,R\n # device_mesh_shape: (4, 4)\n sharding_spec_to_compare = ShardingSpec(device_mesh, entire_shape, dim_partition_dict_to_compare)\n print(sharding_spec.sharding_sequence_difference(sharding_spec_to_compare))\n # output: 25\n ```\n Argument:\n other(ShardingSpec): The ShardingSpec to compared with.\n\n Return:\n difference(int): Difference between two ShardingSpec.\n '''\n assert len(self.sharding_sequence) == len(\n other.sharding_sequence), f'Cannot compare difference for two sharding specs with different length.'\n difference = 0\n for orig_dim_spec, other_dim_spec in zip(self.sharding_sequence, other.sharding_sequence):\n difference += orig_dim_spec.dim_diff(other_dim_spec)\n return difference\n", "path": "colossalai/tensor/d_tensor/sharding_spec.py"}]}
| 3,007 | 727 |
gh_patches_debug_3931
|
rasdani/github-patches
|
git_diff
|
google__clusterfuzz-995
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Initialize issue_body_footer when reading issue tracker config
It seems like we forgot to add initialization of `issue_body_footer` in https://github.com/google/clusterfuzz/blob/dfe686cde7429ed0ba482b0b0b4f27106b0a0d5f/src/appengine/libs/issue_management/issue_tracker_policy.py#L114, that's why the issues filed recently do not have the text explaining that OSS-Fuzz team doesn't read bug tracked comments and can be contacted via GitHub only.
Bug example: https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=17216
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/appengine/libs/issue_management/issue_tracker_policy.py`
Content:
```
1 # Copyright 2019 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13 """Issue tracker policy."""
14
15 from builtins import object
16 from collections import namedtuple
17
18 from config import local_config
19
20 Status = namedtuple('Status',
21 ['assigned', 'duplicate', 'wontfix', 'fixed', 'verified'])
22
23 EXPECTED_STATUSES = [
24 'assigned',
25 'duplicate',
26 'wontfix',
27 'fixed',
28 'verified',
29 'new',
30 ]
31
32
33 class ConfigurationError(Exception):
34 """Base configuration error class."""
35
36
37 class NewIssuePolicy(object):
38 """New issue policy."""
39
40 def __init__(self):
41 self.status = ''
42 self.ccs = []
43 self.labels = []
44 self.issue_body_footer = ''
45
46
47 def _to_str_list(values):
48 """Convert a list to a list of strs."""
49 return [str(value) for value in values]
50
51
52 class IssueTrackerPolicy(object):
53 """Represents an issue tracker policy."""
54
55 def __init__(self, data):
56 self._data = data
57 if 'status' not in self._data:
58 raise ConfigurationError('Status not set in policies.')
59
60 if 'labels' not in self._data:
61 raise ConfigurationError('Labels not set in policies.')
62
63 for status in EXPECTED_STATUSES:
64 if status not in self._data['status']:
65 raise ConfigurationError(
66 'Expected status {} is not set.'.format(status))
67
68 def status(self, status_type):
69 """Get the actual status string for the given type."""
70 return self._data['status'][status_type]
71
72 def label(self, label_type):
73 """Get the actual label string for the given type."""
74 label = self._data['labels'].get(label_type)
75 if label is None:
76 return None
77
78 return str(label)
79
80 def substitution_mapping(self, label):
81 """Get an explicit substitution mapping."""
82 if 'substitutions' not in self._data:
83 return label
84
85 mapped = self._data['substitutions'].get(label)
86 if not mapped:
87 return label
88
89 return str(mapped)
90
91 @property
92 def deadline_policy_message(self):
93 """Get the deadline policy message, if if exists."""
94 return self._data.get('deadline_policy_message')
95
96 def get_new_issue_properties(self, is_security, is_crash):
97 """Get the properties to apply to a new issue."""
98 policy = NewIssuePolicy()
99
100 if 'all' in self._data:
101 self._apply_new_issue_properties(policy, self._data['all'], is_crash)
102
103 if is_security:
104 if 'security' in self._data:
105 self._apply_new_issue_properties(policy, self._data['security'],
106 is_crash)
107 else:
108 if 'non_security' in self._data:
109 self._apply_new_issue_properties(policy, self._data['non_security'],
110 is_crash)
111
112 return policy
113
114 def _apply_new_issue_properties(self, policy, issue_type, is_crash):
115 """Apply issue policies."""
116 if not issue_type:
117 return
118
119 if 'status' in issue_type:
120 policy.status = self._data['status'][issue_type['status']]
121
122 if 'ccs' in issue_type:
123 policy.labels.extend(issue_type['ccs'])
124
125 labels = issue_type.get('labels')
126 if labels:
127 policy.labels.extend(_to_str_list(labels))
128
129 if is_crash:
130 crash_labels = issue_type.get('crash_labels')
131 if crash_labels:
132 policy.labels.extend(_to_str_list(crash_labels))
133 else:
134 non_crash_labels = issue_type.get('non_crash_labels')
135 if non_crash_labels:
136 policy.labels.extend(_to_str_list(non_crash_labels))
137
138 def get_existing_issue_properties(self):
139 """Get the properties to apply to a new issue."""
140 policy = NewIssuePolicy()
141
142 if 'existing' in self._data:
143 self._apply_new_issue_properties(policy, self._data['existing'], False)
144
145 return policy
146
147
148 def get(project_name):
149 """Get policy."""
150 issue_tracker_config = local_config.IssueTrackerConfig()
151 project_config = issue_tracker_config.get(project_name)
152 if not project_config:
153 raise ConfigurationError(
154 'Issue tracker for {} does not exist'.format(project_name))
155
156 if not 'policies' in project_config:
157 raise ConfigurationError(
158 'Policies for {} do not exist'.format(project_name))
159
160 return IssueTrackerPolicy(project_config['policies'])
161
162
163 def get_empty():
164 """Get an empty policy."""
165 return IssueTrackerPolicy({
166 'status': {
167 'assigned': 'unused',
168 'duplicate': 'unused',
169 'wontfix': 'unused',
170 'fixed': 'unused',
171 'verified': 'unused',
172 'new': 'unused',
173 },
174 'labels': {},
175 })
176
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/appengine/libs/issue_management/issue_tracker_policy.py b/src/appengine/libs/issue_management/issue_tracker_policy.py
--- a/src/appengine/libs/issue_management/issue_tracker_policy.py
+++ b/src/appengine/libs/issue_management/issue_tracker_policy.py
@@ -126,6 +126,10 @@
if labels:
policy.labels.extend(_to_str_list(labels))
+ issue_body_footer = issue_type.get('issue_body_footer')
+ if issue_body_footer:
+ policy.issue_body_footer = issue_body_footer
+
if is_crash:
crash_labels = issue_type.get('crash_labels')
if crash_labels:
|
{"golden_diff": "diff --git a/src/appengine/libs/issue_management/issue_tracker_policy.py b/src/appengine/libs/issue_management/issue_tracker_policy.py\n--- a/src/appengine/libs/issue_management/issue_tracker_policy.py\n+++ b/src/appengine/libs/issue_management/issue_tracker_policy.py\n@@ -126,6 +126,10 @@\n if labels:\n policy.labels.extend(_to_str_list(labels))\n \n+ issue_body_footer = issue_type.get('issue_body_footer')\n+ if issue_body_footer:\n+ policy.issue_body_footer = issue_body_footer\n+\n if is_crash:\n crash_labels = issue_type.get('crash_labels')\n if crash_labels:\n", "issue": "Initialize issue_body_footer when reading issue tracker config\nIt seems like we forgot to add initialization of `issue_body_footer` in https://github.com/google/clusterfuzz/blob/dfe686cde7429ed0ba482b0b0b4f27106b0a0d5f/src/appengine/libs/issue_management/issue_tracker_policy.py#L114, that's why the issues filed recently do not have the text explaining that OSS-Fuzz team doesn't read bug tracked comments and can be contacted via GitHub only.\r\n\r\nBug example: https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=17216\n", "before_files": [{"content": "# Copyright 2019 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Issue tracker policy.\"\"\"\n\nfrom builtins import object\nfrom collections import namedtuple\n\nfrom config import local_config\n\nStatus = namedtuple('Status',\n ['assigned', 'duplicate', 'wontfix', 'fixed', 'verified'])\n\nEXPECTED_STATUSES = [\n 'assigned',\n 'duplicate',\n 'wontfix',\n 'fixed',\n 'verified',\n 'new',\n]\n\n\nclass ConfigurationError(Exception):\n \"\"\"Base configuration error class.\"\"\"\n\n\nclass NewIssuePolicy(object):\n \"\"\"New issue policy.\"\"\"\n\n def __init__(self):\n self.status = ''\n self.ccs = []\n self.labels = []\n self.issue_body_footer = ''\n\n\ndef _to_str_list(values):\n \"\"\"Convert a list to a list of strs.\"\"\"\n return [str(value) for value in values]\n\n\nclass IssueTrackerPolicy(object):\n \"\"\"Represents an issue tracker policy.\"\"\"\n\n def __init__(self, data):\n self._data = data\n if 'status' not in self._data:\n raise ConfigurationError('Status not set in policies.')\n\n if 'labels' not in self._data:\n raise ConfigurationError('Labels not set in policies.')\n\n for status in EXPECTED_STATUSES:\n if status not in self._data['status']:\n raise ConfigurationError(\n 'Expected status {} is not set.'.format(status))\n\n def status(self, status_type):\n \"\"\"Get the actual status string for the given type.\"\"\"\n return self._data['status'][status_type]\n\n def label(self, label_type):\n \"\"\"Get the actual label string for the given type.\"\"\"\n label = self._data['labels'].get(label_type)\n if label is None:\n return None\n\n return str(label)\n\n def substitution_mapping(self, label):\n \"\"\"Get an explicit substitution mapping.\"\"\"\n if 'substitutions' not in self._data:\n return label\n\n mapped = self._data['substitutions'].get(label)\n if not mapped:\n return label\n\n return str(mapped)\n\n @property\n def deadline_policy_message(self):\n \"\"\"Get the deadline policy message, if if exists.\"\"\"\n return self._data.get('deadline_policy_message')\n\n def get_new_issue_properties(self, is_security, is_crash):\n \"\"\"Get the properties to apply to a new issue.\"\"\"\n policy = NewIssuePolicy()\n\n if 'all' in self._data:\n self._apply_new_issue_properties(policy, self._data['all'], is_crash)\n\n if is_security:\n if 'security' in self._data:\n self._apply_new_issue_properties(policy, self._data['security'],\n is_crash)\n else:\n if 'non_security' in self._data:\n self._apply_new_issue_properties(policy, self._data['non_security'],\n is_crash)\n\n return policy\n\n def _apply_new_issue_properties(self, policy, issue_type, is_crash):\n \"\"\"Apply issue policies.\"\"\"\n if not issue_type:\n return\n\n if 'status' in issue_type:\n policy.status = self._data['status'][issue_type['status']]\n\n if 'ccs' in issue_type:\n policy.labels.extend(issue_type['ccs'])\n\n labels = issue_type.get('labels')\n if labels:\n policy.labels.extend(_to_str_list(labels))\n\n if is_crash:\n crash_labels = issue_type.get('crash_labels')\n if crash_labels:\n policy.labels.extend(_to_str_list(crash_labels))\n else:\n non_crash_labels = issue_type.get('non_crash_labels')\n if non_crash_labels:\n policy.labels.extend(_to_str_list(non_crash_labels))\n\n def get_existing_issue_properties(self):\n \"\"\"Get the properties to apply to a new issue.\"\"\"\n policy = NewIssuePolicy()\n\n if 'existing' in self._data:\n self._apply_new_issue_properties(policy, self._data['existing'], False)\n\n return policy\n\n\ndef get(project_name):\n \"\"\"Get policy.\"\"\"\n issue_tracker_config = local_config.IssueTrackerConfig()\n project_config = issue_tracker_config.get(project_name)\n if not project_config:\n raise ConfigurationError(\n 'Issue tracker for {} does not exist'.format(project_name))\n\n if not 'policies' in project_config:\n raise ConfigurationError(\n 'Policies for {} do not exist'.format(project_name))\n\n return IssueTrackerPolicy(project_config['policies'])\n\n\ndef get_empty():\n \"\"\"Get an empty policy.\"\"\"\n return IssueTrackerPolicy({\n 'status': {\n 'assigned': 'unused',\n 'duplicate': 'unused',\n 'wontfix': 'unused',\n 'fixed': 'unused',\n 'verified': 'unused',\n 'new': 'unused',\n },\n 'labels': {},\n })\n", "path": "src/appengine/libs/issue_management/issue_tracker_policy.py"}], "after_files": [{"content": "# Copyright 2019 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Issue tracker policy.\"\"\"\n\nfrom builtins import object\nfrom collections import namedtuple\n\nfrom config import local_config\n\nStatus = namedtuple('Status',\n ['assigned', 'duplicate', 'wontfix', 'fixed', 'verified'])\n\nEXPECTED_STATUSES = [\n 'assigned',\n 'duplicate',\n 'wontfix',\n 'fixed',\n 'verified',\n 'new',\n]\n\n\nclass ConfigurationError(Exception):\n \"\"\"Base configuration error class.\"\"\"\n\n\nclass NewIssuePolicy(object):\n \"\"\"New issue policy.\"\"\"\n\n def __init__(self):\n self.status = ''\n self.ccs = []\n self.labels = []\n self.issue_body_footer = ''\n\n\ndef _to_str_list(values):\n \"\"\"Convert a list to a list of strs.\"\"\"\n return [str(value) for value in values]\n\n\nclass IssueTrackerPolicy(object):\n \"\"\"Represents an issue tracker policy.\"\"\"\n\n def __init__(self, data):\n self._data = data\n if 'status' not in self._data:\n raise ConfigurationError('Status not set in policies.')\n\n if 'labels' not in self._data:\n raise ConfigurationError('Labels not set in policies.')\n\n for status in EXPECTED_STATUSES:\n if status not in self._data['status']:\n raise ConfigurationError(\n 'Expected status {} is not set.'.format(status))\n\n def status(self, status_type):\n \"\"\"Get the actual status string for the given type.\"\"\"\n return self._data['status'][status_type]\n\n def label(self, label_type):\n \"\"\"Get the actual label string for the given type.\"\"\"\n label = self._data['labels'].get(label_type)\n if label is None:\n return None\n\n return str(label)\n\n def substitution_mapping(self, label):\n \"\"\"Get an explicit substitution mapping.\"\"\"\n if 'substitutions' not in self._data:\n return label\n\n mapped = self._data['substitutions'].get(label)\n if not mapped:\n return label\n\n return str(mapped)\n\n @property\n def deadline_policy_message(self):\n \"\"\"Get the deadline policy message, if if exists.\"\"\"\n return self._data.get('deadline_policy_message')\n\n def get_new_issue_properties(self, is_security, is_crash):\n \"\"\"Get the properties to apply to a new issue.\"\"\"\n policy = NewIssuePolicy()\n\n if 'all' in self._data:\n self._apply_new_issue_properties(policy, self._data['all'], is_crash)\n\n if is_security:\n if 'security' in self._data:\n self._apply_new_issue_properties(policy, self._data['security'],\n is_crash)\n else:\n if 'non_security' in self._data:\n self._apply_new_issue_properties(policy, self._data['non_security'],\n is_crash)\n\n return policy\n\n def _apply_new_issue_properties(self, policy, issue_type, is_crash):\n \"\"\"Apply issue policies.\"\"\"\n if not issue_type:\n return\n\n if 'status' in issue_type:\n policy.status = self._data['status'][issue_type['status']]\n\n if 'ccs' in issue_type:\n policy.labels.extend(issue_type['ccs'])\n\n labels = issue_type.get('labels')\n if labels:\n policy.labels.extend(_to_str_list(labels))\n\n issue_body_footer = issue_type.get('issue_body_footer')\n if issue_body_footer:\n policy.issue_body_footer = issue_body_footer\n\n if is_crash:\n crash_labels = issue_type.get('crash_labels')\n if crash_labels:\n policy.labels.extend(_to_str_list(crash_labels))\n else:\n non_crash_labels = issue_type.get('non_crash_labels')\n if non_crash_labels:\n policy.labels.extend(_to_str_list(non_crash_labels))\n\n def get_existing_issue_properties(self):\n \"\"\"Get the properties to apply to a new issue.\"\"\"\n policy = NewIssuePolicy()\n\n if 'existing' in self._data:\n self._apply_new_issue_properties(policy, self._data['existing'], False)\n\n return policy\n\n\ndef get(project_name):\n \"\"\"Get policy.\"\"\"\n issue_tracker_config = local_config.IssueTrackerConfig()\n project_config = issue_tracker_config.get(project_name)\n if not project_config:\n raise ConfigurationError(\n 'Issue tracker for {} does not exist'.format(project_name))\n\n if not 'policies' in project_config:\n raise ConfigurationError(\n 'Policies for {} do not exist'.format(project_name))\n\n return IssueTrackerPolicy(project_config['policies'])\n\n\ndef get_empty():\n \"\"\"Get an empty policy.\"\"\"\n return IssueTrackerPolicy({\n 'status': {\n 'assigned': 'unused',\n 'duplicate': 'unused',\n 'wontfix': 'unused',\n 'fixed': 'unused',\n 'verified': 'unused',\n 'new': 'unused',\n },\n 'labels': {},\n })\n", "path": "src/appengine/libs/issue_management/issue_tracker_policy.py"}]}
| 1,989 | 146 |
gh_patches_debug_3903
|
rasdani/github-patches
|
git_diff
|
archlinux__archinstall-1674
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
archinstall won't start due to TypeError
# Error
When I execute `archinstall` I get the following error:
```
...
File "/usr/lib/python3.10/site-packages/archinstall/lib/packages/packages.py", line 115, in installed_package
return LocalPackage(**package_info)
TypeError: LocalPackage.__init__() got an unexpected keyword argument 'warning'
```
# What I tried
I looked at the code of `installed_package` and found it executes `pacman -Q --info {package}`, so I tried to execute that with archlinux-keyring to see what the output was like.
I executed `pacman -Q --info archlinux-keyring` and got the following output
```
warning: config file /etc/pacman.conf, line 19: directive 'SyncFirst' in section 'options' not recognized
Name : archlinux-keyring
Version : 20230225-1
...
```
# Why this seems to be happening
## Code
https://github.com/archlinux/archinstall/blob/8f6cc07062968b259bebd346521ef685c16f89dc/archinstall/lib/packages/packages.py#L105-L115
## Explanation
Because the line `warning: config file /etc/pacman.conf, line 19: directive 'SyncFirst' in section 'options' not recognized` I get as part of the output of `pacman -Q --info {package}` has a colon it is being interpreted as a key value pair.
# Possible fix
Ignore all lines that start with 'warning'
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `archinstall/lib/packages/packages.py`
Content:
```
1 import json
2 import ssl
3 from typing import Dict, Any, Tuple, List
4 from urllib.error import HTTPError
5 from urllib.parse import urlencode
6 from urllib.request import urlopen
7
8 from ..exceptions import PackageError, SysCallError
9 from ..models.dataclasses import PackageSearch, PackageSearchResult, LocalPackage
10 from ..pacman import run_pacman
11
12 BASE_URL_PKG_SEARCH = 'https://archlinux.org/packages/search/json/'
13 # BASE_URL_PKG_CONTENT = 'https://archlinux.org/packages/search/json/'
14 BASE_GROUP_URL = 'https://archlinux.org/groups/search/json/'
15
16
17 def _make_request(url: str, params: Dict) -> Any:
18 ssl_context = ssl.create_default_context()
19 ssl_context.check_hostname = False
20 ssl_context.verify_mode = ssl.CERT_NONE
21
22 encoded = urlencode(params)
23 full_url = f'{url}?{encoded}'
24
25 return urlopen(full_url, context=ssl_context)
26
27
28 def group_search(name :str) -> List[PackageSearchResult]:
29 # TODO UPSTREAM: Implement /json/ for the groups search
30 try:
31 response = _make_request(BASE_GROUP_URL, {'name': name})
32 except HTTPError as err:
33 if err.code == 404:
34 return []
35 else:
36 raise err
37
38 # Just to be sure some code didn't slip through the exception
39 data = response.read().decode('UTF-8')
40
41 return [PackageSearchResult(**package) for package in json.loads(data)['results']]
42
43
44 def package_search(package :str) -> PackageSearch:
45 """
46 Finds a specific package via the package database.
47 It makes a simple web-request, which might be a bit slow.
48 """
49 # TODO UPSTREAM: Implement bulk search, either support name=X&name=Y or split on space (%20 or ' ')
50 # TODO: utilize pacman cache first, upstream second.
51 response = _make_request(BASE_URL_PKG_SEARCH, {'name': package})
52
53 if response.code != 200:
54 raise PackageError(f"Could not locate package: [{response.code}] {response}")
55
56 data = response.read().decode('UTF-8')
57
58 return PackageSearch(**json.loads(data))
59
60
61 def find_package(package :str) -> List[PackageSearchResult]:
62 data = package_search(package)
63 results = []
64
65 for result in data.results:
66 if result.pkgname == package:
67 results.append(result)
68
69 # If we didn't find the package in the search results,
70 # odds are it's a group package
71 if not results:
72 # Check if the package is actually a group
73 for result in group_search(package):
74 results.append(result)
75
76 return results
77
78
79 def find_packages(*names :str) -> Dict[str, Any]:
80 """
81 This function returns the search results for many packages.
82 The function itself is rather slow, so consider not sending to
83 many packages to the search query.
84 """
85 result = {}
86 for package in names:
87 for found_package in find_package(package):
88 result[package] = found_package
89
90 return result
91
92
93 def validate_package_list(packages :list) -> Tuple[list, list]:
94 """
95 Validates a list of given packages.
96 return: Tuple of lists containing valid packavges in the first and invalid
97 packages in the second entry
98 """
99 valid_packages = {package for package in packages if find_package(package)}
100 invalid_packages = set(packages) - valid_packages
101
102 return list(valid_packages), list(invalid_packages)
103
104
105 def installed_package(package :str) -> LocalPackage:
106 package_info = {}
107 try:
108 for line in run_pacman(f"-Q --info {package}"):
109 if b':' in line:
110 key, value = line.decode().split(':', 1)
111 package_info[key.strip().lower().replace(' ', '_')] = value.strip()
112 except SysCallError:
113 pass
114
115 return LocalPackage(**package_info)
116
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/archinstall/lib/packages/packages.py b/archinstall/lib/packages/packages.py
--- a/archinstall/lib/packages/packages.py
+++ b/archinstall/lib/packages/packages.py
@@ -1,3 +1,4 @@
+import dataclasses
import json
import ssl
from typing import Dict, Any, Tuple, List
@@ -112,4 +113,4 @@
except SysCallError:
pass
- return LocalPackage(**package_info)
+ return LocalPackage({field.name: package_info.get(field.name) for field in dataclasses.fields(LocalPackage)})
|
{"golden_diff": "diff --git a/archinstall/lib/packages/packages.py b/archinstall/lib/packages/packages.py\n--- a/archinstall/lib/packages/packages.py\n+++ b/archinstall/lib/packages/packages.py\n@@ -1,3 +1,4 @@\n+import dataclasses\n import json\n import ssl\n from typing import Dict, Any, Tuple, List\n@@ -112,4 +113,4 @@\n \texcept SysCallError:\n \t\tpass\n \n-\treturn LocalPackage(**package_info)\n+\treturn LocalPackage({field.name: package_info.get(field.name) for field in dataclasses.fields(LocalPackage)})\n", "issue": "archinstall won't start due to TypeError\n# Error\r\nWhen I execute `archinstall` I get the following error:\r\n\r\n```\r\n...\r\nFile \"/usr/lib/python3.10/site-packages/archinstall/lib/packages/packages.py\", line 115, in installed_package\r\n return LocalPackage(**package_info)\r\nTypeError: LocalPackage.__init__() got an unexpected keyword argument 'warning'\r\n```\r\n# What I tried\r\nI looked at the code of `installed_package` and found it executes `pacman -Q --info {package}`, so I tried to execute that with archlinux-keyring to see what the output was like.\r\n\r\nI executed `pacman -Q --info archlinux-keyring` and got the following output\r\n```\r\nwarning: config file /etc/pacman.conf, line 19: directive 'SyncFirst' in section 'options' not recognized\r\nName \t: archlinux-keyring\r\nVersion : 20230225-1\r\n...\r\n```\r\n# Why this seems to be happening\r\n## Code\r\nhttps://github.com/archlinux/archinstall/blob/8f6cc07062968b259bebd346521ef685c16f89dc/archinstall/lib/packages/packages.py#L105-L115\r\n## Explanation \r\nBecause the line `warning: config file /etc/pacman.conf, line 19: directive 'SyncFirst' in section 'options' not recognized` I get as part of the output of `pacman -Q --info {package}` has a colon it is being interpreted as a key value pair.\r\n\r\n# Possible fix\r\nIgnore all lines that start with 'warning'\r\n\r\n\n", "before_files": [{"content": "import json\nimport ssl\nfrom typing import Dict, Any, Tuple, List\nfrom urllib.error import HTTPError\nfrom urllib.parse import urlencode\nfrom urllib.request import urlopen\n\nfrom ..exceptions import PackageError, SysCallError\nfrom ..models.dataclasses import PackageSearch, PackageSearchResult, LocalPackage\nfrom ..pacman import run_pacman\n\nBASE_URL_PKG_SEARCH = 'https://archlinux.org/packages/search/json/'\n# BASE_URL_PKG_CONTENT = 'https://archlinux.org/packages/search/json/'\nBASE_GROUP_URL = 'https://archlinux.org/groups/search/json/'\n\n\ndef _make_request(url: str, params: Dict) -> Any:\n\tssl_context = ssl.create_default_context()\n\tssl_context.check_hostname = False\n\tssl_context.verify_mode = ssl.CERT_NONE\n\n\tencoded = urlencode(params)\n\tfull_url = f'{url}?{encoded}'\n\n\treturn urlopen(full_url, context=ssl_context)\n\n\ndef group_search(name :str) -> List[PackageSearchResult]:\n\t# TODO UPSTREAM: Implement /json/ for the groups search\n\ttry:\n\t\tresponse = _make_request(BASE_GROUP_URL, {'name': name})\n\texcept HTTPError as err:\n\t\tif err.code == 404:\n\t\t\treturn []\n\t\telse:\n\t\t\traise err\n\n\t# Just to be sure some code didn't slip through the exception\n\tdata = response.read().decode('UTF-8')\n\n\treturn [PackageSearchResult(**package) for package in json.loads(data)['results']]\n\n\ndef package_search(package :str) -> PackageSearch:\n\t\"\"\"\n\tFinds a specific package via the package database.\n\tIt makes a simple web-request, which might be a bit slow.\n\t\"\"\"\n\t# TODO UPSTREAM: Implement bulk search, either support name=X&name=Y or split on space (%20 or ' ')\n\t# TODO: utilize pacman cache first, upstream second.\n\tresponse = _make_request(BASE_URL_PKG_SEARCH, {'name': package})\n\n\tif response.code != 200:\n\t\traise PackageError(f\"Could not locate package: [{response.code}] {response}\")\n\n\tdata = response.read().decode('UTF-8')\n\n\treturn PackageSearch(**json.loads(data))\n\n\ndef find_package(package :str) -> List[PackageSearchResult]:\n\tdata = package_search(package)\n\tresults = []\n\n\tfor result in data.results:\n\t\tif result.pkgname == package:\n\t\t\tresults.append(result)\n\n\t# If we didn't find the package in the search results,\n\t# odds are it's a group package\n\tif not results:\n\t\t# Check if the package is actually a group\n\t\tfor result in group_search(package):\n\t\t\tresults.append(result)\n\n\treturn results\n\n\ndef find_packages(*names :str) -> Dict[str, Any]:\n\t\"\"\"\n\tThis function returns the search results for many packages.\n\tThe function itself is rather slow, so consider not sending to\n\tmany packages to the search query.\n\t\"\"\"\n\tresult = {}\n\tfor package in names:\n\t\tfor found_package in find_package(package):\n\t\t\tresult[package] = found_package\n\n\treturn result\n\n\ndef validate_package_list(packages :list) -> Tuple[list, list]:\n\t\"\"\"\n\tValidates a list of given packages.\n\treturn: Tuple of lists containing valid packavges in the first and invalid\n\tpackages in the second entry\n\t\"\"\"\n\tvalid_packages = {package for package in packages if find_package(package)}\n\tinvalid_packages = set(packages) - valid_packages\n\n\treturn list(valid_packages), list(invalid_packages)\n\n\ndef installed_package(package :str) -> LocalPackage:\n\tpackage_info = {}\n\ttry:\n\t\tfor line in run_pacman(f\"-Q --info {package}\"):\n\t\t\tif b':' in line:\n\t\t\t\tkey, value = line.decode().split(':', 1)\n\t\t\t\tpackage_info[key.strip().lower().replace(' ', '_')] = value.strip()\n\texcept SysCallError:\n\t\tpass\n\n\treturn LocalPackage(**package_info)\n", "path": "archinstall/lib/packages/packages.py"}], "after_files": [{"content": "import dataclasses\nimport json\nimport ssl\nfrom typing import Dict, Any, Tuple, List\nfrom urllib.error import HTTPError\nfrom urllib.parse import urlencode\nfrom urllib.request import urlopen\n\nfrom ..exceptions import PackageError, SysCallError\nfrom ..models.dataclasses import PackageSearch, PackageSearchResult, LocalPackage\nfrom ..pacman import run_pacman\n\nBASE_URL_PKG_SEARCH = 'https://archlinux.org/packages/search/json/'\n# BASE_URL_PKG_CONTENT = 'https://archlinux.org/packages/search/json/'\nBASE_GROUP_URL = 'https://archlinux.org/groups/search/json/'\n\n\ndef _make_request(url: str, params: Dict) -> Any:\n\tssl_context = ssl.create_default_context()\n\tssl_context.check_hostname = False\n\tssl_context.verify_mode = ssl.CERT_NONE\n\n\tencoded = urlencode(params)\n\tfull_url = f'{url}?{encoded}'\n\n\treturn urlopen(full_url, context=ssl_context)\n\n\ndef group_search(name :str) -> List[PackageSearchResult]:\n\t# TODO UPSTREAM: Implement /json/ for the groups search\n\ttry:\n\t\tresponse = _make_request(BASE_GROUP_URL, {'name': name})\n\texcept HTTPError as err:\n\t\tif err.code == 404:\n\t\t\treturn []\n\t\telse:\n\t\t\traise err\n\n\t# Just to be sure some code didn't slip through the exception\n\tdata = response.read().decode('UTF-8')\n\n\treturn [PackageSearchResult(**package) for package in json.loads(data)['results']]\n\n\ndef package_search(package :str) -> PackageSearch:\n\t\"\"\"\n\tFinds a specific package via the package database.\n\tIt makes a simple web-request, which might be a bit slow.\n\t\"\"\"\n\t# TODO UPSTREAM: Implement bulk search, either support name=X&name=Y or split on space (%20 or ' ')\n\t# TODO: utilize pacman cache first, upstream second.\n\tresponse = _make_request(BASE_URL_PKG_SEARCH, {'name': package})\n\n\tif response.code != 200:\n\t\traise PackageError(f\"Could not locate package: [{response.code}] {response}\")\n\n\tdata = response.read().decode('UTF-8')\n\n\treturn PackageSearch(**json.loads(data))\n\n\ndef find_package(package :str) -> List[PackageSearchResult]:\n\tdata = package_search(package)\n\tresults = []\n\n\tfor result in data.results:\n\t\tif result.pkgname == package:\n\t\t\tresults.append(result)\n\n\t# If we didn't find the package in the search results,\n\t# odds are it's a group package\n\tif not results:\n\t\t# Check if the package is actually a group\n\t\tfor result in group_search(package):\n\t\t\tresults.append(result)\n\n\treturn results\n\n\ndef find_packages(*names :str) -> Dict[str, Any]:\n\t\"\"\"\n\tThis function returns the search results for many packages.\n\tThe function itself is rather slow, so consider not sending to\n\tmany packages to the search query.\n\t\"\"\"\n\tresult = {}\n\tfor package in names:\n\t\tfor found_package in find_package(package):\n\t\t\tresult[package] = found_package\n\n\treturn result\n\n\ndef validate_package_list(packages :list) -> Tuple[list, list]:\n\t\"\"\"\n\tValidates a list of given packages.\n\treturn: Tuple of lists containing valid packavges in the first and invalid\n\tpackages in the second entry\n\t\"\"\"\n\tvalid_packages = {package for package in packages if find_package(package)}\n\tinvalid_packages = set(packages) - valid_packages\n\n\treturn list(valid_packages), list(invalid_packages)\n\n\ndef installed_package(package :str) -> LocalPackage:\n\tpackage_info = {}\n\ttry:\n\t\tfor line in run_pacman(f\"-Q --info {package}\"):\n\t\t\tif b':' in line:\n\t\t\t\tkey, value = line.decode().split(':', 1)\n\t\t\t\tpackage_info[key.strip().lower().replace(' ', '_')] = value.strip()\n\texcept SysCallError:\n\t\tpass\n\n\treturn LocalPackage({field.name: package_info.get(field.name) for field in dataclasses.fields(LocalPackage)})\n", "path": "archinstall/lib/packages/packages.py"}]}
| 1,728 | 122 |
gh_patches_debug_29273
|
rasdani/github-patches
|
git_diff
|
digitalfabrik__integreat-cms-577
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Old permalinks not mapped in single page API endpoint
### Describe the Bug
<!-- A clear and concise description of what the bug is. -->
The permalink is checked for correctness in the single page API endpoint, but in #119 we decided not to keep track of old permalinks. Instead, we will just map permalinks based on the last url parameter (the slug) and ignore the rest.
### Steps to Reproduce
1. Copy one page permalink
2. Move that page to another parent to change the permalink
3. Request this page in the single page API endpoint with the old permalink from step 1
### Expected Behavior
<!-- A clear and concise description of what you expected to happen. -->
The page should be found even if the permalink is old
### Actual Behavior
<!-- A clear and concise description of what actually happened. -->
The error `No Page matches the given url or id` is returned.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/api/v3/single_page.py`
Content:
```
1 """
2 View to return a JSON representation of a single page. The page can
3 be selected via the id or the permalink.
4 """
5 from django.http import JsonResponse, Http404
6 from django.shortcuts import get_object_or_404
7
8 from cms.models import Region, Page
9 from .pages import transform_page
10
11
12 # pylint: disable=unused-argument
13 def single_page(request, region_slug, language_code):
14 """
15 View function returning the desired page as a JSON or a 404 if the
16 requested page does not exist.
17
18 :param request: The request that has been sent to the Django server
19 :type request: ~django.http.HttpRequest
20
21 :param region_slug: Slug defining the region
22 :type region_slug: str
23
24 :param language_code: Code to identify the desired language
25 :type language_code: str
26
27 :raises ~django.http.Http404: HTTP status 404 if the request is malformed or no page with the given id or url exists.
28
29 :return: Return a JSON with the requested page and a HTTP status 200.
30 :rtype: ~django.http.JsonResponse
31 """
32 region = Region.get_current_region(request)
33
34 if request.GET.get("id"):
35 page = get_object_or_404(region.pages, id=request.GET.get("id"))
36 page_translation = page.get_public_translation(language_code)
37 if page_translation:
38 return JsonResponse(transform_page(page_translation), safe=False)
39
40 elif request.GET.get("url"):
41 # Strip leading and trailing slashes to avoid ambiguous urls
42 url = request.GET.get("url").strip("/")
43 # Get potential page candidate by only filtering for the translation slug
44 page = get_object_or_404(
45 Page, region=region, translations__slug=url.split("/")[-1]
46 )
47 # Get most recent public revision of the page
48 page_translation = page.get_public_translation(language_code)
49 # Check if the whole path is correct, not only the slug
50 # TODO: Once we have a permalink mapping of old versions, we also have to check whether the permalink was valid in the past
51 if page_translation.permalink == url:
52 return JsonResponse(transform_page(page_translation), safe=False)
53
54 raise Http404("No Page matches the given url or id.")
55
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/src/api/v3/single_page.py b/src/api/v3/single_page.py
--- a/src/api/v3/single_page.py
+++ b/src/api/v3/single_page.py
@@ -5,7 +5,7 @@
from django.http import JsonResponse, Http404
from django.shortcuts import get_object_or_404
-from cms.models import Region, Page
+from cms.models import Region
from .pages import transform_page
@@ -40,15 +40,17 @@
elif request.GET.get("url"):
# Strip leading and trailing slashes to avoid ambiguous urls
url = request.GET.get("url").strip("/")
- # Get potential page candidate by only filtering for the translation slug
+ # The last path component of the url is the page translation slug
+ page_translation_slug = url.split("/")[-1]
+ # Get page by filtering for translation slug and translation language code
page = get_object_or_404(
- Page, region=region, translations__slug=url.split("/")[-1]
+ region.pages,
+ translations__slug=page_translation_slug,
+ translations__language__code=language_code,
)
# Get most recent public revision of the page
page_translation = page.get_public_translation(language_code)
- # Check if the whole path is correct, not only the slug
- # TODO: Once we have a permalink mapping of old versions, we also have to check whether the permalink was valid in the past
- if page_translation.permalink == url:
+ if page_translation:
return JsonResponse(transform_page(page_translation), safe=False)
raise Http404("No Page matches the given url or id.")
|
{"golden_diff": "diff --git a/src/api/v3/single_page.py b/src/api/v3/single_page.py\n--- a/src/api/v3/single_page.py\n+++ b/src/api/v3/single_page.py\n@@ -5,7 +5,7 @@\n from django.http import JsonResponse, Http404\n from django.shortcuts import get_object_or_404\n \n-from cms.models import Region, Page\n+from cms.models import Region\n from .pages import transform_page\n \n \n@@ -40,15 +40,17 @@\n elif request.GET.get(\"url\"):\n # Strip leading and trailing slashes to avoid ambiguous urls\n url = request.GET.get(\"url\").strip(\"/\")\n- # Get potential page candidate by only filtering for the translation slug\n+ # The last path component of the url is the page translation slug\n+ page_translation_slug = url.split(\"/\")[-1]\n+ # Get page by filtering for translation slug and translation language code\n page = get_object_or_404(\n- Page, region=region, translations__slug=url.split(\"/\")[-1]\n+ region.pages,\n+ translations__slug=page_translation_slug,\n+ translations__language__code=language_code,\n )\n # Get most recent public revision of the page\n page_translation = page.get_public_translation(language_code)\n- # Check if the whole path is correct, not only the slug\n- # TODO: Once we have a permalink mapping of old versions, we also have to check whether the permalink was valid in the past\n- if page_translation.permalink == url:\n+ if page_translation:\n return JsonResponse(transform_page(page_translation), safe=False)\n \n raise Http404(\"No Page matches the given url or id.\")\n", "issue": "Old permalinks not mapped in single page API endpoint\n### Describe the Bug\r\n<!-- A clear and concise description of what the bug is. -->\r\nThe permalink is checked for correctness in the single page API endpoint, but in #119 we decided not to keep track of old permalinks. Instead, we will just map permalinks based on the last url parameter (the slug) and ignore the rest.\r\n\r\n### Steps to Reproduce\r\n\r\n1. Copy one page permalink\r\n2. Move that page to another parent to change the permalink\r\n3. Request this page in the single page API endpoint with the old permalink from step 1\r\n\r\n### Expected Behavior\r\n<!-- A clear and concise description of what you expected to happen. -->\r\nThe page should be found even if the permalink is old\r\n\r\n### Actual Behavior\r\n<!-- A clear and concise description of what actually happened. -->\r\nThe error `No Page matches the given url or id` is returned.\r\n\r\n\r\n\n", "before_files": [{"content": "\"\"\"\nView to return a JSON representation of a single page. The page can\nbe selected via the id or the permalink.\n\"\"\"\nfrom django.http import JsonResponse, Http404\nfrom django.shortcuts import get_object_or_404\n\nfrom cms.models import Region, Page\nfrom .pages import transform_page\n\n\n# pylint: disable=unused-argument\ndef single_page(request, region_slug, language_code):\n \"\"\"\n View function returning the desired page as a JSON or a 404 if the\n requested page does not exist.\n\n :param request: The request that has been sent to the Django server\n :type request: ~django.http.HttpRequest\n\n :param region_slug: Slug defining the region\n :type region_slug: str\n\n :param language_code: Code to identify the desired language\n :type language_code: str\n\n :raises ~django.http.Http404: HTTP status 404 if the request is malformed or no page with the given id or url exists.\n\n :return: Return a JSON with the requested page and a HTTP status 200.\n :rtype: ~django.http.JsonResponse\n \"\"\"\n region = Region.get_current_region(request)\n\n if request.GET.get(\"id\"):\n page = get_object_or_404(region.pages, id=request.GET.get(\"id\"))\n page_translation = page.get_public_translation(language_code)\n if page_translation:\n return JsonResponse(transform_page(page_translation), safe=False)\n\n elif request.GET.get(\"url\"):\n # Strip leading and trailing slashes to avoid ambiguous urls\n url = request.GET.get(\"url\").strip(\"/\")\n # Get potential page candidate by only filtering for the translation slug\n page = get_object_or_404(\n Page, region=region, translations__slug=url.split(\"/\")[-1]\n )\n # Get most recent public revision of the page\n page_translation = page.get_public_translation(language_code)\n # Check if the whole path is correct, not only the slug\n # TODO: Once we have a permalink mapping of old versions, we also have to check whether the permalink was valid in the past\n if page_translation.permalink == url:\n return JsonResponse(transform_page(page_translation), safe=False)\n\n raise Http404(\"No Page matches the given url or id.\")\n", "path": "src/api/v3/single_page.py"}], "after_files": [{"content": "\"\"\"\nView to return a JSON representation of a single page. The page can\nbe selected via the id or the permalink.\n\"\"\"\nfrom django.http import JsonResponse, Http404\nfrom django.shortcuts import get_object_or_404\n\nfrom cms.models import Region\nfrom .pages import transform_page\n\n\n# pylint: disable=unused-argument\ndef single_page(request, region_slug, language_code):\n \"\"\"\n View function returning the desired page as a JSON or a 404 if the\n requested page does not exist.\n\n :param request: The request that has been sent to the Django server\n :type request: ~django.http.HttpRequest\n\n :param region_slug: Slug defining the region\n :type region_slug: str\n\n :param language_code: Code to identify the desired language\n :type language_code: str\n\n :raises ~django.http.Http404: HTTP status 404 if the request is malformed or no page with the given id or url exists.\n\n :return: Return a JSON with the requested page and a HTTP status 200.\n :rtype: ~django.http.JsonResponse\n \"\"\"\n region = Region.get_current_region(request)\n\n if request.GET.get(\"id\"):\n page = get_object_or_404(region.pages, id=request.GET.get(\"id\"))\n page_translation = page.get_public_translation(language_code)\n if page_translation:\n return JsonResponse(transform_page(page_translation), safe=False)\n\n elif request.GET.get(\"url\"):\n # Strip leading and trailing slashes to avoid ambiguous urls\n url = request.GET.get(\"url\").strip(\"/\")\n # The last path component of the url is the page translation slug\n page_translation_slug = url.split(\"/\")[-1]\n # Get page by filtering for translation slug and translation language code\n page = get_object_or_404(\n region.pages,\n translations__slug=page_translation_slug,\n translations__language__code=language_code,\n )\n # Get most recent public revision of the page\n page_translation = page.get_public_translation(language_code)\n if page_translation:\n return JsonResponse(transform_page(page_translation), safe=False)\n\n raise Http404(\"No Page matches the given url or id.\")\n", "path": "src/api/v3/single_page.py"}]}
| 1,050 | 366 |
gh_patches_debug_24049
|
rasdani/github-patches
|
git_diff
|
mozilla__bugbug-140
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Print progress bar while downloading bugs
Similar to what we're doing for commits data: 28b83c12c29185c52afb58d94a533a9448969a8a.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bugbug/bugzilla.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # This Source Code Form is subject to the terms of the Mozilla Public
3 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
4 # You can obtain one at http://mozilla.org/MPL/2.0/.
5
6 import json
7 import os
8
9 import requests
10 from libmozdata import bugzilla
11
12 from bugbug import db
13
14 BUGS_DB = 'data/bugs.json'
15 db.register(BUGS_DB, 'https://www.dropbox.com/s/xm6wzac9jl81irz/bugs.json.xz?dl=1')
16
17 ATTACHMENT_INCLUDE_FIELDS = [
18 'id', 'is_obsolete', 'flags', 'is_patch', 'creator', 'content_type', 'creation_time',
19 ]
20
21 COMMENT_INCLUDE_FIELDS = [
22 'id', 'text', 'author', 'creation_time',
23 ]
24
25
26 def get_bug_fields():
27 os.makedirs('data', exist_ok=True)
28
29 try:
30 with open('data/bug_fields.json', 'r') as f:
31 return json.load(f)
32 except IOError:
33 pass
34
35 r = requests.get('https://bugzilla.mozilla.org/rest/field/bug')
36 r.raise_for_status()
37 return r.json()['fields']
38
39
40 def get_bugs():
41 return db.read(BUGS_DB)
42
43
44 def set_token(token):
45 bugzilla.Bugzilla.TOKEN = token
46
47
48 def _download(ids_or_query):
49 new_bugs = {}
50
51 def bughandler(bug):
52 bug_id = int(bug['id'])
53
54 if bug_id not in new_bugs:
55 new_bugs[bug_id] = dict()
56
57 new_bugs[bug_id].update(bug)
58
59 def commenthandler(bug, bug_id):
60 bug_id = int(bug_id)
61
62 if bug_id not in new_bugs:
63 new_bugs[bug_id] = dict()
64
65 new_bugs[bug_id]['comments'] = bug['comments']
66
67 def attachmenthandler(bug, bug_id):
68 bug_id = int(bug_id)
69
70 if bug_id not in new_bugs:
71 new_bugs[bug_id] = dict()
72
73 new_bugs[bug_id]['attachments'] = bug
74
75 def historyhandler(bug):
76 bug_id = int(bug['id'])
77
78 if bug_id not in new_bugs:
79 new_bugs[bug_id] = dict()
80
81 new_bugs[bug_id]['history'] = bug['history']
82
83 bugzilla.Bugzilla(ids_or_query, bughandler=bughandler, commenthandler=commenthandler, comment_include_fields=COMMENT_INCLUDE_FIELDS, attachmenthandler=attachmenthandler, attachment_include_fields=ATTACHMENT_INCLUDE_FIELDS, historyhandler=historyhandler).get_data().wait()
84
85 return new_bugs
86
87
88 def download_bugs_between(date_from, date_to, security=False):
89 products = set([
90 'Add-on SDK',
91 'Android Background Services',
92 'Core',
93 'DevTools',
94 'External Software Affecting Firefox',
95 'Firefox',
96 'Firefox for Android',
97 # 'Firefox for iOS',
98 'Firefox Graveyard',
99 'Firefox Health Report',
100 # 'Focus',
101 # 'Hello (Loop)',
102 'NSPR',
103 'NSS',
104 'Toolkit',
105 'WebExtensions',
106 ])
107
108 r = requests.get(f'https://bugzilla.mozilla.org/rest/bug?include_fields=id&f1=creation_ts&o1=greaterthan&v1={date_from.strftime("%Y-%m-%d")}&limit=1&order=bug_id')
109 r.raise_for_status()
110 first_id = r.json()['bugs'][0]['id']
111
112 r = requests.get(f'https://bugzilla.mozilla.org/rest/bug?include_fields=id&f1=creation_ts&o1=lessthan&v1={date_to.strftime("%Y-%m-%d")}&limit=1&order=bug_id%20desc')
113 r.raise_for_status()
114 last_id = r.json()['bugs'][0]['id']
115
116 assert first_id < last_id
117
118 all_ids = range(first_id, last_id + 1)
119
120 download_bugs(all_ids, security=security, products=products)
121
122 return all_ids
123
124
125 def download_bugs(bug_ids, products=None, security=False):
126 old_bug_count = 0
127 old_bugs = []
128 new_bug_ids = set(int(bug_id) for bug_id in bug_ids)
129 for bug in get_bugs():
130 old_bug_count += 1
131 if int(bug['id']) in new_bug_ids:
132 old_bugs.append(bug)
133 new_bug_ids.remove(bug['id'])
134
135 print(f'Loaded {old_bug_count} bugs.')
136
137 new_bug_ids = sorted(list(new_bug_ids))
138
139 total_downloaded = 0
140 chunks = (new_bug_ids[i:(i + 500)] for i in range(0, len(new_bug_ids), 500))
141 for chunk in chunks:
142 new_bugs = _download(chunk)
143
144 total_downloaded += len(new_bugs)
145
146 print(f'Downloaded {total_downloaded} out of {len(new_bug_ids)} bugs')
147
148 if not security:
149 new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if len(bug['groups']) == 0}
150
151 if products is not None:
152 new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if bug['product'] in products}
153
154 db.append(BUGS_DB, new_bugs.values())
155
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/bugbug/bugzilla.py b/bugbug/bugzilla.py
--- a/bugbug/bugzilla.py
+++ b/bugbug/bugzilla.py
@@ -8,6 +8,7 @@
import requests
from libmozdata import bugzilla
+from tqdm import tqdm
from bugbug import db
@@ -136,19 +137,17 @@
new_bug_ids = sorted(list(new_bug_ids))
- total_downloaded = 0
chunks = (new_bug_ids[i:(i + 500)] for i in range(0, len(new_bug_ids), 500))
- for chunk in chunks:
- new_bugs = _download(chunk)
+ with tqdm(total=len(new_bug_ids)) as progress_bar:
+ for chunk in chunks:
+ new_bugs = _download(chunk)
- total_downloaded += len(new_bugs)
+ progress_bar.update(len(chunk))
- print(f'Downloaded {total_downloaded} out of {len(new_bug_ids)} bugs')
+ if not security:
+ new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if len(bug['groups']) == 0}
- if not security:
- new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if len(bug['groups']) == 0}
+ if products is not None:
+ new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if bug['product'] in products}
- if products is not None:
- new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if bug['product'] in products}
-
- db.append(BUGS_DB, new_bugs.values())
+ db.append(BUGS_DB, new_bugs.values())
|
{"golden_diff": "diff --git a/bugbug/bugzilla.py b/bugbug/bugzilla.py\n--- a/bugbug/bugzilla.py\n+++ b/bugbug/bugzilla.py\n@@ -8,6 +8,7 @@\n \n import requests\n from libmozdata import bugzilla\n+from tqdm import tqdm\n \n from bugbug import db\n \n@@ -136,19 +137,17 @@\n \n new_bug_ids = sorted(list(new_bug_ids))\n \n- total_downloaded = 0\n chunks = (new_bug_ids[i:(i + 500)] for i in range(0, len(new_bug_ids), 500))\n- for chunk in chunks:\n- new_bugs = _download(chunk)\n+ with tqdm(total=len(new_bug_ids)) as progress_bar:\n+ for chunk in chunks:\n+ new_bugs = _download(chunk)\n \n- total_downloaded += len(new_bugs)\n+ progress_bar.update(len(chunk))\n \n- print(f'Downloaded {total_downloaded} out of {len(new_bug_ids)} bugs')\n+ if not security:\n+ new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if len(bug['groups']) == 0}\n \n- if not security:\n- new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if len(bug['groups']) == 0}\n+ if products is not None:\n+ new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if bug['product'] in products}\n \n- if products is not None:\n- new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if bug['product'] in products}\n-\n- db.append(BUGS_DB, new_bugs.values())\n+ db.append(BUGS_DB, new_bugs.values())\n", "issue": "Print progress bar while downloading bugs\nSimilar to what we're doing for commits data: 28b83c12c29185c52afb58d94a533a9448969a8a.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n\nimport json\nimport os\n\nimport requests\nfrom libmozdata import bugzilla\n\nfrom bugbug import db\n\nBUGS_DB = 'data/bugs.json'\ndb.register(BUGS_DB, 'https://www.dropbox.com/s/xm6wzac9jl81irz/bugs.json.xz?dl=1')\n\nATTACHMENT_INCLUDE_FIELDS = [\n 'id', 'is_obsolete', 'flags', 'is_patch', 'creator', 'content_type', 'creation_time',\n]\n\nCOMMENT_INCLUDE_FIELDS = [\n 'id', 'text', 'author', 'creation_time',\n]\n\n\ndef get_bug_fields():\n os.makedirs('data', exist_ok=True)\n\n try:\n with open('data/bug_fields.json', 'r') as f:\n return json.load(f)\n except IOError:\n pass\n\n r = requests.get('https://bugzilla.mozilla.org/rest/field/bug')\n r.raise_for_status()\n return r.json()['fields']\n\n\ndef get_bugs():\n return db.read(BUGS_DB)\n\n\ndef set_token(token):\n bugzilla.Bugzilla.TOKEN = token\n\n\ndef _download(ids_or_query):\n new_bugs = {}\n\n def bughandler(bug):\n bug_id = int(bug['id'])\n\n if bug_id not in new_bugs:\n new_bugs[bug_id] = dict()\n\n new_bugs[bug_id].update(bug)\n\n def commenthandler(bug, bug_id):\n bug_id = int(bug_id)\n\n if bug_id not in new_bugs:\n new_bugs[bug_id] = dict()\n\n new_bugs[bug_id]['comments'] = bug['comments']\n\n def attachmenthandler(bug, bug_id):\n bug_id = int(bug_id)\n\n if bug_id not in new_bugs:\n new_bugs[bug_id] = dict()\n\n new_bugs[bug_id]['attachments'] = bug\n\n def historyhandler(bug):\n bug_id = int(bug['id'])\n\n if bug_id not in new_bugs:\n new_bugs[bug_id] = dict()\n\n new_bugs[bug_id]['history'] = bug['history']\n\n bugzilla.Bugzilla(ids_or_query, bughandler=bughandler, commenthandler=commenthandler, comment_include_fields=COMMENT_INCLUDE_FIELDS, attachmenthandler=attachmenthandler, attachment_include_fields=ATTACHMENT_INCLUDE_FIELDS, historyhandler=historyhandler).get_data().wait()\n\n return new_bugs\n\n\ndef download_bugs_between(date_from, date_to, security=False):\n products = set([\n 'Add-on SDK',\n 'Android Background Services',\n 'Core',\n 'DevTools',\n 'External Software Affecting Firefox',\n 'Firefox',\n 'Firefox for Android',\n # 'Firefox for iOS',\n 'Firefox Graveyard',\n 'Firefox Health Report',\n # 'Focus',\n # 'Hello (Loop)',\n 'NSPR',\n 'NSS',\n 'Toolkit',\n 'WebExtensions',\n ])\n\n r = requests.get(f'https://bugzilla.mozilla.org/rest/bug?include_fields=id&f1=creation_ts&o1=greaterthan&v1={date_from.strftime(\"%Y-%m-%d\")}&limit=1&order=bug_id')\n r.raise_for_status()\n first_id = r.json()['bugs'][0]['id']\n\n r = requests.get(f'https://bugzilla.mozilla.org/rest/bug?include_fields=id&f1=creation_ts&o1=lessthan&v1={date_to.strftime(\"%Y-%m-%d\")}&limit=1&order=bug_id%20desc')\n r.raise_for_status()\n last_id = r.json()['bugs'][0]['id']\n\n assert first_id < last_id\n\n all_ids = range(first_id, last_id + 1)\n\n download_bugs(all_ids, security=security, products=products)\n\n return all_ids\n\n\ndef download_bugs(bug_ids, products=None, security=False):\n old_bug_count = 0\n old_bugs = []\n new_bug_ids = set(int(bug_id) for bug_id in bug_ids)\n for bug in get_bugs():\n old_bug_count += 1\n if int(bug['id']) in new_bug_ids:\n old_bugs.append(bug)\n new_bug_ids.remove(bug['id'])\n\n print(f'Loaded {old_bug_count} bugs.')\n\n new_bug_ids = sorted(list(new_bug_ids))\n\n total_downloaded = 0\n chunks = (new_bug_ids[i:(i + 500)] for i in range(0, len(new_bug_ids), 500))\n for chunk in chunks:\n new_bugs = _download(chunk)\n\n total_downloaded += len(new_bugs)\n\n print(f'Downloaded {total_downloaded} out of {len(new_bug_ids)} bugs')\n\n if not security:\n new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if len(bug['groups']) == 0}\n\n if products is not None:\n new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if bug['product'] in products}\n\n db.append(BUGS_DB, new_bugs.values())\n", "path": "bugbug/bugzilla.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n\nimport json\nimport os\n\nimport requests\nfrom libmozdata import bugzilla\nfrom tqdm import tqdm\n\nfrom bugbug import db\n\nBUGS_DB = 'data/bugs.json'\ndb.register(BUGS_DB, 'https://www.dropbox.com/s/xm6wzac9jl81irz/bugs.json.xz?dl=1')\n\nATTACHMENT_INCLUDE_FIELDS = [\n 'id', 'is_obsolete', 'flags', 'is_patch', 'creator', 'content_type', 'creation_time',\n]\n\nCOMMENT_INCLUDE_FIELDS = [\n 'id', 'text', 'author', 'creation_time',\n]\n\n\ndef get_bug_fields():\n os.makedirs('data', exist_ok=True)\n\n try:\n with open('data/bug_fields.json', 'r') as f:\n return json.load(f)\n except IOError:\n pass\n\n r = requests.get('https://bugzilla.mozilla.org/rest/field/bug')\n r.raise_for_status()\n return r.json()['fields']\n\n\ndef get_bugs():\n return db.read(BUGS_DB)\n\n\ndef set_token(token):\n bugzilla.Bugzilla.TOKEN = token\n\n\ndef _download(ids_or_query):\n new_bugs = {}\n\n def bughandler(bug):\n bug_id = int(bug['id'])\n\n if bug_id not in new_bugs:\n new_bugs[bug_id] = dict()\n\n new_bugs[bug_id].update(bug)\n\n def commenthandler(bug, bug_id):\n bug_id = int(bug_id)\n\n if bug_id not in new_bugs:\n new_bugs[bug_id] = dict()\n\n new_bugs[bug_id]['comments'] = bug['comments']\n\n def attachmenthandler(bug, bug_id):\n bug_id = int(bug_id)\n\n if bug_id not in new_bugs:\n new_bugs[bug_id] = dict()\n\n new_bugs[bug_id]['attachments'] = bug\n\n def historyhandler(bug):\n bug_id = int(bug['id'])\n\n if bug_id not in new_bugs:\n new_bugs[bug_id] = dict()\n\n new_bugs[bug_id]['history'] = bug['history']\n\n bugzilla.Bugzilla(ids_or_query, bughandler=bughandler, commenthandler=commenthandler, comment_include_fields=COMMENT_INCLUDE_FIELDS, attachmenthandler=attachmenthandler, attachment_include_fields=ATTACHMENT_INCLUDE_FIELDS, historyhandler=historyhandler).get_data().wait()\n\n return new_bugs\n\n\ndef download_bugs_between(date_from, date_to, security=False):\n products = set([\n 'Add-on SDK',\n 'Android Background Services',\n 'Core',\n 'DevTools',\n 'External Software Affecting Firefox',\n 'Firefox',\n 'Firefox for Android',\n # 'Firefox for iOS',\n 'Firefox Graveyard',\n 'Firefox Health Report',\n # 'Focus',\n # 'Hello (Loop)',\n 'NSPR',\n 'NSS',\n 'Toolkit',\n 'WebExtensions',\n ])\n\n r = requests.get(f'https://bugzilla.mozilla.org/rest/bug?include_fields=id&f1=creation_ts&o1=greaterthan&v1={date_from.strftime(\"%Y-%m-%d\")}&limit=1&order=bug_id')\n r.raise_for_status()\n first_id = r.json()['bugs'][0]['id']\n\n r = requests.get(f'https://bugzilla.mozilla.org/rest/bug?include_fields=id&f1=creation_ts&o1=lessthan&v1={date_to.strftime(\"%Y-%m-%d\")}&limit=1&order=bug_id%20desc')\n r.raise_for_status()\n last_id = r.json()['bugs'][0]['id']\n\n assert first_id < last_id\n\n all_ids = range(first_id, last_id + 1)\n\n download_bugs(all_ids, security=security, products=products)\n\n return all_ids\n\n\ndef download_bugs(bug_ids, products=None, security=False):\n old_bug_count = 0\n old_bugs = []\n new_bug_ids = set(int(bug_id) for bug_id in bug_ids)\n for bug in get_bugs():\n old_bug_count += 1\n if int(bug['id']) in new_bug_ids:\n old_bugs.append(bug)\n new_bug_ids.remove(bug['id'])\n\n print(f'Loaded {old_bug_count} bugs.')\n\n new_bug_ids = sorted(list(new_bug_ids))\n\n chunks = (new_bug_ids[i:(i + 500)] for i in range(0, len(new_bug_ids), 500))\n with tqdm(total=len(new_bug_ids)) as progress_bar:\n for chunk in chunks:\n new_bugs = _download(chunk)\n\n progress_bar.update(len(chunk))\n\n if not security:\n new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if len(bug['groups']) == 0}\n\n if products is not None:\n new_bugs = {bug_id: bug for bug_id, bug in new_bugs.items() if bug['product'] in products}\n\n db.append(BUGS_DB, new_bugs.values())\n", "path": "bugbug/bugzilla.py"}]}
| 1,897 | 421 |
gh_patches_debug_40073
|
rasdani/github-patches
|
git_diff
|
hpcaitech__ColossalAI-3162
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[tensor] fix some unittests
[tensor] fix some unittests
[tensor] fix some unittests
[BUG]: chatgpt ppo training hangs when using gemini
### 🐛 Describe the bug
## Problem
Running `ChatGPT/examples/train_prompts.py`, I found sometimes the training hangs when using Gemini.
This occurs when randomly while changing batch size.
## Possible reason
I found the [padding policy](https://huggingface.co/docs/transformers/main_classes/tokenizer#transformers.PreTrainedTokenizer.__call__.padding) is to pad to the longest sequence in the batch.
In DDP scheme, different process may have different input lengths due to random sampling. That is to say, they may have different generation steps.
When using Gemini, which need communication during forward, different forward steps leads to different number of communication calls. And this asymmetric communication leads to hang.
## Possible solution
Change padding policy to `'max_length'`, see [huggingface tokenizer doc](https://huggingface.co/docs/transformers/main_classes/tokenizer#transformers.PreTrainedTokenizer.__call__.padding) for more details.
In addition, when enabling early stopping, we should also consider DDP and ensure the number of generation steps of each i process is the same.
### Environment
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `applications/ChatGPT/chatgpt/models/generation.py`
Content:
```
1 from typing import Any, Callable, Optional
2
3 import torch
4 import torch.nn as nn
5
6 try:
7 from transformers.generation_logits_process import (
8 LogitsProcessorList,
9 TemperatureLogitsWarper,
10 TopKLogitsWarper,
11 TopPLogitsWarper,
12 )
13 except ImportError:
14 from transformers.generation import LogitsProcessorList, TemperatureLogitsWarper, TopKLogitsWarper, TopPLogitsWarper
15
16
17 def prepare_logits_processor(top_k: Optional[int] = None,
18 top_p: Optional[float] = None,
19 temperature: Optional[float] = None) -> LogitsProcessorList:
20 processor_list = LogitsProcessorList()
21 if temperature is not None and temperature != 1.0:
22 processor_list.append(TemperatureLogitsWarper(temperature))
23 if top_k is not None and top_k != 0:
24 processor_list.append(TopKLogitsWarper(top_k))
25 if top_p is not None and top_p < 1.0:
26 processor_list.append(TopPLogitsWarper(top_p))
27 return processor_list
28
29
30 def sample(model: nn.Module,
31 input_ids: torch.Tensor,
32 max_length: int,
33 early_stopping: bool = False,
34 eos_token_id: Optional[int] = None,
35 pad_token_id: Optional[int] = None,
36 top_k: Optional[int] = None,
37 top_p: Optional[float] = None,
38 temperature: Optional[float] = None,
39 prepare_inputs_fn: Optional[Callable[[torch.Tensor, Any], dict]] = None,
40 update_model_kwargs_fn: Optional[Callable[[dict, Any], dict]] = None,
41 **model_kwargs) -> torch.Tensor:
42 if input_ids.size(1) >= max_length:
43 return input_ids
44
45 logits_processor = prepare_logits_processor(top_k, top_p, temperature)
46 unfinished_sequences = input_ids.new(input_ids.shape[0]).fill_(1)
47
48 for _ in range(input_ids.size(1), max_length):
49 model_inputs = prepare_inputs_fn(input_ids, **model_kwargs) if prepare_inputs_fn is not None else {
50 'input_ids': input_ids
51 }
52 outputs = model(**model_inputs)
53
54 next_token_logits = outputs['logits'][:, -1, :]
55 # pre-process distribution
56 next_token_logits = logits_processor(input_ids, next_token_logits)
57 # sample
58 probs = torch.softmax(next_token_logits, dim=-1, dtype=torch.float)
59 next_tokens = torch.multinomial(probs, num_samples=1).squeeze(1)
60
61 # finished sentences should have their next token be a padding token
62 if eos_token_id is not None:
63 if pad_token_id is None:
64 raise ValueError("If `eos_token_id` is defined, make sure that `pad_token_id` is defined.")
65 next_tokens = next_tokens * unfinished_sequences + pad_token_id * (1 - unfinished_sequences)
66
67 # update generated ids, model inputs for next step
68 input_ids = torch.cat([input_ids, next_tokens[:, None]], dim=-1)
69 if update_model_kwargs_fn is not None:
70 model_kwargs = update_model_kwargs_fn(outputs, **model_kwargs)
71
72 # if eos_token was found in one sentence, set sentence to finished
73 if eos_token_id is not None:
74 unfinished_sequences = unfinished_sequences.mul((next_tokens != eos_token_id).long())
75
76 # stop when each sentence is finished if early_stopping=True
77 if early_stopping and unfinished_sequences.max() == 0:
78 break
79
80 return input_ids
81
82
83 def generate(model: nn.Module,
84 input_ids: torch.Tensor,
85 max_length: int,
86 num_beams: int = 1,
87 do_sample: bool = True,
88 early_stopping: bool = False,
89 eos_token_id: Optional[int] = None,
90 pad_token_id: Optional[int] = None,
91 top_k: Optional[int] = None,
92 top_p: Optional[float] = None,
93 temperature: Optional[float] = None,
94 prepare_inputs_fn: Optional[Callable[[torch.Tensor, Any], dict]] = None,
95 update_model_kwargs_fn: Optional[Callable[[dict, Any], dict]] = None,
96 **model_kwargs) -> torch.Tensor:
97 """Generate token sequence. The returned sequence is input_ids + generated_tokens.
98
99 Args:
100 model (nn.Module): model
101 input_ids (torch.Tensor): input sequence
102 max_length (int): max length of the returned sequence
103 num_beams (int, optional): number of beams. Defaults to 1.
104 do_sample (bool, optional): whether to do sample. Defaults to True.
105 early_stopping (bool, optional): if True, the sequence length may be smaller than max_length due to finding eos. Defaults to False.
106 eos_token_id (Optional[int], optional): end of sequence token id. Defaults to None.
107 pad_token_id (Optional[int], optional): pad token id. Defaults to None.
108 top_k (Optional[int], optional): the number of highest probability vocabulary tokens to keep for top-k-filtering. Defaults to None.
109 top_p (Optional[float], optional): If set to float < 1, only the smallest set of most probable tokens with probabilities that add up to top_p or higher are kept for generation. Defaults to None.
110 temperature (Optional[float], optional): The value used to module the next token probabilities. Defaults to None.
111 prepare_inputs_fn (Optional[Callable[[torch.Tensor, Any], dict]], optional): Function to preprocess model inputs. Arguments of this function should be input_ids and model_kwargs. Defaults to None.
112 update_model_kwargs_fn (Optional[Callable[[dict, Any], dict]], optional): Function to update model_kwargs based on outputs. Arguments of this function should be outputs and model_kwargs. Defaults to None.
113 """
114 is_greedy_gen_mode = ((num_beams == 1) and do_sample is False)
115 is_sample_gen_mode = ((num_beams == 1) and do_sample is True)
116 is_beam_gen_mode = ((num_beams > 1) and do_sample is False)
117 if is_greedy_gen_mode:
118 # run greedy search
119 raise NotImplementedError
120 elif is_sample_gen_mode:
121 # run sample
122 return sample(model,
123 input_ids,
124 max_length,
125 early_stopping=early_stopping,
126 eos_token_id=eos_token_id,
127 pad_token_id=pad_token_id,
128 top_k=top_k,
129 top_p=top_p,
130 temperature=temperature,
131 prepare_inputs_fn=prepare_inputs_fn,
132 update_model_kwargs_fn=update_model_kwargs_fn,
133 **model_kwargs)
134 elif is_beam_gen_mode:
135 raise NotImplementedError
136 else:
137 raise ValueError("Unsupported generation mode")
138
```
Path: `applications/ChatGPT/examples/train_prompts.py`
Content:
```
1 import argparse
2 from copy import deepcopy
3
4 import pandas as pd
5 import torch
6 from chatgpt.models.base import RewardModel
7 from chatgpt.models.bloom import BLOOMActor, BLOOMCritic
8 from chatgpt.models.gpt import GPTActor, GPTCritic
9 from chatgpt.models.opt import OPTActor, OPTCritic
10 from chatgpt.trainer import PPOTrainer
11 from chatgpt.trainer.strategies import ColossalAIStrategy, DDPStrategy, NaiveStrategy
12 from torch.optim import Adam
13 from transformers import AutoTokenizer, BloomTokenizerFast
14 from transformers.models.gpt2.tokenization_gpt2 import GPT2Tokenizer
15
16 from colossalai.nn.optimizer import HybridAdam
17
18
19 def main(args):
20 # configure strategy
21 if args.strategy == 'naive':
22 strategy = NaiveStrategy()
23 elif args.strategy == 'ddp':
24 strategy = DDPStrategy()
25 elif args.strategy == 'colossalai_gemini':
26 strategy = ColossalAIStrategy(stage=3, placement_policy='cuda', initial_scale=2**5)
27 elif args.strategy == 'colossalai_zero2':
28 strategy = ColossalAIStrategy(stage=2, placement_policy='cuda')
29 else:
30 raise ValueError(f'Unsupported strategy "{args.strategy}"')
31
32 # configure model
33 with strategy.model_init_context():
34 if args.model == 'gpt2':
35 actor = GPTActor(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())
36 critic = GPTCritic(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())
37 elif args.model == 'bloom':
38 actor = BLOOMActor(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())
39 critic = BLOOMCritic(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())
40 elif args.model == 'opt':
41 actor = OPTActor(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())
42 critic = OPTCritic(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())
43 else:
44 raise ValueError(f'Unsupported model "{args.model}"')
45
46 initial_model = deepcopy(actor)
47 reward_model = RewardModel(deepcopy(critic.model), deepcopy(critic.value_head)).to(torch.cuda.current_device())
48
49
50 # configure optimizer
51 if args.strategy.startswith('colossalai'):
52 actor_optim = HybridAdam(actor.parameters(), lr=5e-6)
53 critic_optim = HybridAdam(critic.parameters(), lr=5e-6)
54 else:
55 actor_optim = Adam(actor.parameters(), lr=5e-6)
56 critic_optim = Adam(critic.parameters(), lr=5e-6)
57
58 # configure tokenizer
59 if args.model == 'gpt2':
60 tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
61 tokenizer.pad_token = tokenizer.eos_token
62 elif args.model == 'bloom':
63 tokenizer = BloomTokenizerFast.from_pretrained(args.pretrain)
64 tokenizer.pad_token = tokenizer.eos_token
65 elif args.model == 'opt':
66 tokenizer = AutoTokenizer.from_pretrained("facebook/opt-350m")
67 else:
68 raise ValueError(f'Unsupported model "{args.model}"')
69
70 dataset = pd.read_csv(args.prompt_path)['prompt']
71
72 def tokenize_fn(texts):
73 batch = tokenizer(texts, return_tensors='pt', max_length=96, padding=True, truncation=True)
74 return {k: v.cuda() for k, v in batch.items()}
75
76 (actor, actor_optim), (critic, critic_optim), reward_model, initial_model = strategy.prepare(
77 (actor, actor_optim), (critic, critic_optim), reward_model, initial_model)
78
79 # configure trainer
80 trainer = PPOTrainer(
81 strategy,
82 actor,
83 critic,
84 reward_model,
85 initial_model,
86 actor_optim,
87 critic_optim,
88 max_epochs=args.max_epochs,
89 train_batch_size=args.train_batch_size,
90 experience_batch_size=args.experience_batch_size,
91 tokenizer=tokenize_fn,
92 max_length=128,
93 do_sample=True,
94 temperature=1.0,
95 top_k=50,
96 pad_token_id=tokenizer.pad_token_id,
97 eos_token_id=tokenizer.eos_token_id,
98 )
99
100 trainer.fit(dataset,
101 num_episodes=args.num_episodes,
102 max_timesteps=args.max_timesteps,
103 update_timesteps=args.update_timesteps)
104 # save model checkpoint after fitting
105 strategy.save_model(actor, args.save_path, only_rank0=True)
106 # save optimizer checkpoint on all ranks
107 if args.need_optim_ckpt:
108 strategy.save_optimizer(actor_optim,
109 'actor_optim_checkpoint_prompts_%d.pt' % (torch.cuda.current_device()),
110 only_rank0=False)
111
112
113 if __name__ == '__main__':
114 parser = argparse.ArgumentParser()
115 parser.add_argument('prompt_path')
116 parser.add_argument('--strategy',
117 choices=['naive', 'ddp', 'colossalai_gemini', 'colossalai_zero2'],
118 default='naive')
119 parser.add_argument('--model', default='gpt2', choices=['gpt2', 'bloom', 'opt'])
120 parser.add_argument('--pretrain', type=str, default=None)
121 parser.add_argument('--save_path', type=str, default='actor_checkpoint_prompts.pt')
122 parser.add_argument('--need_optim_ckpt', type=bool, default=False)
123 parser.add_argument('--num_episodes', type=int, default=10)
124 parser.add_argument('--max_timesteps', type=int, default=10)
125 parser.add_argument('--update_timesteps', type=int, default=10)
126 parser.add_argument('--max_epochs', type=int, default=5)
127 parser.add_argument('--train_batch_size', type=int, default=8)
128 parser.add_argument('--experience_batch_size', type=int, default=8)
129 parser.add_argument('--lora_rank', type=int, default=0, help="low-rank adaptation matrices rank")
130 args = parser.parse_args()
131 main(args)
132
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/applications/ChatGPT/chatgpt/models/generation.py b/applications/ChatGPT/chatgpt/models/generation.py
--- a/applications/ChatGPT/chatgpt/models/generation.py
+++ b/applications/ChatGPT/chatgpt/models/generation.py
@@ -1,6 +1,7 @@
from typing import Any, Callable, Optional
import torch
+import torch.distributed as dist
import torch.nn as nn
try:
@@ -27,6 +28,14 @@
return processor_list
+def _is_sequence_finished(unfinished_sequences: torch.Tensor) -> bool:
+ if dist.is_initialized() and dist.get_world_size() > 1:
+ # consider DP
+ unfinished_sequences = unfinished_sequences.clone()
+ dist.all_reduce(unfinished_sequences)
+ return unfinished_sequences.max() == 0
+
+
def sample(model: nn.Module,
input_ids: torch.Tensor,
max_length: int,
@@ -74,7 +83,7 @@
unfinished_sequences = unfinished_sequences.mul((next_tokens != eos_token_id).long())
# stop when each sentence is finished if early_stopping=True
- if early_stopping and unfinished_sequences.max() == 0:
+ if early_stopping and _is_sequence_finished(unfinished_sequences):
break
return input_ids
diff --git a/applications/ChatGPT/examples/train_prompts.py b/applications/ChatGPT/examples/train_prompts.py
--- a/applications/ChatGPT/examples/train_prompts.py
+++ b/applications/ChatGPT/examples/train_prompts.py
@@ -46,7 +46,6 @@
initial_model = deepcopy(actor)
reward_model = RewardModel(deepcopy(critic.model), deepcopy(critic.value_head)).to(torch.cuda.current_device())
-
# configure optimizer
if args.strategy.startswith('colossalai'):
actor_optim = HybridAdam(actor.parameters(), lr=5e-6)
@@ -70,7 +69,9 @@
dataset = pd.read_csv(args.prompt_path)['prompt']
def tokenize_fn(texts):
- batch = tokenizer(texts, return_tensors='pt', max_length=96, padding=True, truncation=True)
+ # MUST padding to max length to ensure inputs of all ranks have the same length
+ # Different length may lead to hang when using gemini, as different generation steps
+ batch = tokenizer(texts, return_tensors='pt', max_length=96, padding='max_length', truncation=True)
return {k: v.cuda() for k, v in batch.items()}
(actor, actor_optim), (critic, critic_optim), reward_model, initial_model = strategy.prepare(
@@ -101,7 +102,7 @@
num_episodes=args.num_episodes,
max_timesteps=args.max_timesteps,
update_timesteps=args.update_timesteps)
- # save model checkpoint after fitting
+ # save model checkpoint after fitting
strategy.save_model(actor, args.save_path, only_rank0=True)
# save optimizer checkpoint on all ranks
if args.need_optim_ckpt:
|
{"golden_diff": "diff --git a/applications/ChatGPT/chatgpt/models/generation.py b/applications/ChatGPT/chatgpt/models/generation.py\n--- a/applications/ChatGPT/chatgpt/models/generation.py\n+++ b/applications/ChatGPT/chatgpt/models/generation.py\n@@ -1,6 +1,7 @@\n from typing import Any, Callable, Optional\n \n import torch\n+import torch.distributed as dist\n import torch.nn as nn\n \n try:\n@@ -27,6 +28,14 @@\n return processor_list\n \n \n+def _is_sequence_finished(unfinished_sequences: torch.Tensor) -> bool:\n+ if dist.is_initialized() and dist.get_world_size() > 1:\n+ # consider DP\n+ unfinished_sequences = unfinished_sequences.clone()\n+ dist.all_reduce(unfinished_sequences)\n+ return unfinished_sequences.max() == 0\n+\n+\n def sample(model: nn.Module,\n input_ids: torch.Tensor,\n max_length: int,\n@@ -74,7 +83,7 @@\n unfinished_sequences = unfinished_sequences.mul((next_tokens != eos_token_id).long())\n \n # stop when each sentence is finished if early_stopping=True\n- if early_stopping and unfinished_sequences.max() == 0:\n+ if early_stopping and _is_sequence_finished(unfinished_sequences):\n break\n \n return input_ids\ndiff --git a/applications/ChatGPT/examples/train_prompts.py b/applications/ChatGPT/examples/train_prompts.py\n--- a/applications/ChatGPT/examples/train_prompts.py\n+++ b/applications/ChatGPT/examples/train_prompts.py\n@@ -46,7 +46,6 @@\n initial_model = deepcopy(actor)\n reward_model = RewardModel(deepcopy(critic.model), deepcopy(critic.value_head)).to(torch.cuda.current_device())\n \n-\n # configure optimizer\n if args.strategy.startswith('colossalai'):\n actor_optim = HybridAdam(actor.parameters(), lr=5e-6)\n@@ -70,7 +69,9 @@\n dataset = pd.read_csv(args.prompt_path)['prompt']\n \n def tokenize_fn(texts):\n- batch = tokenizer(texts, return_tensors='pt', max_length=96, padding=True, truncation=True)\n+ # MUST padding to max length to ensure inputs of all ranks have the same length\n+ # Different length may lead to hang when using gemini, as different generation steps\n+ batch = tokenizer(texts, return_tensors='pt', max_length=96, padding='max_length', truncation=True)\n return {k: v.cuda() for k, v in batch.items()}\n \n (actor, actor_optim), (critic, critic_optim), reward_model, initial_model = strategy.prepare(\n@@ -101,7 +102,7 @@\n num_episodes=args.num_episodes,\n max_timesteps=args.max_timesteps,\n update_timesteps=args.update_timesteps)\n- # save model checkpoint after fitting \n+ # save model checkpoint after fitting\n strategy.save_model(actor, args.save_path, only_rank0=True)\n # save optimizer checkpoint on all ranks\n if args.need_optim_ckpt:\n", "issue": "[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n[BUG]: chatgpt ppo training hangs when using gemini\n### \ud83d\udc1b Describe the bug\n\n## Problem\r\n\r\nRunning `ChatGPT/examples/train_prompts.py`, I found sometimes the training hangs when using Gemini.\r\n\r\nThis occurs when randomly while changing batch size.\r\n\r\n## Possible reason\r\n\r\nI found the [padding policy](https://huggingface.co/docs/transformers/main_classes/tokenizer#transformers.PreTrainedTokenizer.__call__.padding) is to pad to the longest sequence in the batch.\r\nIn DDP scheme, different process may have different input lengths due to random sampling. That is to say, they may have different generation steps.\r\n\r\nWhen using Gemini, which need communication during forward, different forward steps leads to different number of communication calls. And this asymmetric communication leads to hang.\r\n\r\n## Possible solution\r\n\r\nChange padding policy to `'max_length'`, see [huggingface tokenizer doc](https://huggingface.co/docs/transformers/main_classes/tokenizer#transformers.PreTrainedTokenizer.__call__.padding) for more details.\r\n\r\nIn addition, when enabling early stopping, we should also consider DDP and ensure the number of generation steps of each i process is the same.\r\n\r\n\n\n### Environment\n\n_No response_\n", "before_files": [{"content": "from typing import Any, Callable, Optional\n\nimport torch\nimport torch.nn as nn\n\ntry:\n from transformers.generation_logits_process import (\n LogitsProcessorList,\n TemperatureLogitsWarper,\n TopKLogitsWarper,\n TopPLogitsWarper,\n )\nexcept ImportError:\n from transformers.generation import LogitsProcessorList, TemperatureLogitsWarper, TopKLogitsWarper, TopPLogitsWarper\n\n\ndef prepare_logits_processor(top_k: Optional[int] = None,\n top_p: Optional[float] = None,\n temperature: Optional[float] = None) -> LogitsProcessorList:\n processor_list = LogitsProcessorList()\n if temperature is not None and temperature != 1.0:\n processor_list.append(TemperatureLogitsWarper(temperature))\n if top_k is not None and top_k != 0:\n processor_list.append(TopKLogitsWarper(top_k))\n if top_p is not None and top_p < 1.0:\n processor_list.append(TopPLogitsWarper(top_p))\n return processor_list\n\n\ndef sample(model: nn.Module,\n input_ids: torch.Tensor,\n max_length: int,\n early_stopping: bool = False,\n eos_token_id: Optional[int] = None,\n pad_token_id: Optional[int] = None,\n top_k: Optional[int] = None,\n top_p: Optional[float] = None,\n temperature: Optional[float] = None,\n prepare_inputs_fn: Optional[Callable[[torch.Tensor, Any], dict]] = None,\n update_model_kwargs_fn: Optional[Callable[[dict, Any], dict]] = None,\n **model_kwargs) -> torch.Tensor:\n if input_ids.size(1) >= max_length:\n return input_ids\n\n logits_processor = prepare_logits_processor(top_k, top_p, temperature)\n unfinished_sequences = input_ids.new(input_ids.shape[0]).fill_(1)\n\n for _ in range(input_ids.size(1), max_length):\n model_inputs = prepare_inputs_fn(input_ids, **model_kwargs) if prepare_inputs_fn is not None else {\n 'input_ids': input_ids\n }\n outputs = model(**model_inputs)\n\n next_token_logits = outputs['logits'][:, -1, :]\n # pre-process distribution\n next_token_logits = logits_processor(input_ids, next_token_logits)\n # sample\n probs = torch.softmax(next_token_logits, dim=-1, dtype=torch.float)\n next_tokens = torch.multinomial(probs, num_samples=1).squeeze(1)\n\n # finished sentences should have their next token be a padding token\n if eos_token_id is not None:\n if pad_token_id is None:\n raise ValueError(\"If `eos_token_id` is defined, make sure that `pad_token_id` is defined.\")\n next_tokens = next_tokens * unfinished_sequences + pad_token_id * (1 - unfinished_sequences)\n\n # update generated ids, model inputs for next step\n input_ids = torch.cat([input_ids, next_tokens[:, None]], dim=-1)\n if update_model_kwargs_fn is not None:\n model_kwargs = update_model_kwargs_fn(outputs, **model_kwargs)\n\n # if eos_token was found in one sentence, set sentence to finished\n if eos_token_id is not None:\n unfinished_sequences = unfinished_sequences.mul((next_tokens != eos_token_id).long())\n\n # stop when each sentence is finished if early_stopping=True\n if early_stopping and unfinished_sequences.max() == 0:\n break\n\n return input_ids\n\n\ndef generate(model: nn.Module,\n input_ids: torch.Tensor,\n max_length: int,\n num_beams: int = 1,\n do_sample: bool = True,\n early_stopping: bool = False,\n eos_token_id: Optional[int] = None,\n pad_token_id: Optional[int] = None,\n top_k: Optional[int] = None,\n top_p: Optional[float] = None,\n temperature: Optional[float] = None,\n prepare_inputs_fn: Optional[Callable[[torch.Tensor, Any], dict]] = None,\n update_model_kwargs_fn: Optional[Callable[[dict, Any], dict]] = None,\n **model_kwargs) -> torch.Tensor:\n \"\"\"Generate token sequence. The returned sequence is input_ids + generated_tokens.\n\n Args:\n model (nn.Module): model\n input_ids (torch.Tensor): input sequence\n max_length (int): max length of the returned sequence\n num_beams (int, optional): number of beams. Defaults to 1.\n do_sample (bool, optional): whether to do sample. Defaults to True.\n early_stopping (bool, optional): if True, the sequence length may be smaller than max_length due to finding eos. Defaults to False.\n eos_token_id (Optional[int], optional): end of sequence token id. Defaults to None.\n pad_token_id (Optional[int], optional): pad token id. Defaults to None.\n top_k (Optional[int], optional): the number of highest probability vocabulary tokens to keep for top-k-filtering. Defaults to None.\n top_p (Optional[float], optional): If set to float < 1, only the smallest set of most probable tokens with probabilities that add up to top_p or higher are kept for generation. Defaults to None.\n temperature (Optional[float], optional): The value used to module the next token probabilities. Defaults to None.\n prepare_inputs_fn (Optional[Callable[[torch.Tensor, Any], dict]], optional): Function to preprocess model inputs. Arguments of this function should be input_ids and model_kwargs. Defaults to None.\n update_model_kwargs_fn (Optional[Callable[[dict, Any], dict]], optional): Function to update model_kwargs based on outputs. Arguments of this function should be outputs and model_kwargs. Defaults to None.\n \"\"\"\n is_greedy_gen_mode = ((num_beams == 1) and do_sample is False)\n is_sample_gen_mode = ((num_beams == 1) and do_sample is True)\n is_beam_gen_mode = ((num_beams > 1) and do_sample is False)\n if is_greedy_gen_mode:\n # run greedy search\n raise NotImplementedError\n elif is_sample_gen_mode:\n # run sample\n return sample(model,\n input_ids,\n max_length,\n early_stopping=early_stopping,\n eos_token_id=eos_token_id,\n pad_token_id=pad_token_id,\n top_k=top_k,\n top_p=top_p,\n temperature=temperature,\n prepare_inputs_fn=prepare_inputs_fn,\n update_model_kwargs_fn=update_model_kwargs_fn,\n **model_kwargs)\n elif is_beam_gen_mode:\n raise NotImplementedError\n else:\n raise ValueError(\"Unsupported generation mode\")\n", "path": "applications/ChatGPT/chatgpt/models/generation.py"}, {"content": "import argparse\nfrom copy import deepcopy\n\nimport pandas as pd\nimport torch\nfrom chatgpt.models.base import RewardModel\nfrom chatgpt.models.bloom import BLOOMActor, BLOOMCritic\nfrom chatgpt.models.gpt import GPTActor, GPTCritic\nfrom chatgpt.models.opt import OPTActor, OPTCritic\nfrom chatgpt.trainer import PPOTrainer\nfrom chatgpt.trainer.strategies import ColossalAIStrategy, DDPStrategy, NaiveStrategy\nfrom torch.optim import Adam\nfrom transformers import AutoTokenizer, BloomTokenizerFast\nfrom transformers.models.gpt2.tokenization_gpt2 import GPT2Tokenizer\n\nfrom colossalai.nn.optimizer import HybridAdam\n\n\ndef main(args):\n # configure strategy\n if args.strategy == 'naive':\n strategy = NaiveStrategy()\n elif args.strategy == 'ddp':\n strategy = DDPStrategy()\n elif args.strategy == 'colossalai_gemini':\n strategy = ColossalAIStrategy(stage=3, placement_policy='cuda', initial_scale=2**5)\n elif args.strategy == 'colossalai_zero2':\n strategy = ColossalAIStrategy(stage=2, placement_policy='cuda')\n else:\n raise ValueError(f'Unsupported strategy \"{args.strategy}\"')\n\n # configure model\n with strategy.model_init_context():\n if args.model == 'gpt2':\n actor = GPTActor(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n critic = GPTCritic(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n elif args.model == 'bloom':\n actor = BLOOMActor(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n critic = BLOOMCritic(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n elif args.model == 'opt':\n actor = OPTActor(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n critic = OPTCritic(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n else:\n raise ValueError(f'Unsupported model \"{args.model}\"')\n\n initial_model = deepcopy(actor)\n reward_model = RewardModel(deepcopy(critic.model), deepcopy(critic.value_head)).to(torch.cuda.current_device())\n\n\n # configure optimizer\n if args.strategy.startswith('colossalai'):\n actor_optim = HybridAdam(actor.parameters(), lr=5e-6)\n critic_optim = HybridAdam(critic.parameters(), lr=5e-6)\n else:\n actor_optim = Adam(actor.parameters(), lr=5e-6)\n critic_optim = Adam(critic.parameters(), lr=5e-6)\n\n # configure tokenizer\n if args.model == 'gpt2':\n tokenizer = GPT2Tokenizer.from_pretrained('gpt2')\n tokenizer.pad_token = tokenizer.eos_token\n elif args.model == 'bloom':\n tokenizer = BloomTokenizerFast.from_pretrained(args.pretrain)\n tokenizer.pad_token = tokenizer.eos_token\n elif args.model == 'opt':\n tokenizer = AutoTokenizer.from_pretrained(\"facebook/opt-350m\")\n else:\n raise ValueError(f'Unsupported model \"{args.model}\"')\n\n dataset = pd.read_csv(args.prompt_path)['prompt']\n\n def tokenize_fn(texts):\n batch = tokenizer(texts, return_tensors='pt', max_length=96, padding=True, truncation=True)\n return {k: v.cuda() for k, v in batch.items()}\n\n (actor, actor_optim), (critic, critic_optim), reward_model, initial_model = strategy.prepare(\n (actor, actor_optim), (critic, critic_optim), reward_model, initial_model)\n\n # configure trainer\n trainer = PPOTrainer(\n strategy,\n actor,\n critic,\n reward_model,\n initial_model,\n actor_optim,\n critic_optim,\n max_epochs=args.max_epochs,\n train_batch_size=args.train_batch_size,\n experience_batch_size=args.experience_batch_size,\n tokenizer=tokenize_fn,\n max_length=128,\n do_sample=True,\n temperature=1.0,\n top_k=50,\n pad_token_id=tokenizer.pad_token_id,\n eos_token_id=tokenizer.eos_token_id,\n )\n\n trainer.fit(dataset,\n num_episodes=args.num_episodes,\n max_timesteps=args.max_timesteps,\n update_timesteps=args.update_timesteps)\n # save model checkpoint after fitting \n strategy.save_model(actor, args.save_path, only_rank0=True)\n # save optimizer checkpoint on all ranks\n if args.need_optim_ckpt:\n strategy.save_optimizer(actor_optim,\n 'actor_optim_checkpoint_prompts_%d.pt' % (torch.cuda.current_device()),\n only_rank0=False)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('prompt_path')\n parser.add_argument('--strategy',\n choices=['naive', 'ddp', 'colossalai_gemini', 'colossalai_zero2'],\n default='naive')\n parser.add_argument('--model', default='gpt2', choices=['gpt2', 'bloom', 'opt'])\n parser.add_argument('--pretrain', type=str, default=None)\n parser.add_argument('--save_path', type=str, default='actor_checkpoint_prompts.pt')\n parser.add_argument('--need_optim_ckpt', type=bool, default=False)\n parser.add_argument('--num_episodes', type=int, default=10)\n parser.add_argument('--max_timesteps', type=int, default=10)\n parser.add_argument('--update_timesteps', type=int, default=10)\n parser.add_argument('--max_epochs', type=int, default=5)\n parser.add_argument('--train_batch_size', type=int, default=8)\n parser.add_argument('--experience_batch_size', type=int, default=8)\n parser.add_argument('--lora_rank', type=int, default=0, help=\"low-rank adaptation matrices rank\")\n args = parser.parse_args()\n main(args)\n", "path": "applications/ChatGPT/examples/train_prompts.py"}], "after_files": [{"content": "from typing import Any, Callable, Optional\n\nimport torch\nimport torch.distributed as dist\nimport torch.nn as nn\n\ntry:\n from transformers.generation_logits_process import (\n LogitsProcessorList,\n TemperatureLogitsWarper,\n TopKLogitsWarper,\n TopPLogitsWarper,\n )\nexcept ImportError:\n from transformers.generation import LogitsProcessorList, TemperatureLogitsWarper, TopKLogitsWarper, TopPLogitsWarper\n\n\ndef prepare_logits_processor(top_k: Optional[int] = None,\n top_p: Optional[float] = None,\n temperature: Optional[float] = None) -> LogitsProcessorList:\n processor_list = LogitsProcessorList()\n if temperature is not None and temperature != 1.0:\n processor_list.append(TemperatureLogitsWarper(temperature))\n if top_k is not None and top_k != 0:\n processor_list.append(TopKLogitsWarper(top_k))\n if top_p is not None and top_p < 1.0:\n processor_list.append(TopPLogitsWarper(top_p))\n return processor_list\n\n\ndef _is_sequence_finished(unfinished_sequences: torch.Tensor) -> bool:\n if dist.is_initialized() and dist.get_world_size() > 1:\n # consider DP\n unfinished_sequences = unfinished_sequences.clone()\n dist.all_reduce(unfinished_sequences)\n return unfinished_sequences.max() == 0\n\n\ndef sample(model: nn.Module,\n input_ids: torch.Tensor,\n max_length: int,\n early_stopping: bool = False,\n eos_token_id: Optional[int] = None,\n pad_token_id: Optional[int] = None,\n top_k: Optional[int] = None,\n top_p: Optional[float] = None,\n temperature: Optional[float] = None,\n prepare_inputs_fn: Optional[Callable[[torch.Tensor, Any], dict]] = None,\n update_model_kwargs_fn: Optional[Callable[[dict, Any], dict]] = None,\n **model_kwargs) -> torch.Tensor:\n if input_ids.size(1) >= max_length:\n return input_ids\n\n logits_processor = prepare_logits_processor(top_k, top_p, temperature)\n unfinished_sequences = input_ids.new(input_ids.shape[0]).fill_(1)\n\n for _ in range(input_ids.size(1), max_length):\n model_inputs = prepare_inputs_fn(input_ids, **model_kwargs) if prepare_inputs_fn is not None else {\n 'input_ids': input_ids\n }\n outputs = model(**model_inputs)\n\n next_token_logits = outputs['logits'][:, -1, :]\n # pre-process distribution\n next_token_logits = logits_processor(input_ids, next_token_logits)\n # sample\n probs = torch.softmax(next_token_logits, dim=-1, dtype=torch.float)\n next_tokens = torch.multinomial(probs, num_samples=1).squeeze(1)\n\n # finished sentences should have their next token be a padding token\n if eos_token_id is not None:\n if pad_token_id is None:\n raise ValueError(\"If `eos_token_id` is defined, make sure that `pad_token_id` is defined.\")\n next_tokens = next_tokens * unfinished_sequences + pad_token_id * (1 - unfinished_sequences)\n\n # update generated ids, model inputs for next step\n input_ids = torch.cat([input_ids, next_tokens[:, None]], dim=-1)\n if update_model_kwargs_fn is not None:\n model_kwargs = update_model_kwargs_fn(outputs, **model_kwargs)\n\n # if eos_token was found in one sentence, set sentence to finished\n if eos_token_id is not None:\n unfinished_sequences = unfinished_sequences.mul((next_tokens != eos_token_id).long())\n\n # stop when each sentence is finished if early_stopping=True\n if early_stopping and _is_sequence_finished(unfinished_sequences):\n break\n\n return input_ids\n\n\ndef generate(model: nn.Module,\n input_ids: torch.Tensor,\n max_length: int,\n num_beams: int = 1,\n do_sample: bool = True,\n early_stopping: bool = False,\n eos_token_id: Optional[int] = None,\n pad_token_id: Optional[int] = None,\n top_k: Optional[int] = None,\n top_p: Optional[float] = None,\n temperature: Optional[float] = None,\n prepare_inputs_fn: Optional[Callable[[torch.Tensor, Any], dict]] = None,\n update_model_kwargs_fn: Optional[Callable[[dict, Any], dict]] = None,\n **model_kwargs) -> torch.Tensor:\n \"\"\"Generate token sequence. The returned sequence is input_ids + generated_tokens.\n\n Args:\n model (nn.Module): model\n input_ids (torch.Tensor): input sequence\n max_length (int): max length of the returned sequence\n num_beams (int, optional): number of beams. Defaults to 1.\n do_sample (bool, optional): whether to do sample. Defaults to True.\n early_stopping (bool, optional): if True, the sequence length may be smaller than max_length due to finding eos. Defaults to False.\n eos_token_id (Optional[int], optional): end of sequence token id. Defaults to None.\n pad_token_id (Optional[int], optional): pad token id. Defaults to None.\n top_k (Optional[int], optional): the number of highest probability vocabulary tokens to keep for top-k-filtering. Defaults to None.\n top_p (Optional[float], optional): If set to float < 1, only the smallest set of most probable tokens with probabilities that add up to top_p or higher are kept for generation. Defaults to None.\n temperature (Optional[float], optional): The value used to module the next token probabilities. Defaults to None.\n prepare_inputs_fn (Optional[Callable[[torch.Tensor, Any], dict]], optional): Function to preprocess model inputs. Arguments of this function should be input_ids and model_kwargs. Defaults to None.\n update_model_kwargs_fn (Optional[Callable[[dict, Any], dict]], optional): Function to update model_kwargs based on outputs. Arguments of this function should be outputs and model_kwargs. Defaults to None.\n \"\"\"\n is_greedy_gen_mode = ((num_beams == 1) and do_sample is False)\n is_sample_gen_mode = ((num_beams == 1) and do_sample is True)\n is_beam_gen_mode = ((num_beams > 1) and do_sample is False)\n if is_greedy_gen_mode:\n # run greedy search\n raise NotImplementedError\n elif is_sample_gen_mode:\n # run sample\n return sample(model,\n input_ids,\n max_length,\n early_stopping=early_stopping,\n eos_token_id=eos_token_id,\n pad_token_id=pad_token_id,\n top_k=top_k,\n top_p=top_p,\n temperature=temperature,\n prepare_inputs_fn=prepare_inputs_fn,\n update_model_kwargs_fn=update_model_kwargs_fn,\n **model_kwargs)\n elif is_beam_gen_mode:\n raise NotImplementedError\n else:\n raise ValueError(\"Unsupported generation mode\")\n", "path": "applications/ChatGPT/chatgpt/models/generation.py"}, {"content": "import argparse\nfrom copy import deepcopy\n\nimport pandas as pd\nimport torch\nfrom chatgpt.models.base import RewardModel\nfrom chatgpt.models.bloom import BLOOMActor, BLOOMCritic\nfrom chatgpt.models.gpt import GPTActor, GPTCritic\nfrom chatgpt.models.opt import OPTActor, OPTCritic\nfrom chatgpt.trainer import PPOTrainer\nfrom chatgpt.trainer.strategies import ColossalAIStrategy, DDPStrategy, NaiveStrategy\nfrom torch.optim import Adam\nfrom transformers import AutoTokenizer, BloomTokenizerFast\nfrom transformers.models.gpt2.tokenization_gpt2 import GPT2Tokenizer\n\nfrom colossalai.nn.optimizer import HybridAdam\n\n\ndef main(args):\n # configure strategy\n if args.strategy == 'naive':\n strategy = NaiveStrategy()\n elif args.strategy == 'ddp':\n strategy = DDPStrategy()\n elif args.strategy == 'colossalai_gemini':\n strategy = ColossalAIStrategy(stage=3, placement_policy='cuda', initial_scale=2**5)\n elif args.strategy == 'colossalai_zero2':\n strategy = ColossalAIStrategy(stage=2, placement_policy='cuda')\n else:\n raise ValueError(f'Unsupported strategy \"{args.strategy}\"')\n\n # configure model\n with strategy.model_init_context():\n if args.model == 'gpt2':\n actor = GPTActor(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n critic = GPTCritic(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n elif args.model == 'bloom':\n actor = BLOOMActor(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n critic = BLOOMCritic(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n elif args.model == 'opt':\n actor = OPTActor(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n critic = OPTCritic(pretrained=args.pretrain, lora_rank=args.lora_rank).to(torch.cuda.current_device())\n else:\n raise ValueError(f'Unsupported model \"{args.model}\"')\n\n initial_model = deepcopy(actor)\n reward_model = RewardModel(deepcopy(critic.model), deepcopy(critic.value_head)).to(torch.cuda.current_device())\n\n # configure optimizer\n if args.strategy.startswith('colossalai'):\n actor_optim = HybridAdam(actor.parameters(), lr=5e-6)\n critic_optim = HybridAdam(critic.parameters(), lr=5e-6)\n else:\n actor_optim = Adam(actor.parameters(), lr=5e-6)\n critic_optim = Adam(critic.parameters(), lr=5e-6)\n\n # configure tokenizer\n if args.model == 'gpt2':\n tokenizer = GPT2Tokenizer.from_pretrained('gpt2')\n tokenizer.pad_token = tokenizer.eos_token\n elif args.model == 'bloom':\n tokenizer = BloomTokenizerFast.from_pretrained(args.pretrain)\n tokenizer.pad_token = tokenizer.eos_token\n elif args.model == 'opt':\n tokenizer = AutoTokenizer.from_pretrained(\"facebook/opt-350m\")\n else:\n raise ValueError(f'Unsupported model \"{args.model}\"')\n\n dataset = pd.read_csv(args.prompt_path)['prompt']\n\n def tokenize_fn(texts):\n # MUST padding to max length to ensure inputs of all ranks have the same length\n # Different length may lead to hang when using gemini, as different generation steps\n batch = tokenizer(texts, return_tensors='pt', max_length=96, padding='max_length', truncation=True)\n return {k: v.cuda() for k, v in batch.items()}\n\n (actor, actor_optim), (critic, critic_optim), reward_model, initial_model = strategy.prepare(\n (actor, actor_optim), (critic, critic_optim), reward_model, initial_model)\n\n # configure trainer\n trainer = PPOTrainer(\n strategy,\n actor,\n critic,\n reward_model,\n initial_model,\n actor_optim,\n critic_optim,\n max_epochs=args.max_epochs,\n train_batch_size=args.train_batch_size,\n experience_batch_size=args.experience_batch_size,\n tokenizer=tokenize_fn,\n max_length=128,\n do_sample=True,\n temperature=1.0,\n top_k=50,\n pad_token_id=tokenizer.pad_token_id,\n eos_token_id=tokenizer.eos_token_id,\n )\n\n trainer.fit(dataset,\n num_episodes=args.num_episodes,\n max_timesteps=args.max_timesteps,\n update_timesteps=args.update_timesteps)\n # save model checkpoint after fitting\n strategy.save_model(actor, args.save_path, only_rank0=True)\n # save optimizer checkpoint on all ranks\n if args.need_optim_ckpt:\n strategy.save_optimizer(actor_optim,\n 'actor_optim_checkpoint_prompts_%d.pt' % (torch.cuda.current_device()),\n only_rank0=False)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('prompt_path')\n parser.add_argument('--strategy',\n choices=['naive', 'ddp', 'colossalai_gemini', 'colossalai_zero2'],\n default='naive')\n parser.add_argument('--model', default='gpt2', choices=['gpt2', 'bloom', 'opt'])\n parser.add_argument('--pretrain', type=str, default=None)\n parser.add_argument('--save_path', type=str, default='actor_checkpoint_prompts.pt')\n parser.add_argument('--need_optim_ckpt', type=bool, default=False)\n parser.add_argument('--num_episodes', type=int, default=10)\n parser.add_argument('--max_timesteps', type=int, default=10)\n parser.add_argument('--update_timesteps', type=int, default=10)\n parser.add_argument('--max_epochs', type=int, default=5)\n parser.add_argument('--train_batch_size', type=int, default=8)\n parser.add_argument('--experience_batch_size', type=int, default=8)\n parser.add_argument('--lora_rank', type=int, default=0, help=\"low-rank adaptation matrices rank\")\n args = parser.parse_args()\n main(args)\n", "path": "applications/ChatGPT/examples/train_prompts.py"}]}
| 3,930 | 684 |
gh_patches_debug_36794
|
rasdani/github-patches
|
git_diff
|
Pylons__pyramid-705
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
request_param in @view_config should support tuples
request_param should support tuples just as match_param does. Among other things, this would be very useful for oauth where multiple parameters are required to be present for a valid request.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyramid/config/predicates.py`
Content:
```
1 import re
2
3 from pyramid.compat import is_nonstr_iter
4
5 from pyramid.exceptions import ConfigurationError
6
7 from pyramid.traversal import (
8 find_interface,
9 traversal_path,
10 )
11
12 from pyramid.urldispatch import _compile_route
13
14 from pyramid.util import object_description
15
16 from pyramid.session import check_csrf_token
17
18 from .util import as_sorted_tuple
19
20 class XHRPredicate(object):
21 def __init__(self, val, config):
22 self.val = bool(val)
23
24 def text(self):
25 return 'xhr = %s' % self.val
26
27 phash = text
28
29 def __call__(self, context, request):
30 return bool(request.is_xhr) is self.val
31
32 class RequestMethodPredicate(object):
33 def __init__(self, val, config):
34 request_method = as_sorted_tuple(val)
35 if 'GET' in request_method and 'HEAD' not in request_method:
36 # GET implies HEAD too
37 request_method = as_sorted_tuple(request_method + ('HEAD',))
38 self.val = request_method
39
40 def text(self):
41 return 'request_method = %s' % (','.join(self.val))
42
43 phash = text
44
45 def __call__(self, context, request):
46 return request.method in self.val
47
48 class PathInfoPredicate(object):
49 def __init__(self, val, config):
50 self.orig = val
51 try:
52 val = re.compile(val)
53 except re.error as why:
54 raise ConfigurationError(why.args[0])
55 self.val = val
56
57 def text(self):
58 return 'path_info = %s' % (self.orig,)
59
60 phash = text
61
62 def __call__(self, context, request):
63 return self.val.match(request.upath_info) is not None
64
65 class RequestParamPredicate(object):
66 def __init__(self, val, config):
67 name = val
68 v = None
69 if '=' in name:
70 name, v = name.split('=', 1)
71 name, v = name.strip(), v.strip()
72 if v is None:
73 self._text = 'request_param %s' % (name,)
74 else:
75 self._text = 'request_param %s = %s' % (name, v)
76 self.name = name
77 self.val = v
78
79 def text(self):
80 return self._text
81
82 phash = text
83
84 def __call__(self, context, request):
85 if self.val is None:
86 return self.name in request.params
87 return request.params.get(self.name) == self.val
88
89
90 class HeaderPredicate(object):
91 def __init__(self, val, config):
92 name = val
93 v = None
94 if ':' in name:
95 name, v = name.split(':', 1)
96 try:
97 v = re.compile(v)
98 except re.error as why:
99 raise ConfigurationError(why.args[0])
100 if v is None:
101 self._text = 'header %s' % (name,)
102 else:
103 self._text = 'header %s = %s' % (name, v)
104 self.name = name
105 self.val = v
106
107 def text(self):
108 return self._text
109
110 phash = text
111
112 def __call__(self, context, request):
113 if self.val is None:
114 return self.name in request.headers
115 val = request.headers.get(self.name)
116 if val is None:
117 return False
118 return self.val.match(val) is not None
119
120 class AcceptPredicate(object):
121 def __init__(self, val, config):
122 self.val = val
123
124 def text(self):
125 return 'accept = %s' % (self.val,)
126
127 phash = text
128
129 def __call__(self, context, request):
130 return self.val in request.accept
131
132 class ContainmentPredicate(object):
133 def __init__(self, val, config):
134 self.val = config.maybe_dotted(val)
135
136 def text(self):
137 return 'containment = %s' % (self.val,)
138
139 phash = text
140
141 def __call__(self, context, request):
142 ctx = getattr(request, 'context', context)
143 return find_interface(ctx, self.val) is not None
144
145 class RequestTypePredicate(object):
146 def __init__(self, val, config):
147 self.val = val
148
149 def text(self):
150 return 'request_type = %s' % (self.val,)
151
152 phash = text
153
154 def __call__(self, context, request):
155 return self.val.providedBy(request)
156
157 class MatchParamPredicate(object):
158 def __init__(self, val, config):
159 if not is_nonstr_iter(val):
160 val = (val,)
161 val = sorted(val)
162 self.val = val
163 reqs = [ p.split('=', 1) for p in val ]
164 self.reqs = [ (x.strip(), y.strip()) for x, y in reqs ]
165
166 def text(self):
167 return 'match_param %s' % ','.join(
168 ['%s=%s' % (x,y) for x, y in self.reqs]
169 )
170
171 phash = text
172
173 def __call__(self, context, request):
174 for k, v in self.reqs:
175 if request.matchdict.get(k) != v:
176 return False
177 return True
178
179 class CustomPredicate(object):
180 def __init__(self, func, config):
181 self.func = func
182
183 def text(self):
184 return getattr(
185 self.func,
186 '__text__',
187 'custom predicate: %s' % object_description(self.func)
188 )
189
190 def phash(self):
191 # using hash() here rather than id() is intentional: we
192 # want to allow custom predicates that are part of
193 # frameworks to be able to define custom __hash__
194 # functions for custom predicates, so that the hash output
195 # of predicate instances which are "logically the same"
196 # may compare equal.
197 return 'custom:%r' % hash(self.func)
198
199 def __call__(self, context, request):
200 return self.func(context, request)
201
202
203 class TraversePredicate(object):
204 # Can only be used as a *route* "predicate"; it adds 'traverse' to the
205 # matchdict if it's specified in the routing args. This causes the
206 # ResourceTreeTraverser to use the resolved traverse pattern as the
207 # traversal path.
208 def __init__(self, val, config):
209 _, self.tgenerate = _compile_route(val)
210 self.val = val
211
212 def text(self):
213 return 'traverse matchdict pseudo-predicate'
214
215 def phash(self):
216 # This isn't actually a predicate, it's just a infodict modifier that
217 # injects ``traverse`` into the matchdict. As a result, we don't
218 # need to update the hash.
219 return ''
220
221 def __call__(self, context, request):
222 if 'traverse' in context:
223 return True
224 m = context['match']
225 tvalue = self.tgenerate(m) # tvalue will be urlquoted string
226 m['traverse'] = traversal_path(tvalue)
227 # This isn't actually a predicate, it's just a infodict modifier that
228 # injects ``traverse`` into the matchdict. As a result, we just
229 # return True.
230 return True
231
232 class CheckCSRFTokenPredicate(object):
233
234 check_csrf_token = staticmethod(check_csrf_token) # testing
235
236 def __init__(self, val, config):
237 self.val = val
238
239 def text(self):
240 return 'check_csrf = %s' % (self.val,)
241
242 phash = text
243
244 def __call__(self, context, request):
245 val = self.val
246 if val:
247 if val is True:
248 val = 'csrf_token'
249 return self.check_csrf_token(request, val, raises=False)
250 return True
251
252
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pyramid/config/predicates.py b/pyramid/config/predicates.py
--- a/pyramid/config/predicates.py
+++ b/pyramid/config/predicates.py
@@ -64,43 +64,48 @@
class RequestParamPredicate(object):
def __init__(self, val, config):
- name = val
- v = None
- if '=' in name:
- name, v = name.split('=', 1)
- name, v = name.strip(), v.strip()
- if v is None:
- self._text = 'request_param %s' % (name,)
- else:
- self._text = 'request_param %s = %s' % (name, v)
- self.name = name
- self.val = v
+ val = as_sorted_tuple(val)
+ reqs = []
+ for p in val:
+ k = p
+ v = None
+ if '=' in p:
+ k, v = p.split('=', 1)
+ k, v = k.strip(), v.strip()
+ reqs.append((k, v))
+ self.val = val
+ self.reqs = reqs
def text(self):
- return self._text
+ return 'request_param %s' % ','.join(
+ ['%s=%s' % (x,y) if y else x for x, y in self.reqs]
+ )
phash = text
def __call__(self, context, request):
- if self.val is None:
- return self.name in request.params
- return request.params.get(self.name) == self.val
-
+ for k, v in self.reqs:
+ actual = request.params.get(k)
+ if actual is None:
+ return False
+ if v is not None and actual != v:
+ return False
+ return True
class HeaderPredicate(object):
def __init__(self, val, config):
name = val
v = None
if ':' in name:
- name, v = name.split(':', 1)
+ name, val_str = name.split(':', 1)
try:
- v = re.compile(v)
+ v = re.compile(val_str)
except re.error as why:
raise ConfigurationError(why.args[0])
if v is None:
self._text = 'header %s' % (name,)
else:
- self._text = 'header %s = %s' % (name, v)
+ self._text = 'header %s=%s' % (name, val_str)
self.name = name
self.val = v
@@ -156,9 +161,7 @@
class MatchParamPredicate(object):
def __init__(self, val, config):
- if not is_nonstr_iter(val):
- val = (val,)
- val = sorted(val)
+ val = as_sorted_tuple(val)
self.val = val
reqs = [ p.split('=', 1) for p in val ]
self.reqs = [ (x.strip(), y.strip()) for x, y in reqs ]
|
{"golden_diff": "diff --git a/pyramid/config/predicates.py b/pyramid/config/predicates.py\n--- a/pyramid/config/predicates.py\n+++ b/pyramid/config/predicates.py\n@@ -64,43 +64,48 @@\n \n class RequestParamPredicate(object):\n def __init__(self, val, config):\n- name = val\n- v = None\n- if '=' in name:\n- name, v = name.split('=', 1)\n- name, v = name.strip(), v.strip()\n- if v is None:\n- self._text = 'request_param %s' % (name,)\n- else:\n- self._text = 'request_param %s = %s' % (name, v)\n- self.name = name\n- self.val = v\n+ val = as_sorted_tuple(val)\n+ reqs = []\n+ for p in val:\n+ k = p\n+ v = None\n+ if '=' in p:\n+ k, v = p.split('=', 1)\n+ k, v = k.strip(), v.strip()\n+ reqs.append((k, v))\n+ self.val = val\n+ self.reqs = reqs\n \n def text(self):\n- return self._text\n+ return 'request_param %s' % ','.join(\n+ ['%s=%s' % (x,y) if y else x for x, y in self.reqs]\n+ )\n \n phash = text\n \n def __call__(self, context, request):\n- if self.val is None:\n- return self.name in request.params\n- return request.params.get(self.name) == self.val\n- \n+ for k, v in self.reqs:\n+ actual = request.params.get(k)\n+ if actual is None:\n+ return False\n+ if v is not None and actual != v:\n+ return False\n+ return True\n \n class HeaderPredicate(object):\n def __init__(self, val, config):\n name = val\n v = None\n if ':' in name:\n- name, v = name.split(':', 1)\n+ name, val_str = name.split(':', 1)\n try:\n- v = re.compile(v)\n+ v = re.compile(val_str)\n except re.error as why:\n raise ConfigurationError(why.args[0])\n if v is None:\n self._text = 'header %s' % (name,)\n else:\n- self._text = 'header %s = %s' % (name, v)\n+ self._text = 'header %s=%s' % (name, val_str)\n self.name = name\n self.val = v\n \n@@ -156,9 +161,7 @@\n \n class MatchParamPredicate(object):\n def __init__(self, val, config):\n- if not is_nonstr_iter(val):\n- val = (val,)\n- val = sorted(val)\n+ val = as_sorted_tuple(val)\n self.val = val\n reqs = [ p.split('=', 1) for p in val ]\n self.reqs = [ (x.strip(), y.strip()) for x, y in reqs ]\n", "issue": "request_param in @view_config should support tuples\nrequest_param should support tuples just as match_param does. Among other things, this would be very useful for oauth where multiple parameters are required to be present for a valid request.\n\n", "before_files": [{"content": "import re\n\nfrom pyramid.compat import is_nonstr_iter\n\nfrom pyramid.exceptions import ConfigurationError\n\nfrom pyramid.traversal import (\n find_interface,\n traversal_path,\n )\n\nfrom pyramid.urldispatch import _compile_route\n\nfrom pyramid.util import object_description\n\nfrom pyramid.session import check_csrf_token\n\nfrom .util import as_sorted_tuple\n\nclass XHRPredicate(object):\n def __init__(self, val, config):\n self.val = bool(val)\n\n def text(self):\n return 'xhr = %s' % self.val\n\n phash = text\n\n def __call__(self, context, request):\n return bool(request.is_xhr) is self.val\n\nclass RequestMethodPredicate(object):\n def __init__(self, val, config):\n request_method = as_sorted_tuple(val)\n if 'GET' in request_method and 'HEAD' not in request_method:\n # GET implies HEAD too\n request_method = as_sorted_tuple(request_method + ('HEAD',))\n self.val = request_method\n\n def text(self):\n return 'request_method = %s' % (','.join(self.val))\n\n phash = text\n\n def __call__(self, context, request):\n return request.method in self.val\n\nclass PathInfoPredicate(object):\n def __init__(self, val, config):\n self.orig = val\n try:\n val = re.compile(val)\n except re.error as why:\n raise ConfigurationError(why.args[0])\n self.val = val\n\n def text(self):\n return 'path_info = %s' % (self.orig,)\n\n phash = text\n\n def __call__(self, context, request):\n return self.val.match(request.upath_info) is not None\n \nclass RequestParamPredicate(object):\n def __init__(self, val, config):\n name = val\n v = None\n if '=' in name:\n name, v = name.split('=', 1)\n name, v = name.strip(), v.strip()\n if v is None:\n self._text = 'request_param %s' % (name,)\n else:\n self._text = 'request_param %s = %s' % (name, v)\n self.name = name\n self.val = v\n\n def text(self):\n return self._text\n\n phash = text\n\n def __call__(self, context, request):\n if self.val is None:\n return self.name in request.params\n return request.params.get(self.name) == self.val\n \n\nclass HeaderPredicate(object):\n def __init__(self, val, config):\n name = val\n v = None\n if ':' in name:\n name, v = name.split(':', 1)\n try:\n v = re.compile(v)\n except re.error as why:\n raise ConfigurationError(why.args[0])\n if v is None:\n self._text = 'header %s' % (name,)\n else:\n self._text = 'header %s = %s' % (name, v)\n self.name = name\n self.val = v\n\n def text(self):\n return self._text\n\n phash = text\n\n def __call__(self, context, request):\n if self.val is None:\n return self.name in request.headers\n val = request.headers.get(self.name)\n if val is None:\n return False\n return self.val.match(val) is not None\n\nclass AcceptPredicate(object):\n def __init__(self, val, config):\n self.val = val\n\n def text(self):\n return 'accept = %s' % (self.val,)\n\n phash = text\n\n def __call__(self, context, request):\n return self.val in request.accept\n\nclass ContainmentPredicate(object):\n def __init__(self, val, config):\n self.val = config.maybe_dotted(val)\n\n def text(self):\n return 'containment = %s' % (self.val,)\n\n phash = text\n\n def __call__(self, context, request):\n ctx = getattr(request, 'context', context)\n return find_interface(ctx, self.val) is not None\n \nclass RequestTypePredicate(object):\n def __init__(self, val, config):\n self.val = val\n\n def text(self):\n return 'request_type = %s' % (self.val,)\n\n phash = text\n\n def __call__(self, context, request):\n return self.val.providedBy(request)\n \nclass MatchParamPredicate(object):\n def __init__(self, val, config):\n if not is_nonstr_iter(val):\n val = (val,)\n val = sorted(val)\n self.val = val\n reqs = [ p.split('=', 1) for p in val ]\n self.reqs = [ (x.strip(), y.strip()) for x, y in reqs ]\n\n def text(self):\n return 'match_param %s' % ','.join(\n ['%s=%s' % (x,y) for x, y in self.reqs]\n )\n\n phash = text\n\n def __call__(self, context, request):\n for k, v in self.reqs:\n if request.matchdict.get(k) != v:\n return False\n return True\n \nclass CustomPredicate(object):\n def __init__(self, func, config):\n self.func = func\n\n def text(self):\n return getattr(\n self.func,\n '__text__',\n 'custom predicate: %s' % object_description(self.func)\n )\n\n def phash(self):\n # using hash() here rather than id() is intentional: we\n # want to allow custom predicates that are part of\n # frameworks to be able to define custom __hash__\n # functions for custom predicates, so that the hash output\n # of predicate instances which are \"logically the same\"\n # may compare equal.\n return 'custom:%r' % hash(self.func)\n\n def __call__(self, context, request):\n return self.func(context, request)\n \n \nclass TraversePredicate(object):\n # Can only be used as a *route* \"predicate\"; it adds 'traverse' to the\n # matchdict if it's specified in the routing args. This causes the\n # ResourceTreeTraverser to use the resolved traverse pattern as the\n # traversal path.\n def __init__(self, val, config):\n _, self.tgenerate = _compile_route(val)\n self.val = val\n \n def text(self):\n return 'traverse matchdict pseudo-predicate'\n\n def phash(self):\n # This isn't actually a predicate, it's just a infodict modifier that\n # injects ``traverse`` into the matchdict. As a result, we don't\n # need to update the hash.\n return ''\n\n def __call__(self, context, request):\n if 'traverse' in context:\n return True\n m = context['match']\n tvalue = self.tgenerate(m) # tvalue will be urlquoted string\n m['traverse'] = traversal_path(tvalue)\n # This isn't actually a predicate, it's just a infodict modifier that\n # injects ``traverse`` into the matchdict. As a result, we just\n # return True.\n return True\n\nclass CheckCSRFTokenPredicate(object):\n\n check_csrf_token = staticmethod(check_csrf_token) # testing\n \n def __init__(self, val, config):\n self.val = val\n\n def text(self):\n return 'check_csrf = %s' % (self.val,)\n\n phash = text\n\n def __call__(self, context, request):\n val = self.val\n if val:\n if val is True:\n val = 'csrf_token'\n return self.check_csrf_token(request, val, raises=False)\n return True\n\n", "path": "pyramid/config/predicates.py"}], "after_files": [{"content": "import re\n\nfrom pyramid.compat import is_nonstr_iter\n\nfrom pyramid.exceptions import ConfigurationError\n\nfrom pyramid.traversal import (\n find_interface,\n traversal_path,\n )\n\nfrom pyramid.urldispatch import _compile_route\n\nfrom pyramid.util import object_description\n\nfrom pyramid.session import check_csrf_token\n\nfrom .util import as_sorted_tuple\n\nclass XHRPredicate(object):\n def __init__(self, val, config):\n self.val = bool(val)\n\n def text(self):\n return 'xhr = %s' % self.val\n\n phash = text\n\n def __call__(self, context, request):\n return bool(request.is_xhr) is self.val\n\nclass RequestMethodPredicate(object):\n def __init__(self, val, config):\n request_method = as_sorted_tuple(val)\n if 'GET' in request_method and 'HEAD' not in request_method:\n # GET implies HEAD too\n request_method = as_sorted_tuple(request_method + ('HEAD',))\n self.val = request_method\n\n def text(self):\n return 'request_method = %s' % (','.join(self.val))\n\n phash = text\n\n def __call__(self, context, request):\n return request.method in self.val\n\nclass PathInfoPredicate(object):\n def __init__(self, val, config):\n self.orig = val\n try:\n val = re.compile(val)\n except re.error as why:\n raise ConfigurationError(why.args[0])\n self.val = val\n\n def text(self):\n return 'path_info = %s' % (self.orig,)\n\n phash = text\n\n def __call__(self, context, request):\n return self.val.match(request.upath_info) is not None\n \nclass RequestParamPredicate(object):\n def __init__(self, val, config):\n val = as_sorted_tuple(val)\n reqs = []\n for p in val:\n k = p\n v = None\n if '=' in p:\n k, v = p.split('=', 1)\n k, v = k.strip(), v.strip()\n reqs.append((k, v))\n self.val = val\n self.reqs = reqs\n\n def text(self):\n return 'request_param %s' % ','.join(\n ['%s=%s' % (x,y) if y else x for x, y in self.reqs]\n )\n\n phash = text\n\n def __call__(self, context, request):\n for k, v in self.reqs:\n actual = request.params.get(k)\n if actual is None:\n return False\n if v is not None and actual != v:\n return False\n return True\n\nclass HeaderPredicate(object):\n def __init__(self, val, config):\n name = val\n v = None\n if ':' in name:\n name, val_str = name.split(':', 1)\n try:\n v = re.compile(val_str)\n except re.error as why:\n raise ConfigurationError(why.args[0])\n if v is None:\n self._text = 'header %s' % (name,)\n else:\n self._text = 'header %s=%s' % (name, val_str)\n self.name = name\n self.val = v\n\n def text(self):\n return self._text\n\n phash = text\n\n def __call__(self, context, request):\n if self.val is None:\n return self.name in request.headers\n val = request.headers.get(self.name)\n if val is None:\n return False\n return self.val.match(val) is not None\n\nclass AcceptPredicate(object):\n def __init__(self, val, config):\n self.val = val\n\n def text(self):\n return 'accept = %s' % (self.val,)\n\n phash = text\n\n def __call__(self, context, request):\n return self.val in request.accept\n\nclass ContainmentPredicate(object):\n def __init__(self, val, config):\n self.val = config.maybe_dotted(val)\n\n def text(self):\n return 'containment = %s' % (self.val,)\n\n phash = text\n\n def __call__(self, context, request):\n ctx = getattr(request, 'context', context)\n return find_interface(ctx, self.val) is not None\n \nclass RequestTypePredicate(object):\n def __init__(self, val, config):\n self.val = val\n\n def text(self):\n return 'request_type = %s' % (self.val,)\n\n phash = text\n\n def __call__(self, context, request):\n return self.val.providedBy(request)\n \nclass MatchParamPredicate(object):\n def __init__(self, val, config):\n val = as_sorted_tuple(val)\n self.val = val\n reqs = [ p.split('=', 1) for p in val ]\n self.reqs = [ (x.strip(), y.strip()) for x, y in reqs ]\n\n def text(self):\n return 'match_param %s' % ','.join(\n ['%s=%s' % (x,y) for x, y in self.reqs]\n )\n\n phash = text\n\n def __call__(self, context, request):\n for k, v in self.reqs:\n if request.matchdict.get(k) != v:\n return False\n return True\n \nclass CustomPredicate(object):\n def __init__(self, func, config):\n self.func = func\n\n def text(self):\n return getattr(\n self.func,\n '__text__',\n 'custom predicate: %s' % object_description(self.func)\n )\n\n def phash(self):\n # using hash() here rather than id() is intentional: we\n # want to allow custom predicates that are part of\n # frameworks to be able to define custom __hash__\n # functions for custom predicates, so that the hash output\n # of predicate instances which are \"logically the same\"\n # may compare equal.\n return 'custom:%r' % hash(self.func)\n\n def __call__(self, context, request):\n return self.func(context, request)\n \n \nclass TraversePredicate(object):\n # Can only be used as a *route* \"predicate\"; it adds 'traverse' to the\n # matchdict if it's specified in the routing args. This causes the\n # ResourceTreeTraverser to use the resolved traverse pattern as the\n # traversal path.\n def __init__(self, val, config):\n _, self.tgenerate = _compile_route(val)\n self.val = val\n \n def text(self):\n return 'traverse matchdict pseudo-predicate'\n\n def phash(self):\n # This isn't actually a predicate, it's just a infodict modifier that\n # injects ``traverse`` into the matchdict. As a result, we don't\n # need to update the hash.\n return ''\n\n def __call__(self, context, request):\n if 'traverse' in context:\n return True\n m = context['match']\n tvalue = self.tgenerate(m) # tvalue will be urlquoted string\n m['traverse'] = traversal_path(tvalue)\n # This isn't actually a predicate, it's just a infodict modifier that\n # injects ``traverse`` into the matchdict. As a result, we just\n # return True.\n return True\n\nclass CheckCSRFTokenPredicate(object):\n\n check_csrf_token = staticmethod(check_csrf_token) # testing\n \n def __init__(self, val, config):\n self.val = val\n\n def text(self):\n return 'check_csrf = %s' % (self.val,)\n\n phash = text\n\n def __call__(self, context, request):\n val = self.val\n if val:\n if val is True:\n val = 'csrf_token'\n return self.check_csrf_token(request, val, raises=False)\n return True\n\n", "path": "pyramid/config/predicates.py"}]}
| 2,693 | 706 |
gh_patches_debug_21571
|
rasdani/github-patches
|
git_diff
|
e-valuation__EvaP-1805
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove sass + ts compilation from ./manage.py run
These slow down starting the development server.
For developers actually changing TS or SASS files, they are not helpful enough, and those will likely start a `--watch` job anyway. We can simply provide a ´./manage.py watch` script for this use case.
For anyone else, they simply slow down the development server startup, which is a bit annoying.
Opinions?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `evap/development/management/commands/run.py`
Content:
```
1 import sys
2
3 from django.core.management import execute_from_command_line
4 from django.core.management.base import BaseCommand
5
6
7 class Command(BaseCommand):
8 args = ""
9 help = 'Execute "runserver 0.0.0.0:8000"'
10
11 def handle(self, *args, **options):
12 self.stdout.write('Executing "manage.py scss"')
13 execute_from_command_line(["manage.py", "scss"])
14 self.stdout.write('Executing "manage.py ts compile"')
15 execute_from_command_line(["manage.py", "ts", "compile"])
16 self.stdout.write('Executing "manage.py runserver 0.0.0.0:8000"')
17 sys.argv = ["manage.py", "runserver", "0.0.0.0:8000"]
18 execute_from_command_line(sys.argv)
19
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/evap/development/management/commands/run.py b/evap/development/management/commands/run.py
--- a/evap/development/management/commands/run.py
+++ b/evap/development/management/commands/run.py
@@ -1,4 +1,5 @@
import sys
+from subprocess import Popen # nosec
from django.core.management import execute_from_command_line
from django.core.management.base import BaseCommand
@@ -9,10 +10,8 @@
help = 'Execute "runserver 0.0.0.0:8000"'
def handle(self, *args, **options):
- self.stdout.write('Executing "manage.py scss"')
- execute_from_command_line(["manage.py", "scss"])
- self.stdout.write('Executing "manage.py ts compile"')
- execute_from_command_line(["manage.py", "ts", "compile"])
- self.stdout.write('Executing "manage.py runserver 0.0.0.0:8000"')
- sys.argv = ["manage.py", "runserver", "0.0.0.0:8000"]
- execute_from_command_line(sys.argv)
+ self.stdout.write('Executing "manage.py scss" and "manage.py ts compile"')
+ with Popen(["./manage.py", "scss"]), Popen(["./manage.py", "ts", "compile"]): # nosec
+ self.stdout.write('Executing "manage.py runserver 0.0.0.0:8000"')
+ sys.argv = ["manage.py", "runserver", "0.0.0.0:8000"]
+ execute_from_command_line(sys.argv)
|
{"golden_diff": "diff --git a/evap/development/management/commands/run.py b/evap/development/management/commands/run.py\n--- a/evap/development/management/commands/run.py\n+++ b/evap/development/management/commands/run.py\n@@ -1,4 +1,5 @@\n import sys\n+from subprocess import Popen # nosec\n \n from django.core.management import execute_from_command_line\n from django.core.management.base import BaseCommand\n@@ -9,10 +10,8 @@\n help = 'Execute \"runserver 0.0.0.0:8000\"'\n \n def handle(self, *args, **options):\n- self.stdout.write('Executing \"manage.py scss\"')\n- execute_from_command_line([\"manage.py\", \"scss\"])\n- self.stdout.write('Executing \"manage.py ts compile\"')\n- execute_from_command_line([\"manage.py\", \"ts\", \"compile\"])\n- self.stdout.write('Executing \"manage.py runserver 0.0.0.0:8000\"')\n- sys.argv = [\"manage.py\", \"runserver\", \"0.0.0.0:8000\"]\n- execute_from_command_line(sys.argv)\n+ self.stdout.write('Executing \"manage.py scss\" and \"manage.py ts compile\"')\n+ with Popen([\"./manage.py\", \"scss\"]), Popen([\"./manage.py\", \"ts\", \"compile\"]): # nosec\n+ self.stdout.write('Executing \"manage.py runserver 0.0.0.0:8000\"')\n+ sys.argv = [\"manage.py\", \"runserver\", \"0.0.0.0:8000\"]\n+ execute_from_command_line(sys.argv)\n", "issue": "Remove sass + ts compilation from ./manage.py run\nThese slow down starting the development server.\r\n\r\nFor developers actually changing TS or SASS files, they are not helpful enough, and those will likely start a `--watch` job anyway. We can simply provide a \u00b4./manage.py watch` script for this use case.\r\n\r\nFor anyone else, they simply slow down the development server startup, which is a bit annoying.\r\n\r\nOpinions?\n", "before_files": [{"content": "import sys\n\nfrom django.core.management import execute_from_command_line\nfrom django.core.management.base import BaseCommand\n\n\nclass Command(BaseCommand):\n args = \"\"\n help = 'Execute \"runserver 0.0.0.0:8000\"'\n\n def handle(self, *args, **options):\n self.stdout.write('Executing \"manage.py scss\"')\n execute_from_command_line([\"manage.py\", \"scss\"])\n self.stdout.write('Executing \"manage.py ts compile\"')\n execute_from_command_line([\"manage.py\", \"ts\", \"compile\"])\n self.stdout.write('Executing \"manage.py runserver 0.0.0.0:8000\"')\n sys.argv = [\"manage.py\", \"runserver\", \"0.0.0.0:8000\"]\n execute_from_command_line(sys.argv)\n", "path": "evap/development/management/commands/run.py"}], "after_files": [{"content": "import sys\nfrom subprocess import Popen # nosec\n\nfrom django.core.management import execute_from_command_line\nfrom django.core.management.base import BaseCommand\n\n\nclass Command(BaseCommand):\n args = \"\"\n help = 'Execute \"runserver 0.0.0.0:8000\"'\n\n def handle(self, *args, **options):\n self.stdout.write('Executing \"manage.py scss\" and \"manage.py ts compile\"')\n with Popen([\"./manage.py\", \"scss\"]), Popen([\"./manage.py\", \"ts\", \"compile\"]): # nosec\n self.stdout.write('Executing \"manage.py runserver 0.0.0.0:8000\"')\n sys.argv = [\"manage.py\", \"runserver\", \"0.0.0.0:8000\"]\n execute_from_command_line(sys.argv)\n", "path": "evap/development/management/commands/run.py"}]}
| 562 | 381 |
gh_patches_debug_18056
|
rasdani/github-patches
|
git_diff
|
PaddlePaddle__PaddleDetection-522
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
检测结果可视化脚本运行报错
请帮忙看下这个报错,paddle version:1.7.1,master分支和0.2分支都有同样问题
参考文档:
https://github.com/PaddlePaddle/PaddleDetection/blob/release/0.2/docs/advanced_tutorials/inference/DEPLOYMENT.md
`
root@eric-VirtualBox:/home/eric/project/PaddleDetection/inference/build# python ../tools/vis.py --img_path=jpeg/chinamap_a236.jpg --img_result_path=jpeg/chinamap_a236.jpg.pb --threshold=0.1 --c2l_path=../tools/chinamap.json
Traceback (most recent call last):
File "../tools/vis.py", line 83, in <module>
with open(Flags.c2l_path, "r", encoding="utf-8") as json_f:
TypeError: 'encoding' is an invalid keyword argument for this function
root@eric-VirtualBox:/home/eric/project/PaddleDetection/inference/build#
`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `inference/tools/vis.py`
Content:
```
1 # coding: utf-8
2 # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import detection_result_pb2
17 import cv2
18 import sys
19 import gflags
20 import numpy as np
21 import json
22 from PIL import Image, ImageDraw, ImageFont
23
24 Flags = gflags.FLAGS
25 gflags.DEFINE_string('img_path', 'abc', 'image path')
26 gflags.DEFINE_string('img_result_path', 'def', 'image result path')
27 gflags.DEFINE_float('threshold', 0.0, 'threshold of score')
28 gflags.DEFINE_string('c2l_path', 'ghk', 'class to label path')
29
30
31 def colormap(rgb=False):
32 """
33 Get colormap
34 """
35 color_list = np.array([
36 0.000, 0.447, 0.741, 0.850, 0.325, 0.098, 0.929, 0.694, 0.125, 0.494,
37 0.184, 0.556, 0.466, 0.674, 0.188, 0.301, 0.745, 0.933, 0.635, 0.078,
38 0.184, 0.300, 0.300, 0.300, 0.600, 0.600, 0.600, 1.000, 0.000, 0.000,
39 1.000, 0.500, 0.000, 0.749, 0.749, 0.000, 0.000, 1.000, 0.000, 0.000,
40 0.000, 1.000, 0.667, 0.000, 1.000, 0.333, 0.333, 0.000, 0.333, 0.667,
41 0.000, 0.333, 1.000, 0.000, 0.667, 0.333, 0.000, 0.667, 0.667, 0.000,
42 0.667, 1.000, 0.000, 1.000, 0.333, 0.000, 1.000, 0.667, 0.000, 1.000,
43 1.000, 0.000, 0.000, 0.333, 0.500, 0.000, 0.667, 0.500, 0.000, 1.000,
44 0.500, 0.333, 0.000, 0.500, 0.333, 0.333, 0.500, 0.333, 0.667, 0.500,
45 0.333, 1.000, 0.500, 0.667, 0.000, 0.500, 0.667, 0.333, 0.500, 0.667,
46 0.667, 0.500, 0.667, 1.000, 0.500, 1.000, 0.000, 0.500, 1.000, 0.333,
47 0.500, 1.000, 0.667, 0.500, 1.000, 1.000, 0.500, 0.000, 0.333, 1.000,
48 0.000, 0.667, 1.000, 0.000, 1.000, 1.000, 0.333, 0.000, 1.000, 0.333,
49 0.333, 1.000, 0.333, 0.667, 1.000, 0.333, 1.000, 1.000, 0.667, 0.000,
50 1.000, 0.667, 0.333, 1.000, 0.667, 0.667, 1.000, 0.667, 1.000, 1.000,
51 1.000, 0.000, 1.000, 1.000, 0.333, 1.000, 1.000, 0.667, 1.000, 0.167,
52 0.000, 0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000,
53 0.000, 0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000,
54 0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000,
55 0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000, 0.000,
56 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000, 0.833,
57 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.143, 0.143, 0.143, 0.286,
58 0.286, 0.286, 0.429, 0.429, 0.429, 0.571, 0.571, 0.571, 0.714, 0.714,
59 0.714, 0.857, 0.857, 0.857, 1.000, 1.000, 1.000
60 ]).astype(np.float32)
61 color_list = color_list.reshape((-1, 3)) * 255
62 if not rgb:
63 color_list = color_list[:, ::-1]
64 return color_list
65
66
67 if __name__ == "__main__":
68 if len(sys.argv) != 5:
69 print(
70 "Usage: python vis.py --img_path=/path/to/image --img_result_path=/path/to/image_result.pb --threshold=0.1 --c2l_path=/path/to/class2label.json"
71 )
72 else:
73 Flags(sys.argv)
74 color_list = colormap(rgb=True)
75 text_thickness = 1
76 text_scale = 0.3
77 with open(Flags.img_result_path, "rb") as f:
78 detection_result = detection_result_pb2.DetectionResult()
79 detection_result.ParseFromString(f.read())
80 img = cv2.imread(Flags.img_path)
81 img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
82 class2LabelMap = dict()
83 with open(Flags.c2l_path, "r", encoding="utf-8") as json_f:
84 class2LabelMap = json.load(json_f)
85 for box in detection_result.detection_boxes:
86 if box.score >= Flags.threshold:
87 box_class = getattr(box, 'class')
88 text_class_score_str = "%s %.2f" % (
89 class2LabelMap.get(str(box_class)), box.score)
90 text_point = (int(box.left_top_x), int(box.left_top_y))
91
92 ptLeftTop = (int(box.left_top_x), int(box.left_top_y))
93 ptRightBottom = (int(box.right_bottom_x),
94 int(box.right_bottom_y))
95 box_thickness = 1
96 color = tuple([int(c) for c in color_list[box_class]])
97 cv2.rectangle(img, ptLeftTop, ptRightBottom, color,
98 box_thickness, 8)
99 if text_point[1] < 0:
100 text_point = (int(box.left_top_x),
101 int(box.right_bottom_y))
102 WHITE = (255, 255, 255)
103 font = cv2.FONT_HERSHEY_SIMPLEX
104 text_size = cv2.getTextSize(text_class_score_str, font,
105 text_scale, text_thickness)
106
107 text_box_left_top = (text_point[0],
108 text_point[1] - text_size[0][1])
109 text_box_right_bottom = (
110 text_point[0] + text_size[0][0], text_point[1])
111
112 cv2.rectangle(img, text_box_left_top,
113 text_box_right_bottom, color, -1, 8)
114 cv2.putText(img, text_class_score_str, text_point, font,
115 text_scale, WHITE, text_thickness)
116 img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
117 cv2.imwrite(Flags.img_path + ".png", img)
118
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/inference/tools/vis.py b/inference/tools/vis.py
--- a/inference/tools/vis.py
+++ b/inference/tools/vis.py
@@ -20,6 +20,7 @@
import numpy as np
import json
from PIL import Image, ImageDraw, ImageFont
+import io
Flags = gflags.FLAGS
gflags.DEFINE_string('img_path', 'abc', 'image path')
@@ -80,7 +81,7 @@
img = cv2.imread(Flags.img_path)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
class2LabelMap = dict()
- with open(Flags.c2l_path, "r", encoding="utf-8") as json_f:
+ with io.open(Flags.c2l_path, "r", encoding="utf-8") as json_f:
class2LabelMap = json.load(json_f)
for box in detection_result.detection_boxes:
if box.score >= Flags.threshold:
|
{"golden_diff": "diff --git a/inference/tools/vis.py b/inference/tools/vis.py\n--- a/inference/tools/vis.py\n+++ b/inference/tools/vis.py\n@@ -20,6 +20,7 @@\n import numpy as np\n import json\n from PIL import Image, ImageDraw, ImageFont\n+import io\n \n Flags = gflags.FLAGS\n gflags.DEFINE_string('img_path', 'abc', 'image path')\n@@ -80,7 +81,7 @@\n img = cv2.imread(Flags.img_path)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n class2LabelMap = dict()\n- with open(Flags.c2l_path, \"r\", encoding=\"utf-8\") as json_f:\n+ with io.open(Flags.c2l_path, \"r\", encoding=\"utf-8\") as json_f:\n class2LabelMap = json.load(json_f)\n for box in detection_result.detection_boxes:\n if box.score >= Flags.threshold:\n", "issue": "\u68c0\u6d4b\u7ed3\u679c\u53ef\u89c6\u5316\u811a\u672c\u8fd0\u884c\u62a5\u9519\n\u8bf7\u5e2e\u5fd9\u770b\u4e0b\u8fd9\u4e2a\u62a5\u9519\uff0cpaddle version\uff1a1.7.1\uff0cmaster\u5206\u652f\u548c0.2\u5206\u652f\u90fd\u6709\u540c\u6837\u95ee\u9898\r\n\u53c2\u8003\u6587\u6863\uff1a\r\nhttps://github.com/PaddlePaddle/PaddleDetection/blob/release/0.2/docs/advanced_tutorials/inference/DEPLOYMENT.md\r\n\r\n`\r\nroot@eric-VirtualBox:/home/eric/project/PaddleDetection/inference/build# python ../tools/vis.py --img_path=jpeg/chinamap_a236.jpg --img_result_path=jpeg/chinamap_a236.jpg.pb --threshold=0.1 --c2l_path=../tools/chinamap.json\r\nTraceback (most recent call last):\r\n File \"../tools/vis.py\", line 83, in <module>\r\n with open(Flags.c2l_path, \"r\", encoding=\"utf-8\") as json_f:\r\nTypeError: 'encoding' is an invalid keyword argument for this function\r\nroot@eric-VirtualBox:/home/eric/project/PaddleDetection/inference/build#\r\n`\r\n\r\n\n", "before_files": [{"content": "# coding: utf-8\n# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport detection_result_pb2\nimport cv2\nimport sys\nimport gflags\nimport numpy as np\nimport json\nfrom PIL import Image, ImageDraw, ImageFont\n\nFlags = gflags.FLAGS\ngflags.DEFINE_string('img_path', 'abc', 'image path')\ngflags.DEFINE_string('img_result_path', 'def', 'image result path')\ngflags.DEFINE_float('threshold', 0.0, 'threshold of score')\ngflags.DEFINE_string('c2l_path', 'ghk', 'class to label path')\n\n\ndef colormap(rgb=False):\n \"\"\"\n Get colormap\n \"\"\"\n color_list = np.array([\n 0.000, 0.447, 0.741, 0.850, 0.325, 0.098, 0.929, 0.694, 0.125, 0.494,\n 0.184, 0.556, 0.466, 0.674, 0.188, 0.301, 0.745, 0.933, 0.635, 0.078,\n 0.184, 0.300, 0.300, 0.300, 0.600, 0.600, 0.600, 1.000, 0.000, 0.000,\n 1.000, 0.500, 0.000, 0.749, 0.749, 0.000, 0.000, 1.000, 0.000, 0.000,\n 0.000, 1.000, 0.667, 0.000, 1.000, 0.333, 0.333, 0.000, 0.333, 0.667,\n 0.000, 0.333, 1.000, 0.000, 0.667, 0.333, 0.000, 0.667, 0.667, 0.000,\n 0.667, 1.000, 0.000, 1.000, 0.333, 0.000, 1.000, 0.667, 0.000, 1.000,\n 1.000, 0.000, 0.000, 0.333, 0.500, 0.000, 0.667, 0.500, 0.000, 1.000,\n 0.500, 0.333, 0.000, 0.500, 0.333, 0.333, 0.500, 0.333, 0.667, 0.500,\n 0.333, 1.000, 0.500, 0.667, 0.000, 0.500, 0.667, 0.333, 0.500, 0.667,\n 0.667, 0.500, 0.667, 1.000, 0.500, 1.000, 0.000, 0.500, 1.000, 0.333,\n 0.500, 1.000, 0.667, 0.500, 1.000, 1.000, 0.500, 0.000, 0.333, 1.000,\n 0.000, 0.667, 1.000, 0.000, 1.000, 1.000, 0.333, 0.000, 1.000, 0.333,\n 0.333, 1.000, 0.333, 0.667, 1.000, 0.333, 1.000, 1.000, 0.667, 0.000,\n 1.000, 0.667, 0.333, 1.000, 0.667, 0.667, 1.000, 0.667, 1.000, 1.000,\n 1.000, 0.000, 1.000, 1.000, 0.333, 1.000, 1.000, 0.667, 1.000, 0.167,\n 0.000, 0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000,\n 0.000, 0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000,\n 0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000,\n 0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000, 0.000,\n 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000, 0.833,\n 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.143, 0.143, 0.143, 0.286,\n 0.286, 0.286, 0.429, 0.429, 0.429, 0.571, 0.571, 0.571, 0.714, 0.714,\n 0.714, 0.857, 0.857, 0.857, 1.000, 1.000, 1.000\n ]).astype(np.float32)\n color_list = color_list.reshape((-1, 3)) * 255\n if not rgb:\n color_list = color_list[:, ::-1]\n return color_list\n\n\nif __name__ == \"__main__\":\n if len(sys.argv) != 5:\n print(\n \"Usage: python vis.py --img_path=/path/to/image --img_result_path=/path/to/image_result.pb --threshold=0.1 --c2l_path=/path/to/class2label.json\"\n )\n else:\n Flags(sys.argv)\n color_list = colormap(rgb=True)\n text_thickness = 1\n text_scale = 0.3\n with open(Flags.img_result_path, \"rb\") as f:\n detection_result = detection_result_pb2.DetectionResult()\n detection_result.ParseFromString(f.read())\n img = cv2.imread(Flags.img_path)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n class2LabelMap = dict()\n with open(Flags.c2l_path, \"r\", encoding=\"utf-8\") as json_f:\n class2LabelMap = json.load(json_f)\n for box in detection_result.detection_boxes:\n if box.score >= Flags.threshold:\n box_class = getattr(box, 'class')\n text_class_score_str = \"%s %.2f\" % (\n class2LabelMap.get(str(box_class)), box.score)\n text_point = (int(box.left_top_x), int(box.left_top_y))\n\n ptLeftTop = (int(box.left_top_x), int(box.left_top_y))\n ptRightBottom = (int(box.right_bottom_x),\n int(box.right_bottom_y))\n box_thickness = 1\n color = tuple([int(c) for c in color_list[box_class]])\n cv2.rectangle(img, ptLeftTop, ptRightBottom, color,\n box_thickness, 8)\n if text_point[1] < 0:\n text_point = (int(box.left_top_x),\n int(box.right_bottom_y))\n WHITE = (255, 255, 255)\n font = cv2.FONT_HERSHEY_SIMPLEX\n text_size = cv2.getTextSize(text_class_score_str, font,\n text_scale, text_thickness)\n\n text_box_left_top = (text_point[0],\n text_point[1] - text_size[0][1])\n text_box_right_bottom = (\n text_point[0] + text_size[0][0], text_point[1])\n\n cv2.rectangle(img, text_box_left_top,\n text_box_right_bottom, color, -1, 8)\n cv2.putText(img, text_class_score_str, text_point, font,\n text_scale, WHITE, text_thickness)\n img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)\n cv2.imwrite(Flags.img_path + \".png\", img)\n", "path": "inference/tools/vis.py"}], "after_files": [{"content": "# coding: utf-8\n# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport detection_result_pb2\nimport cv2\nimport sys\nimport gflags\nimport numpy as np\nimport json\nfrom PIL import Image, ImageDraw, ImageFont\nimport io\n\nFlags = gflags.FLAGS\ngflags.DEFINE_string('img_path', 'abc', 'image path')\ngflags.DEFINE_string('img_result_path', 'def', 'image result path')\ngflags.DEFINE_float('threshold', 0.0, 'threshold of score')\ngflags.DEFINE_string('c2l_path', 'ghk', 'class to label path')\n\n\ndef colormap(rgb=False):\n \"\"\"\n Get colormap\n \"\"\"\n color_list = np.array([\n 0.000, 0.447, 0.741, 0.850, 0.325, 0.098, 0.929, 0.694, 0.125, 0.494,\n 0.184, 0.556, 0.466, 0.674, 0.188, 0.301, 0.745, 0.933, 0.635, 0.078,\n 0.184, 0.300, 0.300, 0.300, 0.600, 0.600, 0.600, 1.000, 0.000, 0.000,\n 1.000, 0.500, 0.000, 0.749, 0.749, 0.000, 0.000, 1.000, 0.000, 0.000,\n 0.000, 1.000, 0.667, 0.000, 1.000, 0.333, 0.333, 0.000, 0.333, 0.667,\n 0.000, 0.333, 1.000, 0.000, 0.667, 0.333, 0.000, 0.667, 0.667, 0.000,\n 0.667, 1.000, 0.000, 1.000, 0.333, 0.000, 1.000, 0.667, 0.000, 1.000,\n 1.000, 0.000, 0.000, 0.333, 0.500, 0.000, 0.667, 0.500, 0.000, 1.000,\n 0.500, 0.333, 0.000, 0.500, 0.333, 0.333, 0.500, 0.333, 0.667, 0.500,\n 0.333, 1.000, 0.500, 0.667, 0.000, 0.500, 0.667, 0.333, 0.500, 0.667,\n 0.667, 0.500, 0.667, 1.000, 0.500, 1.000, 0.000, 0.500, 1.000, 0.333,\n 0.500, 1.000, 0.667, 0.500, 1.000, 1.000, 0.500, 0.000, 0.333, 1.000,\n 0.000, 0.667, 1.000, 0.000, 1.000, 1.000, 0.333, 0.000, 1.000, 0.333,\n 0.333, 1.000, 0.333, 0.667, 1.000, 0.333, 1.000, 1.000, 0.667, 0.000,\n 1.000, 0.667, 0.333, 1.000, 0.667, 0.667, 1.000, 0.667, 1.000, 1.000,\n 1.000, 0.000, 1.000, 1.000, 0.333, 1.000, 1.000, 0.667, 1.000, 0.167,\n 0.000, 0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000,\n 0.000, 0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000,\n 0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000,\n 0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000, 0.000,\n 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000, 0.833,\n 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.143, 0.143, 0.143, 0.286,\n 0.286, 0.286, 0.429, 0.429, 0.429, 0.571, 0.571, 0.571, 0.714, 0.714,\n 0.714, 0.857, 0.857, 0.857, 1.000, 1.000, 1.000\n ]).astype(np.float32)\n color_list = color_list.reshape((-1, 3)) * 255\n if not rgb:\n color_list = color_list[:, ::-1]\n return color_list\n\n\nif __name__ == \"__main__\":\n if len(sys.argv) != 5:\n print(\n \"Usage: python vis.py --img_path=/path/to/image --img_result_path=/path/to/image_result.pb --threshold=0.1 --c2l_path=/path/to/class2label.json\"\n )\n else:\n Flags(sys.argv)\n color_list = colormap(rgb=True)\n text_thickness = 1\n text_scale = 0.3\n with open(Flags.img_result_path, \"rb\") as f:\n detection_result = detection_result_pb2.DetectionResult()\n detection_result.ParseFromString(f.read())\n img = cv2.imread(Flags.img_path)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n class2LabelMap = dict()\n with io.open(Flags.c2l_path, \"r\", encoding=\"utf-8\") as json_f:\n class2LabelMap = json.load(json_f)\n for box in detection_result.detection_boxes:\n if box.score >= Flags.threshold:\n box_class = getattr(box, 'class')\n text_class_score_str = \"%s %.2f\" % (\n class2LabelMap.get(str(box_class)), box.score)\n text_point = (int(box.left_top_x), int(box.left_top_y))\n\n ptLeftTop = (int(box.left_top_x), int(box.left_top_y))\n ptRightBottom = (int(box.right_bottom_x),\n int(box.right_bottom_y))\n box_thickness = 1\n color = tuple([int(c) for c in color_list[box_class]])\n cv2.rectangle(img, ptLeftTop, ptRightBottom, color,\n box_thickness, 8)\n if text_point[1] < 0:\n text_point = (int(box.left_top_x),\n int(box.right_bottom_y))\n WHITE = (255, 255, 255)\n font = cv2.FONT_HERSHEY_SIMPLEX\n text_size = cv2.getTextSize(text_class_score_str, font,\n text_scale, text_thickness)\n\n text_box_left_top = (text_point[0],\n text_point[1] - text_size[0][1])\n text_box_right_bottom = (\n text_point[0] + text_size[0][0], text_point[1])\n\n cv2.rectangle(img, text_box_left_top,\n text_box_right_bottom, color, -1, 8)\n cv2.putText(img, text_class_score_str, text_point, font,\n text_scale, WHITE, text_thickness)\n img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)\n cv2.imwrite(Flags.img_path + \".png\", img)\n", "path": "inference/tools/vis.py"}]}
| 3,323 | 217 |
gh_patches_debug_22391
|
rasdani/github-patches
|
git_diff
|
vyperlang__vyper-1082
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Converting sliced bytes to non-bytes32 doesn't work
### Version Information
* vyper Version: 0.1.0b4
### What's your issue about?
Vyper doesn't like converting a slice of `bytes` (slice len <=32) to a type other than `bytes32`:
```python
# Compiles!
@public
@constant
def foo(_arg: bytes[100]) -> bytes32:
return convert(slice(_arg, start=20, len=32), bytes32)
# Does not compile....
@public
@constant
def bar(_arg: bytes[100]) -> uint256:
return convert(slice(_arg, start=20, len=32), uint256)
# Compiles! (Workaround)
@public
@constant
def baz(_arg: bytes[100]) -> uint256:
return convert(convert(slice(_arg, start=20, len=32), bytes32), uint256)
# Note: above does not work for addresses!
```
### How can it be fixed?
Make it so!
#### Cute Animal Picture

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `vyper/types/convert.py`
Content:
```
1 import ast
2 import warnings
3
4 from vyper.functions.signature import (
5 signature
6 )
7 from vyper.parser.parser_utils import (
8 LLLnode,
9 getpos,
10 byte_array_to_num
11 )
12 from vyper.exceptions import (
13 InvalidLiteralException,
14 TypeMismatchException,
15 ParserException,
16 )
17 from vyper.types import (
18 BaseType,
19 )
20 from vyper.types import (
21 get_type,
22 )
23 from vyper.utils import (
24 DECIMAL_DIVISOR,
25 MemoryPositions,
26 SizeLimits
27 )
28
29
30 @signature(('uint256', 'bytes32', 'bytes', 'bool'), '*')
31 def to_int128(expr, args, kwargs, context):
32 in_node = args[0]
33 typ, len = get_type(in_node)
34 if typ in ('uint256', 'bytes32'):
35 if in_node.typ.is_literal and not SizeLimits.in_bounds('int128', in_node.value):
36 raise InvalidLiteralException("Number out of range: {}".format(in_node.value), expr)
37 return LLLnode.from_list(
38 ['clamp', ['mload', MemoryPositions.MINNUM], in_node,
39 ['mload', MemoryPositions.MAXNUM]], typ=BaseType('int128', in_node.typ.unit), pos=getpos(expr)
40 )
41
42 elif typ is 'bool':
43 return LLLnode.from_list(
44 ['clamp', ['mload', MemoryPositions.MINNUM], in_node,
45 ['mload', MemoryPositions.MAXNUM]], typ=BaseType('int128', in_node.typ.unit), pos=getpos(expr)
46 )
47
48 else:
49 return byte_array_to_num(in_node, expr, 'int128')
50
51
52 @signature(('num_literal', 'int128', 'bytes32', 'address', 'bool'), '*')
53 def to_uint256(expr, args, kwargs, context):
54 in_node = args[0]
55 input_type, len = get_type(in_node)
56
57 if isinstance(in_node, int):
58 if not SizeLimits.in_bounds('uint256', in_node):
59 raise InvalidLiteralException("Number out of range: {}".format(in_node))
60 _unit = in_node.typ.unit if input_type == 'int128' else None
61 return LLLnode.from_list(in_node, typ=BaseType('uint256', _unit), pos=getpos(expr))
62
63 elif isinstance(in_node, LLLnode) and input_type in ('int128', 'num_literal', 'bool'):
64 _unit = in_node.typ.unit if input_type == 'int128' else None
65 return LLLnode.from_list(['clampge', in_node, 0], typ=BaseType('uint256', _unit), pos=getpos(expr))
66
67 elif isinstance(in_node, LLLnode) and input_type in ('bytes32', 'address'):
68 return LLLnode(value=in_node.value, args=in_node.args, typ=BaseType('uint256'), pos=getpos(expr))
69
70 else:
71 raise InvalidLiteralException("Invalid input for uint256: %r" % in_node, expr)
72
73
74 @signature(('int128', 'uint256'), '*')
75 def to_decimal(expr, args, kwargs, context):
76 input = args[0]
77 if input.typ.typ == 'uint256':
78 return LLLnode.from_list(
79 ['uclample', ['mul', input, DECIMAL_DIVISOR], ['mload', MemoryPositions.MAXDECIMAL]],
80 typ=BaseType('decimal', input.typ.unit, input.typ.positional), pos=getpos(expr)
81 )
82 else:
83 return LLLnode.from_list(
84 ['mul', input, DECIMAL_DIVISOR],
85 typ=BaseType('decimal', input.typ.unit, input.typ.positional),
86 pos=getpos(expr)
87 )
88
89
90 @signature(('int128', 'uint256', 'address', 'bytes', 'bool'), '*')
91 def to_bytes32(expr, args, kwargs, context):
92 in_arg = args[0]
93 typ, _len = get_type(in_arg)
94
95 if typ == 'bytes':
96
97 if _len > 32:
98 raise TypeMismatchException("Unable to convert bytes[{}] to bytes32, max length is too large.".format(len))
99
100 if in_arg.location == "memory":
101 return LLLnode.from_list(
102 ['mload', ['add', in_arg, 32]], typ=BaseType('bytes32')
103 )
104 elif in_arg.location == "storage":
105 return LLLnode.from_list(
106 ['sload', ['add', ['sha3_32', in_arg], 1]], typ=BaseType('bytes32')
107 )
108
109 else:
110 return LLLnode(value=in_arg.value, args=in_arg.args, typ=BaseType('bytes32'), pos=getpos(expr))
111
112
113 @signature(('bytes32'), '*')
114 def to_address(expr, args, kwargs, context):
115 in_arg = args[0]
116
117 return LLLnode(value=in_arg.value, args=in_arg.args, typ=BaseType('address'), pos=getpos(expr))
118
119
120 def convert(expr, context):
121
122 if isinstance(expr.args[1], ast.Str):
123 warnings.warn(
124 "String parameter has been removed, see VIP1026). "
125 "Use a vyper type instead.",
126 DeprecationWarning
127 )
128
129 if isinstance(expr.args[1], ast.Name):
130 output_type = expr.args[1].id
131 else:
132 raise ParserException("Invalid conversion type, use valid vyper type.", expr)
133
134 if output_type in conversion_table:
135 return conversion_table[output_type](expr, context)
136 else:
137 raise ParserException("Conversion to {} is invalid.".format(output_type), expr)
138
139
140 conversion_table = {
141 'int128': to_int128,
142 'uint256': to_uint256,
143 'decimal': to_decimal,
144 'bytes32': to_bytes32,
145 'address': to_address,
146 }
147
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/vyper/types/convert.py b/vyper/types/convert.py
--- a/vyper/types/convert.py
+++ b/vyper/types/convert.py
@@ -49,7 +49,7 @@
return byte_array_to_num(in_node, expr, 'int128')
-@signature(('num_literal', 'int128', 'bytes32', 'address', 'bool'), '*')
+@signature(('num_literal', 'int128', 'bytes32', 'bytes', 'address', 'bool'), '*')
def to_uint256(expr, args, kwargs, context):
in_node = args[0]
input_type, len = get_type(in_node)
@@ -67,6 +67,11 @@
elif isinstance(in_node, LLLnode) and input_type in ('bytes32', 'address'):
return LLLnode(value=in_node.value, args=in_node.args, typ=BaseType('uint256'), pos=getpos(expr))
+ elif isinstance(in_node, LLLnode) and input_type is 'bytes':
+ if in_node.typ.maxlen > 32:
+ raise InvalidLiteralException("Cannot convert bytes array of max length {} to uint256".format(in_node.value), expr)
+ return byte_array_to_num(in_node, expr, 'uint256')
+
else:
raise InvalidLiteralException("Invalid input for uint256: %r" % in_node, expr)
|
{"golden_diff": "diff --git a/vyper/types/convert.py b/vyper/types/convert.py\n--- a/vyper/types/convert.py\n+++ b/vyper/types/convert.py\n@@ -49,7 +49,7 @@\n return byte_array_to_num(in_node, expr, 'int128')\n \n \n-@signature(('num_literal', 'int128', 'bytes32', 'address', 'bool'), '*')\n+@signature(('num_literal', 'int128', 'bytes32', 'bytes', 'address', 'bool'), '*')\n def to_uint256(expr, args, kwargs, context):\n in_node = args[0]\n input_type, len = get_type(in_node)\n@@ -67,6 +67,11 @@\n elif isinstance(in_node, LLLnode) and input_type in ('bytes32', 'address'):\n return LLLnode(value=in_node.value, args=in_node.args, typ=BaseType('uint256'), pos=getpos(expr))\n \n+ elif isinstance(in_node, LLLnode) and input_type is 'bytes':\n+ if in_node.typ.maxlen > 32:\n+ raise InvalidLiteralException(\"Cannot convert bytes array of max length {} to uint256\".format(in_node.value), expr)\n+ return byte_array_to_num(in_node, expr, 'uint256')\n+\n else:\n raise InvalidLiteralException(\"Invalid input for uint256: %r\" % in_node, expr)\n", "issue": "Converting sliced bytes to non-bytes32 doesn't work\n### Version Information\r\n* vyper Version: 0.1.0b4\r\n\r\n### What's your issue about?\r\nVyper doesn't like converting a slice of `bytes` (slice len <=32) to a type other than `bytes32`:\r\n\r\n```python\r\n# Compiles!\r\n@public\r\n@constant\r\ndef foo(_arg: bytes[100]) -> bytes32:\r\n return convert(slice(_arg, start=20, len=32), bytes32)\r\n\r\n# Does not compile....\r\n@public\r\n@constant\r\ndef bar(_arg: bytes[100]) -> uint256:\r\n return convert(slice(_arg, start=20, len=32), uint256)\r\n\r\n# Compiles! (Workaround)\r\n@public\r\n@constant\r\ndef baz(_arg: bytes[100]) -> uint256:\r\n return convert(convert(slice(_arg, start=20, len=32), bytes32), uint256)\r\n\r\n# Note: above does not work for addresses!\r\n```\r\n\r\n### How can it be fixed?\r\nMake it so!\r\n\r\n#### Cute Animal Picture\r\n\r\n\r\n\n", "before_files": [{"content": "import ast\nimport warnings\n\nfrom vyper.functions.signature import (\n signature\n)\nfrom vyper.parser.parser_utils import (\n LLLnode,\n getpos,\n byte_array_to_num\n)\nfrom vyper.exceptions import (\n InvalidLiteralException,\n TypeMismatchException,\n ParserException,\n)\nfrom vyper.types import (\n BaseType,\n)\nfrom vyper.types import (\n get_type,\n)\nfrom vyper.utils import (\n DECIMAL_DIVISOR,\n MemoryPositions,\n SizeLimits\n)\n\n\n@signature(('uint256', 'bytes32', 'bytes', 'bool'), '*')\ndef to_int128(expr, args, kwargs, context):\n in_node = args[0]\n typ, len = get_type(in_node)\n if typ in ('uint256', 'bytes32'):\n if in_node.typ.is_literal and not SizeLimits.in_bounds('int128', in_node.value):\n raise InvalidLiteralException(\"Number out of range: {}\".format(in_node.value), expr)\n return LLLnode.from_list(\n ['clamp', ['mload', MemoryPositions.MINNUM], in_node,\n ['mload', MemoryPositions.MAXNUM]], typ=BaseType('int128', in_node.typ.unit), pos=getpos(expr)\n )\n\n elif typ is 'bool':\n return LLLnode.from_list(\n ['clamp', ['mload', MemoryPositions.MINNUM], in_node,\n ['mload', MemoryPositions.MAXNUM]], typ=BaseType('int128', in_node.typ.unit), pos=getpos(expr)\n )\n\n else:\n return byte_array_to_num(in_node, expr, 'int128')\n\n\n@signature(('num_literal', 'int128', 'bytes32', 'address', 'bool'), '*')\ndef to_uint256(expr, args, kwargs, context):\n in_node = args[0]\n input_type, len = get_type(in_node)\n\n if isinstance(in_node, int):\n if not SizeLimits.in_bounds('uint256', in_node):\n raise InvalidLiteralException(\"Number out of range: {}\".format(in_node))\n _unit = in_node.typ.unit if input_type == 'int128' else None\n return LLLnode.from_list(in_node, typ=BaseType('uint256', _unit), pos=getpos(expr))\n\n elif isinstance(in_node, LLLnode) and input_type in ('int128', 'num_literal', 'bool'):\n _unit = in_node.typ.unit if input_type == 'int128' else None\n return LLLnode.from_list(['clampge', in_node, 0], typ=BaseType('uint256', _unit), pos=getpos(expr))\n\n elif isinstance(in_node, LLLnode) and input_type in ('bytes32', 'address'):\n return LLLnode(value=in_node.value, args=in_node.args, typ=BaseType('uint256'), pos=getpos(expr))\n\n else:\n raise InvalidLiteralException(\"Invalid input for uint256: %r\" % in_node, expr)\n\n\n@signature(('int128', 'uint256'), '*')\ndef to_decimal(expr, args, kwargs, context):\n input = args[0]\n if input.typ.typ == 'uint256':\n return LLLnode.from_list(\n ['uclample', ['mul', input, DECIMAL_DIVISOR], ['mload', MemoryPositions.MAXDECIMAL]],\n typ=BaseType('decimal', input.typ.unit, input.typ.positional), pos=getpos(expr)\n )\n else:\n return LLLnode.from_list(\n ['mul', input, DECIMAL_DIVISOR],\n typ=BaseType('decimal', input.typ.unit, input.typ.positional),\n pos=getpos(expr)\n )\n\n\n@signature(('int128', 'uint256', 'address', 'bytes', 'bool'), '*')\ndef to_bytes32(expr, args, kwargs, context):\n in_arg = args[0]\n typ, _len = get_type(in_arg)\n\n if typ == 'bytes':\n\n if _len > 32:\n raise TypeMismatchException(\"Unable to convert bytes[{}] to bytes32, max length is too large.\".format(len))\n\n if in_arg.location == \"memory\":\n return LLLnode.from_list(\n ['mload', ['add', in_arg, 32]], typ=BaseType('bytes32')\n )\n elif in_arg.location == \"storage\":\n return LLLnode.from_list(\n ['sload', ['add', ['sha3_32', in_arg], 1]], typ=BaseType('bytes32')\n )\n\n else:\n return LLLnode(value=in_arg.value, args=in_arg.args, typ=BaseType('bytes32'), pos=getpos(expr))\n\n\n@signature(('bytes32'), '*')\ndef to_address(expr, args, kwargs, context):\n in_arg = args[0]\n\n return LLLnode(value=in_arg.value, args=in_arg.args, typ=BaseType('address'), pos=getpos(expr))\n\n\ndef convert(expr, context):\n\n if isinstance(expr.args[1], ast.Str):\n warnings.warn(\n \"String parameter has been removed, see VIP1026). \"\n \"Use a vyper type instead.\",\n DeprecationWarning\n )\n\n if isinstance(expr.args[1], ast.Name):\n output_type = expr.args[1].id\n else:\n raise ParserException(\"Invalid conversion type, use valid vyper type.\", expr)\n\n if output_type in conversion_table:\n return conversion_table[output_type](expr, context)\n else:\n raise ParserException(\"Conversion to {} is invalid.\".format(output_type), expr)\n\n\nconversion_table = {\n 'int128': to_int128,\n 'uint256': to_uint256,\n 'decimal': to_decimal,\n 'bytes32': to_bytes32,\n 'address': to_address,\n}\n", "path": "vyper/types/convert.py"}], "after_files": [{"content": "import ast\nimport warnings\n\nfrom vyper.functions.signature import (\n signature\n)\nfrom vyper.parser.parser_utils import (\n LLLnode,\n getpos,\n byte_array_to_num\n)\nfrom vyper.exceptions import (\n InvalidLiteralException,\n TypeMismatchException,\n ParserException,\n)\nfrom vyper.types import (\n BaseType,\n)\nfrom vyper.types import (\n get_type,\n)\nfrom vyper.utils import (\n DECIMAL_DIVISOR,\n MemoryPositions,\n SizeLimits\n)\n\n\n@signature(('uint256', 'bytes32', 'bytes', 'bool'), '*')\ndef to_int128(expr, args, kwargs, context):\n in_node = args[0]\n typ, len = get_type(in_node)\n if typ in ('uint256', 'bytes32'):\n if in_node.typ.is_literal and not SizeLimits.in_bounds('int128', in_node.value):\n raise InvalidLiteralException(\"Number out of range: {}\".format(in_node.value), expr)\n return LLLnode.from_list(\n ['clamp', ['mload', MemoryPositions.MINNUM], in_node,\n ['mload', MemoryPositions.MAXNUM]], typ=BaseType('int128', in_node.typ.unit), pos=getpos(expr)\n )\n\n elif typ is 'bool':\n return LLLnode.from_list(\n ['clamp', ['mload', MemoryPositions.MINNUM], in_node,\n ['mload', MemoryPositions.MAXNUM]], typ=BaseType('int128', in_node.typ.unit), pos=getpos(expr)\n )\n\n else:\n return byte_array_to_num(in_node, expr, 'int128')\n\n\n@signature(('num_literal', 'int128', 'bytes32', 'bytes', 'address', 'bool'), '*')\ndef to_uint256(expr, args, kwargs, context):\n in_node = args[0]\n input_type, len = get_type(in_node)\n\n if isinstance(in_node, int):\n if not SizeLimits.in_bounds('uint256', in_node):\n raise InvalidLiteralException(\"Number out of range: {}\".format(in_node))\n _unit = in_node.typ.unit if input_type == 'int128' else None\n return LLLnode.from_list(in_node, typ=BaseType('uint256', _unit), pos=getpos(expr))\n\n elif isinstance(in_node, LLLnode) and input_type in ('int128', 'num_literal', 'bool'):\n _unit = in_node.typ.unit if input_type == 'int128' else None\n return LLLnode.from_list(['clampge', in_node, 0], typ=BaseType('uint256', _unit), pos=getpos(expr))\n\n elif isinstance(in_node, LLLnode) and input_type in ('bytes32', 'address'):\n return LLLnode(value=in_node.value, args=in_node.args, typ=BaseType('uint256'), pos=getpos(expr))\n\n elif isinstance(in_node, LLLnode) and input_type is 'bytes':\n if in_node.typ.maxlen > 32:\n raise InvalidLiteralException(\"Cannot convert bytes array of max length {} to uint256\".format(in_node.value), expr)\n return byte_array_to_num(in_node, expr, 'uint256')\n\n else:\n raise InvalidLiteralException(\"Invalid input for uint256: %r\" % in_node, expr)\n\n\n@signature(('int128', 'uint256'), '*')\ndef to_decimal(expr, args, kwargs, context):\n input = args[0]\n if input.typ.typ == 'uint256':\n return LLLnode.from_list(\n ['uclample', ['mul', input, DECIMAL_DIVISOR], ['mload', MemoryPositions.MAXDECIMAL]],\n typ=BaseType('decimal', input.typ.unit, input.typ.positional), pos=getpos(expr)\n )\n else:\n return LLLnode.from_list(\n ['mul', input, DECIMAL_DIVISOR],\n typ=BaseType('decimal', input.typ.unit, input.typ.positional),\n pos=getpos(expr)\n )\n\n\n@signature(('int128', 'uint256', 'address', 'bytes', 'bool'), '*')\ndef to_bytes32(expr, args, kwargs, context):\n in_arg = args[0]\n typ, _len = get_type(in_arg)\n\n if typ == 'bytes':\n\n if _len > 32:\n raise TypeMismatchException(\"Unable to convert bytes[{}] to bytes32, max length is too large.\".format(len))\n\n if in_arg.location == \"memory\":\n return LLLnode.from_list(\n ['mload', ['add', in_arg, 32]], typ=BaseType('bytes32')\n )\n elif in_arg.location == \"storage\":\n return LLLnode.from_list(\n ['sload', ['add', ['sha3_32', in_arg], 1]], typ=BaseType('bytes32')\n )\n\n else:\n return LLLnode(value=in_arg.value, args=in_arg.args, typ=BaseType('bytes32'), pos=getpos(expr))\n\n\n@signature(('bytes32'), '*')\ndef to_address(expr, args, kwargs, context):\n in_arg = args[0]\n\n return LLLnode(value=in_arg.value, args=in_arg.args, typ=BaseType('address'), pos=getpos(expr))\n\n\ndef convert(expr, context):\n\n if isinstance(expr.args[1], ast.Str):\n warnings.warn(\n \"String parameter has been removed, see VIP1026). \"\n \"Use a vyper type instead.\",\n DeprecationWarning\n )\n\n if isinstance(expr.args[1], ast.Name):\n output_type = expr.args[1].id\n else:\n raise ParserException(\"Invalid conversion type, use valid vyper type.\", expr)\n\n if output_type in conversion_table:\n return conversion_table[output_type](expr, context)\n else:\n raise ParserException(\"Conversion to {} is invalid.\".format(output_type), expr)\n\n\nconversion_table = {\n 'int128': to_int128,\n 'uint256': to_uint256,\n 'decimal': to_decimal,\n 'bytes32': to_bytes32,\n 'address': to_address,\n}\n", "path": "vyper/types/convert.py"}]}
| 2,194 | 324 |
gh_patches_debug_562
|
rasdani/github-patches
|
git_diff
|
mabel-dev__opteryx-1641
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
🪲 Python 3.9 tests stalling
### Thank you for taking the time to report a problem with Opteryx.
_To help us to respond to your request we ask that you try to provide the below detail about the bug._
**Describe the bug** _A clear and specific description of what the bug is. What the error, incorrect or unexpected behaviour was._
**Expected behaviour** _A clear and concise description of what you expected to happen._
**Sample Code/Statement** _If you can, please submit the SQL statement or Python code snippet, or a representative example using the sample datasets._
~~~sql
~~~
**Additional context** _Add any other context about the problem here, for example what you have done to try to diagnose or workaround the problem._
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opteryx/__version__.py`
Content:
```
1 __build__ = 477
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """
16 Store the version here so:
17 1) we don't load dependencies by storing it in __init__.py
18 2) we can import it in setup.py for the same reason
19 """
20 from enum import Enum # isort: skip
21
22
23 class VersionStatus(Enum):
24 ALPHA = "alpha"
25 BETA = "beta"
26 RELEASE = "release"
27
28
29 _major = 0
30 _minor = 15
31 _revision = 0
32 _status = VersionStatus.BETA
33
34 __author__ = "@joocer"
35 __version__ = f"{_major}.{_minor}.{_revision}" + (
36 f"-{_status.value}.{__build__}" if _status != VersionStatus.RELEASE else ""
37 )
38
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/opteryx/__version__.py b/opteryx/__version__.py
--- a/opteryx/__version__.py
+++ b/opteryx/__version__.py
@@ -1,4 +1,4 @@
-__build__ = 477
+__build__ = 482
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
|
{"golden_diff": "diff --git a/opteryx/__version__.py b/opteryx/__version__.py\n--- a/opteryx/__version__.py\n+++ b/opteryx/__version__.py\n@@ -1,4 +1,4 @@\n-__build__ = 477\n+__build__ = 482\n \n # Licensed under the Apache License, Version 2.0 (the \"License\");\n # you may not use this file except in compliance with the License.\n", "issue": "\ud83e\udeb2 Python 3.9 tests stalling\n### Thank you for taking the time to report a problem with Opteryx.\r\n_To help us to respond to your request we ask that you try to provide the below detail about the bug._\r\n\r\n**Describe the bug** _A clear and specific description of what the bug is. What the error, incorrect or unexpected behaviour was._\r\n\r\n\r\n**Expected behaviour** _A clear and concise description of what you expected to happen._\r\n\r\n\r\n**Sample Code/Statement** _If you can, please submit the SQL statement or Python code snippet, or a representative example using the sample datasets._\r\n\r\n~~~sql\r\n\r\n~~~\r\n\r\n**Additional context** _Add any other context about the problem here, for example what you have done to try to diagnose or workaround the problem._\r\n\n", "before_files": [{"content": "__build__ = 477\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nStore the version here so:\n1) we don't load dependencies by storing it in __init__.py\n2) we can import it in setup.py for the same reason\n\"\"\"\nfrom enum import Enum # isort: skip\n\n\nclass VersionStatus(Enum):\n ALPHA = \"alpha\"\n BETA = \"beta\"\n RELEASE = \"release\"\n\n\n_major = 0\n_minor = 15\n_revision = 0\n_status = VersionStatus.BETA\n\n__author__ = \"@joocer\"\n__version__ = f\"{_major}.{_minor}.{_revision}\" + (\n f\"-{_status.value}.{__build__}\" if _status != VersionStatus.RELEASE else \"\"\n)\n", "path": "opteryx/__version__.py"}], "after_files": [{"content": "__build__ = 482\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nStore the version here so:\n1) we don't load dependencies by storing it in __init__.py\n2) we can import it in setup.py for the same reason\n\"\"\"\nfrom enum import Enum # isort: skip\n\n\nclass VersionStatus(Enum):\n ALPHA = \"alpha\"\n BETA = \"beta\"\n RELEASE = \"release\"\n\n\n_major = 0\n_minor = 15\n_revision = 0\n_status = VersionStatus.BETA\n\n__author__ = \"@joocer\"\n__version__ = f\"{_major}.{_minor}.{_revision}\" + (\n f\"-{_status.value}.{__build__}\" if _status != VersionStatus.RELEASE else \"\"\n)\n", "path": "opteryx/__version__.py"}]}
| 779 | 101 |
gh_patches_debug_36740
|
rasdani/github-patches
|
git_diff
|
plotly__dash-1970
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] Dropdown: Selected options not showing when the `value` contains a comma
As [reported on the forum by @ marcus](https://community.plotly.com/t/dcc-dropdown-bug-suspected-please-confirm-or-correct-me/60585):
Problem is in this spot: 'value': "50 , 10"
Specifically comma in `value` string makes that selection is not being displayed. When coma is removed or replaced (dot creates no issue) it works fine, but when coma is within the string, the selections are not displayed in the input box and are not removed from the `options`
This only occurs in Multi-Value dropdowns. This is new in Dash V2.1.0 It works as expected in V2.0.0
```
from dash import Dash, dcc, html, Input, Output
app = Dash(__name__)
app.layout = html.Div(
[
dcc.Dropdown(
id="dropdown",
className="inputbox-long",
options=[
{"label": "New York City", "value": "50, 10"},
{"label": "Montreal", "value": "MTL" },
{"label": "San Francisco", "value": "SF"},
],
placeholder="Select one or more",
multi=True,
),
html.Div(id="output"),
]
)
@app.callback(
Output("output", "children"), Input("dropdown", "value"),
)
def update(value):
return value
if __name__ == "__main__":
app.run_server(debug=True)
```

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `components/dash-html-components/dash_html_components_base/__init__.py`
Content:
```
1 """Vanilla HTML components for Dash"""
2
3 from ._imports_ import * # noqa: E402, F401, F403
4 from ._imports_ import __all__ # noqa: E402
5
6 import json
7 import os as _os
8 import sys as _sys
9 import dash as _dash
10
11 _basepath = _os.path.dirname(__file__)
12 _filepath = _os.path.abspath(_os.path.join(_basepath, "package-info.json"))
13 with open(_filepath) as f:
14 package = json.load(f)
15
16 package_name = package["name"].replace(" ", "_").replace("-", "_")
17 __version__ = package["version"]
18
19
20 # Module imports trigger a dash.development import, need to check this first
21 if not hasattr(_dash, "__plotly_dash") and not hasattr(_dash, "development"):
22 print(
23 "Dash was not successfully imported. Make sure you don't have a file "
24 "named \n'dash.py' in your current directory.",
25 file=_sys.stderr,
26 )
27 _sys.exit(1)
28
29 _current_path = _os.path.dirname(_os.path.abspath(__file__))
30
31
32 _this_module = "dash_html_components"
33
34 _js_dist = [
35 {
36 "relative_package_path": 'html/{}.min.js'.format(_this_module),
37 "external_url": (
38 "https://unpkg.com/dash-html-components@{}"
39 "/dash_html_components/dash_html_components.min.js"
40 ).format(__version__),
41 "namespace": "dash"
42 },
43 {
44 'relative_package_path': 'html/{}.min.js.map'.format(_this_module),
45 'external_url': (
46 'https://unpkg.com/dash-html-components@{}'
47 '/dash_html_components/dash_html_components.min.js.map'
48 ).format(__version__),
49 'namespace': 'dash',
50 'dynamic': True
51 }
52 ]
53
54 _css_dist = []
55
56
57 for _component in __all__:
58 setattr(locals()[_component], '_js_dist', _js_dist)
59 setattr(locals()[_component], '_css_dist', _css_dist)
60
```
Path: `components/dash-html-components/setup.py`
Content:
```
1 import io
2 import json
3 from setuptools import setup
4
5 with open('package.json') as f:
6 package = json.load(f)
7
8 package_name = str(package["name"].replace(" ", "_").replace("-", "_"))
9
10 setup(
11 name='dash_html_components',
12 version=package["version"],
13 author=package['author'],
14 author_email='[email protected]',
15 packages=[package_name],
16 url='https://github.com/plotly/dash-html-components',
17 include_package_data=True,
18 license=package['license'],
19 description=package['description'] if 'description' in package else package_name,
20 long_description=io.open('README.md', encoding='utf-8').read(),
21 long_description_content_type='text/markdown',
22 install_requires=[]
23 )
24
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/components/dash-html-components/dash_html_components_base/__init__.py b/components/dash-html-components/dash_html_components_base/__init__.py
--- a/components/dash-html-components/dash_html_components_base/__init__.py
+++ b/components/dash-html-components/dash_html_components_base/__init__.py
@@ -33,27 +33,27 @@
_js_dist = [
{
- "relative_package_path": 'html/{}.min.js'.format(_this_module),
+ "relative_package_path": "html/{}.min.js".format(_this_module),
"external_url": (
"https://unpkg.com/dash-html-components@{}"
"/dash_html_components/dash_html_components.min.js"
).format(__version__),
- "namespace": "dash"
+ "namespace": "dash",
},
{
- 'relative_package_path': 'html/{}.min.js.map'.format(_this_module),
- 'external_url': (
- 'https://unpkg.com/dash-html-components@{}'
- '/dash_html_components/dash_html_components.min.js.map'
+ "relative_package_path": "html/{}.min.js.map".format(_this_module),
+ "external_url": (
+ "https://unpkg.com/dash-html-components@{}"
+ "/dash_html_components/dash_html_components.min.js.map"
).format(__version__),
- 'namespace': 'dash',
- 'dynamic': True
- }
+ "namespace": "dash",
+ "dynamic": True,
+ },
]
_css_dist = []
for _component in __all__:
- setattr(locals()[_component], '_js_dist', _js_dist)
- setattr(locals()[_component], '_css_dist', _css_dist)
+ setattr(locals()[_component], "_js_dist", _js_dist)
+ setattr(locals()[_component], "_css_dist", _css_dist)
diff --git a/components/dash-html-components/setup.py b/components/dash-html-components/setup.py
--- a/components/dash-html-components/setup.py
+++ b/components/dash-html-components/setup.py
@@ -2,22 +2,22 @@
import json
from setuptools import setup
-with open('package.json') as f:
+with open("package.json") as f:
package = json.load(f)
package_name = str(package["name"].replace(" ", "_").replace("-", "_"))
setup(
- name='dash_html_components',
+ name="dash_html_components",
version=package["version"],
- author=package['author'],
- author_email='[email protected]',
+ author=package["author"],
+ author_email="[email protected]",
packages=[package_name],
- url='https://github.com/plotly/dash-html-components',
+ url="https://github.com/plotly/dash-html-components",
include_package_data=True,
- license=package['license'],
- description=package['description'] if 'description' in package else package_name,
- long_description=io.open('README.md', encoding='utf-8').read(),
- long_description_content_type='text/markdown',
- install_requires=[]
+ license=package["license"],
+ description=package["description"] if "description" in package else package_name,
+ long_description=io.open("README.md", encoding="utf-8").read(),
+ long_description_content_type="text/markdown",
+ install_requires=[],
)
|
{"golden_diff": "diff --git a/components/dash-html-components/dash_html_components_base/__init__.py b/components/dash-html-components/dash_html_components_base/__init__.py\n--- a/components/dash-html-components/dash_html_components_base/__init__.py\n+++ b/components/dash-html-components/dash_html_components_base/__init__.py\n@@ -33,27 +33,27 @@\n \n _js_dist = [\n {\n- \"relative_package_path\": 'html/{}.min.js'.format(_this_module),\n+ \"relative_package_path\": \"html/{}.min.js\".format(_this_module),\n \"external_url\": (\n \"https://unpkg.com/dash-html-components@{}\"\n \"/dash_html_components/dash_html_components.min.js\"\n ).format(__version__),\n- \"namespace\": \"dash\"\n+ \"namespace\": \"dash\",\n },\n {\n- 'relative_package_path': 'html/{}.min.js.map'.format(_this_module),\n- 'external_url': (\n- 'https://unpkg.com/dash-html-components@{}'\n- '/dash_html_components/dash_html_components.min.js.map'\n+ \"relative_package_path\": \"html/{}.min.js.map\".format(_this_module),\n+ \"external_url\": (\n+ \"https://unpkg.com/dash-html-components@{}\"\n+ \"/dash_html_components/dash_html_components.min.js.map\"\n ).format(__version__),\n- 'namespace': 'dash',\n- 'dynamic': True\n- }\n+ \"namespace\": \"dash\",\n+ \"dynamic\": True,\n+ },\n ]\n \n _css_dist = []\n \n \n for _component in __all__:\n- setattr(locals()[_component], '_js_dist', _js_dist)\n- setattr(locals()[_component], '_css_dist', _css_dist)\n+ setattr(locals()[_component], \"_js_dist\", _js_dist)\n+ setattr(locals()[_component], \"_css_dist\", _css_dist)\ndiff --git a/components/dash-html-components/setup.py b/components/dash-html-components/setup.py\n--- a/components/dash-html-components/setup.py\n+++ b/components/dash-html-components/setup.py\n@@ -2,22 +2,22 @@\n import json\n from setuptools import setup\n \n-with open('package.json') as f:\n+with open(\"package.json\") as f:\n package = json.load(f)\n \n package_name = str(package[\"name\"].replace(\" \", \"_\").replace(\"-\", \"_\"))\n \n setup(\n- name='dash_html_components',\n+ name=\"dash_html_components\",\n version=package[\"version\"],\n- author=package['author'],\n- author_email='[email protected]',\n+ author=package[\"author\"],\n+ author_email=\"[email protected]\",\n packages=[package_name],\n- url='https://github.com/plotly/dash-html-components',\n+ url=\"https://github.com/plotly/dash-html-components\",\n include_package_data=True,\n- license=package['license'],\n- description=package['description'] if 'description' in package else package_name,\n- long_description=io.open('README.md', encoding='utf-8').read(),\n- long_description_content_type='text/markdown',\n- install_requires=[]\n+ license=package[\"license\"],\n+ description=package[\"description\"] if \"description\" in package else package_name,\n+ long_description=io.open(\"README.md\", encoding=\"utf-8\").read(),\n+ long_description_content_type=\"text/markdown\",\n+ install_requires=[],\n )\n", "issue": "[BUG] Dropdown: Selected options not showing when the `value` contains a comma \nAs [reported on the forum by @ marcus](https://community.plotly.com/t/dcc-dropdown-bug-suspected-please-confirm-or-correct-me/60585):\r\n\r\nProblem is in this spot: 'value': \"50 , 10\"\r\nSpecifically comma in `value` string makes that selection is not being displayed. When coma is removed or replaced (dot creates no issue) it works fine, but when coma is within the string, the selections are not displayed in the input box and are not removed from the `options`\r\n\r\n This only occurs in Multi-Value dropdowns. This is new in Dash V2.1.0 It works as expected in V2.0.0\r\n\r\n\r\n\r\n\r\n```\r\nfrom dash import Dash, dcc, html, Input, Output\r\n\r\napp = Dash(__name__)\r\n\r\napp.layout = html.Div(\r\n [\r\n dcc.Dropdown(\r\n id=\"dropdown\",\r\n className=\"inputbox-long\",\r\n options=[\r\n {\"label\": \"New York City\", \"value\": \"50, 10\"},\r\n {\"label\": \"Montreal\", \"value\": \"MTL\" },\r\n {\"label\": \"San Francisco\", \"value\": \"SF\"},\r\n ],\r\n placeholder=\"Select one or more\",\r\n multi=True,\r\n ),\r\n html.Div(id=\"output\"),\r\n ]\r\n)\r\n\r\n\r\[email protected](\r\n Output(\"output\", \"children\"), Input(\"dropdown\", \"value\"),\r\n)\r\ndef update(value): \r\n return value\r\n\r\n\r\nif __name__ == \"__main__\":\r\n app.run_server(debug=True)\r\n```\r\n\r\n\n", "before_files": [{"content": "\"\"\"Vanilla HTML components for Dash\"\"\"\n\nfrom ._imports_ import * # noqa: E402, F401, F403\nfrom ._imports_ import __all__ # noqa: E402\n\nimport json\nimport os as _os\nimport sys as _sys\nimport dash as _dash\n\n_basepath = _os.path.dirname(__file__)\n_filepath = _os.path.abspath(_os.path.join(_basepath, \"package-info.json\"))\nwith open(_filepath) as f:\n package = json.load(f)\n\npackage_name = package[\"name\"].replace(\" \", \"_\").replace(\"-\", \"_\")\n__version__ = package[\"version\"]\n\n\n# Module imports trigger a dash.development import, need to check this first\nif not hasattr(_dash, \"__plotly_dash\") and not hasattr(_dash, \"development\"):\n print(\n \"Dash was not successfully imported. Make sure you don't have a file \"\n \"named \\n'dash.py' in your current directory.\",\n file=_sys.stderr,\n )\n _sys.exit(1)\n\n_current_path = _os.path.dirname(_os.path.abspath(__file__))\n\n\n_this_module = \"dash_html_components\"\n\n_js_dist = [\n {\n \"relative_package_path\": 'html/{}.min.js'.format(_this_module),\n \"external_url\": (\n \"https://unpkg.com/dash-html-components@{}\"\n \"/dash_html_components/dash_html_components.min.js\"\n ).format(__version__),\n \"namespace\": \"dash\"\n },\n {\n 'relative_package_path': 'html/{}.min.js.map'.format(_this_module),\n 'external_url': (\n 'https://unpkg.com/dash-html-components@{}'\n '/dash_html_components/dash_html_components.min.js.map'\n ).format(__version__),\n 'namespace': 'dash',\n 'dynamic': True\n }\n]\n\n_css_dist = []\n\n\nfor _component in __all__:\n setattr(locals()[_component], '_js_dist', _js_dist)\n setattr(locals()[_component], '_css_dist', _css_dist)\n", "path": "components/dash-html-components/dash_html_components_base/__init__.py"}, {"content": "import io\nimport json\nfrom setuptools import setup\n\nwith open('package.json') as f:\n package = json.load(f)\n\npackage_name = str(package[\"name\"].replace(\" \", \"_\").replace(\"-\", \"_\"))\n\nsetup(\n name='dash_html_components',\n version=package[\"version\"],\n author=package['author'],\n author_email='[email protected]',\n packages=[package_name],\n url='https://github.com/plotly/dash-html-components',\n include_package_data=True,\n license=package['license'],\n description=package['description'] if 'description' in package else package_name,\n long_description=io.open('README.md', encoding='utf-8').read(),\n long_description_content_type='text/markdown',\n install_requires=[]\n)\n", "path": "components/dash-html-components/setup.py"}], "after_files": [{"content": "\"\"\"Vanilla HTML components for Dash\"\"\"\n\nfrom ._imports_ import * # noqa: E402, F401, F403\nfrom ._imports_ import __all__ # noqa: E402\n\nimport json\nimport os as _os\nimport sys as _sys\nimport dash as _dash\n\n_basepath = _os.path.dirname(__file__)\n_filepath = _os.path.abspath(_os.path.join(_basepath, \"package-info.json\"))\nwith open(_filepath) as f:\n package = json.load(f)\n\npackage_name = package[\"name\"].replace(\" \", \"_\").replace(\"-\", \"_\")\n__version__ = package[\"version\"]\n\n\n# Module imports trigger a dash.development import, need to check this first\nif not hasattr(_dash, \"__plotly_dash\") and not hasattr(_dash, \"development\"):\n print(\n \"Dash was not successfully imported. Make sure you don't have a file \"\n \"named \\n'dash.py' in your current directory.\",\n file=_sys.stderr,\n )\n _sys.exit(1)\n\n_current_path = _os.path.dirname(_os.path.abspath(__file__))\n\n\n_this_module = \"dash_html_components\"\n\n_js_dist = [\n {\n \"relative_package_path\": \"html/{}.min.js\".format(_this_module),\n \"external_url\": (\n \"https://unpkg.com/dash-html-components@{}\"\n \"/dash_html_components/dash_html_components.min.js\"\n ).format(__version__),\n \"namespace\": \"dash\",\n },\n {\n \"relative_package_path\": \"html/{}.min.js.map\".format(_this_module),\n \"external_url\": (\n \"https://unpkg.com/dash-html-components@{}\"\n \"/dash_html_components/dash_html_components.min.js.map\"\n ).format(__version__),\n \"namespace\": \"dash\",\n \"dynamic\": True,\n },\n]\n\n_css_dist = []\n\n\nfor _component in __all__:\n setattr(locals()[_component], \"_js_dist\", _js_dist)\n setattr(locals()[_component], \"_css_dist\", _css_dist)\n", "path": "components/dash-html-components/dash_html_components_base/__init__.py"}, {"content": "import io\nimport json\nfrom setuptools import setup\n\nwith open(\"package.json\") as f:\n package = json.load(f)\n\npackage_name = str(package[\"name\"].replace(\" \", \"_\").replace(\"-\", \"_\"))\n\nsetup(\n name=\"dash_html_components\",\n version=package[\"version\"],\n author=package[\"author\"],\n author_email=\"[email protected]\",\n packages=[package_name],\n url=\"https://github.com/plotly/dash-html-components\",\n include_package_data=True,\n license=package[\"license\"],\n description=package[\"description\"] if \"description\" in package else package_name,\n long_description=io.open(\"README.md\", encoding=\"utf-8\").read(),\n long_description_content_type=\"text/markdown\",\n install_requires=[],\n)\n", "path": "components/dash-html-components/setup.py"}]}
| 1,468 | 764 |
gh_patches_debug_3710
|
rasdani/github-patches
|
git_diff
|
Cloud-CV__EvalAI-2198
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Challenges not displayed on the challenge page if > 10
## Current Behaviour:
If there are more than 10 challenges on the challenge page then those challenges aren't displayed on the page.
## Expected Behaviour:
All the challenges should be displayed on the challenge page.
## Deliverables -
- [ ] The API already returns the challenge data along with a URL for the next set of challenges.
- [ ] Make an API call to the next URL and append the data into the existing list in the controller.
- [ ] Terminate the appending once the `next` field is null in the API's response.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `settings/common.py`
Content:
```
1 """
2 Django settings for evalai project.
3
4 Generated by 'django-admin startproject' using Django 1.10.2.
5
6 For more information on this file, see
7 https://docs.djangoproject.com/en/1.10/topics/settings/
8
9 For the full list of settings and their values, see
10 https://docs.djangoproject.com/en/1.10/ref/settings/
11 """
12
13 import datetime
14 import os
15 import sys
16
17 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
18 BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
19 APPS_DIR = os.path.join(BASE_DIR, 'apps')
20
21 sys.path.append(APPS_DIR)
22
23 # Quick-start development settings - unsuitable for production
24 # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
25
26 # SECURITY WARNING: keep the secret key used in production secret!
27 SECRET_KEY = os.environ.get('SECRET_KEY', 'random_secret_key')
28
29 # SECURITY WARNING: don't run with debug turned on in production!
30 DEBUG = True
31
32 TEST = False
33
34 ALLOWED_HOSTS = []
35
36
37 # Application definition
38
39 DEFAULT_APPS = [
40 'django.contrib.admin',
41 'django.contrib.auth',
42 'django.contrib.contenttypes',
43 'django.contrib.sessions',
44 'django.contrib.messages',
45 'django.contrib.staticfiles',
46 'django.contrib.sites',
47 ]
48
49 OUR_APPS = [
50 'accounts',
51 'analytics',
52 'base',
53 'challenges',
54 'hosts',
55 'jobs',
56 'participants',
57 'web',
58 ]
59
60 THIRD_PARTY_APPS = [
61 'allauth',
62 'allauth.account',
63 'corsheaders',
64 'django_ses',
65 'import_export',
66 'rest_auth',
67 'rest_auth.registration',
68 'rest_framework.authtoken',
69 'rest_framework',
70 'rest_framework_docs',
71 'rest_framework_expiring_authtoken',
72 'drf_yasg',
73 ]
74
75 INSTALLED_APPS = DEFAULT_APPS + OUR_APPS + THIRD_PARTY_APPS
76
77 MIDDLEWARE = [
78 'corsheaders.middleware.CorsMiddleware',
79 'django.middleware.security.SecurityMiddleware',
80 'django.contrib.sessions.middleware.SessionMiddleware',
81 'django.middleware.common.CommonMiddleware',
82 'django.middleware.csrf.CsrfViewMiddleware',
83 'django.contrib.auth.middleware.AuthenticationMiddleware',
84 'django.contrib.messages.middleware.MessageMiddleware',
85 'django.middleware.clickjacking.XFrameOptionsMiddleware',
86 ]
87
88 ROOT_URLCONF = 'evalai.urls'
89
90
91 TEMPLATES = [
92 {
93 'BACKEND': 'django.template.backends.django.DjangoTemplates',
94 'DIRS': [],
95 'APP_DIRS': True,
96 'OPTIONS': {
97 'context_processors': [
98 'django.template.context_processors.debug',
99 'django.template.context_processors.request',
100 'django.contrib.auth.context_processors.auth',
101 'django.contrib.messages.context_processors.messages',
102 ],
103 },
104 },
105 ]
106
107 WSGI_APPLICATION = 'evalai.wsgi.application'
108
109
110 # Password validation
111 # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
112
113 AUTH_PASSWORD_VALIDATORS = [
114 {
115 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa
116 },
117 {
118 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa
119 },
120 {
121 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa
122 },
123 {
124 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa
125 },
126 ]
127
128
129 # Internationalization
130 # https://docs.djangoproject.com/en/1.10/topics/i18n/
131
132 LANGUAGE_CODE = 'en-us'
133
134 TIME_ZONE = 'UTC'
135
136 USE_I18N = True
137
138 USE_L10N = True
139
140 USE_TZ = True
141
142 # Static files (CSS, JavaScript, Images)
143 # https://docs.djangoproject.com/en/1.10/howto/static-files/
144
145 STATIC_URL = '/static/'
146 STATIC_ROOT = os.path.join(BASE_DIR, 'static')
147 MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
148 MEDIA_URL = "/media/"
149
150 SITE_ID = 1
151
152 REST_FRAMEWORK = {
153 'DEFAULT_PAGINATION_CLASS': (
154 'rest_framework.pagination.LimitOffsetPagination'),
155 'PAGE_SIZE': 10,
156 'DEFAULT_PERMISSION_CLASSES': [
157 'rest_framework.permissions.IsAuthenticatedOrReadOnly'
158 ],
159 'DEFAULT_AUTHENTICATION_CLASSES': [
160 'rest_framework_expiring_authtoken.authentication.ExpiringTokenAuthentication',
161 ],
162 'TEST_REQUEST_DEFAULT_FORMAT': 'json',
163 'DEFAULT_THROTTLE_CLASSES': (
164 'rest_framework.throttling.AnonRateThrottle',
165 'rest_framework.throttling.UserRateThrottle'
166 ),
167 'DEFAULT_THROTTLE_RATES': {
168 'anon': '100/minute',
169 'user': '100/minute'
170 },
171 'DEFAULT_RENDERER_CLASSES': (
172 'rest_framework.renderers.JSONRenderer',
173 )
174 }
175
176 # ALLAUTH SETTINGS
177 ACCOUNT_EMAIL_REQUIRED = True
178 OLD_PASSWORD_FIELD_ENABLED = True
179 ACCOUNT_CONFIRM_EMAIL_ON_GET = True
180 ACCOUNT_EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL = '/api/auth/email-confirmed/'
181 ACCOUNT_EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL = '/api/auth/email-confirmed/'
182
183 AUTHENTICATION_BACKENDS = (
184 # Needed to login by username in Django admin, regardless of `allauth`
185 'django.contrib.auth.backends.ModelBackend',
186 # `allauth` specific authentication methods, such as login by e-mail
187 'allauth.account.auth_backends.AuthenticationBackend',
188 )
189
190 # CORS Settings
191 CORS_ORIGIN_ALLOW_ALL = True
192
193 # REST Framework Expiring Tokens Configuration
194 EXPIRING_TOKEN_LIFESPAN = datetime.timedelta(days=365)
195
196 # Logging
197 LOGGING = {
198 'version': 1,
199 'disable_existing_loggers': False,
200 'root': {
201 'level': 'INFO',
202 'handlers': ['console'],
203 },
204 'filters': {
205 'require_debug_false': {
206 '()': 'django.utils.log.RequireDebugFalse',
207 },
208 'require_debug_true': {
209 '()': 'django.utils.log.RequireDebugTrue',
210 }
211 },
212 'formatters': {
213 'simple': {
214 'format': '[%(asctime)s] %(levelname)s %(message)s',
215 'datefmt': '%Y-%m-%d %H:%M:%S'
216 },
217 'verbose': {
218 'format': '[%(asctime)s] %(levelname)s %(module)s %(message)s',
219 'datefmt': '%Y-%m-%d %H:%M:%S'
220 }
221 },
222 'handlers': {
223 'console': {
224 'level': 'INFO',
225 'filters': ['require_debug_true'],
226 'class': 'logging.StreamHandler',
227 'formatter': 'simple'
228 },
229 'logfile': {
230 'level': 'DEBUG',
231 'class': 'logging.handlers.RotatingFileHandler',
232 'filename': os.path.join(BASE_DIR, 'django.log'),
233 'maxBytes': 50000,
234 'backupCount': 10,
235 'formatter': 'verbose'
236 },
237 'mail_admins': {
238 'level': 'ERROR',
239 'class': 'django.utils.log.AdminEmailHandler',
240 'filters': ['require_debug_false'],
241 }
242 },
243 'loggers': {
244 'django': {
245 'handlers': ['console'],
246 'propagate': False,
247 },
248 'django.request': {
249 'handlers': ['mail_admins'],
250 'level': 'ERROR',
251 'propagate': False,
252 },
253 'django.security': {
254 'handlers': ['mail_admins'],
255 'level': 'ERROR',
256 'propagate': False,
257 },
258 'django.db.backends': {
259 'handlers': ['mail_admins'],
260 'level': 'ERROR',
261 'propagate': False,
262 }
263 }
264 }
265
266 CACHES = {
267 'default': {
268 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
269 }
270 }
271
272 # The maximum size in bytes for request body
273 # https://docs.djangoproject.com/en/1.10/ref/settings/#data-upload-max-memory-size
274 FILE_UPLOAD_MAX_MEMORY_SIZE = 524288000 # 500 MB
275 DATA_UPLOAD_MAX_MEMORY_SIZE = 524288000 # 500 MB
276
277 # To make usermame field read-only, customized serializer is defined.
278 REST_AUTH_SERIALIZERS = {
279 'USER_DETAILS_SERIALIZER': 'accounts.serializers.ProfileSerializer',
280 }
281
282 # For inviting users to participant and host teams.
283 ADMIN_EMAIL = "[email protected]"
284 CLOUDCV_TEAM_EMAIL = "EvalAI Team <[email protected]>"
285
286 SWAGGER_SETTINGS = {
287 'DEFAULT_INFO': 'evalai.urls.swagger_api_info',
288 'SECURITY_DEFINITIONS': {
289 'Token Authentication': {
290 'type': 'apiKey',
291 'name': 'Authorization',
292 'in': 'header'
293 },
294 }
295 }
296
297 REDOC_SETTINGS = {
298 'SPEC_URL': ('docs.yaml', {'format': '.yaml'}),
299 }
300
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/settings/common.py b/settings/common.py
--- a/settings/common.py
+++ b/settings/common.py
@@ -152,7 +152,7 @@
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': (
'rest_framework.pagination.LimitOffsetPagination'),
- 'PAGE_SIZE': 10,
+ 'PAGE_SIZE': 100,
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAuthenticatedOrReadOnly'
],
|
{"golden_diff": "diff --git a/settings/common.py b/settings/common.py\n--- a/settings/common.py\n+++ b/settings/common.py\n@@ -152,7 +152,7 @@\n REST_FRAMEWORK = {\n 'DEFAULT_PAGINATION_CLASS': (\n 'rest_framework.pagination.LimitOffsetPagination'),\n- 'PAGE_SIZE': 10,\n+ 'PAGE_SIZE': 100,\n 'DEFAULT_PERMISSION_CLASSES': [\n 'rest_framework.permissions.IsAuthenticatedOrReadOnly'\n ],\n", "issue": "Challenges not displayed on the challenge page if > 10\n## Current Behaviour:\r\nIf there are more than 10 challenges on the challenge page then those challenges aren't displayed on the page.\r\n\r\n## Expected Behaviour:\r\nAll the challenges should be displayed on the challenge page.\r\n\r\n## Deliverables - \r\n- [ ] The API already returns the challenge data along with a URL for the next set of challenges.\r\n- [ ] Make an API call to the next URL and append the data into the existing list in the controller.\r\n- [ ] Terminate the appending once the `next` field is null in the API's response.\n", "before_files": [{"content": "\"\"\"\nDjango settings for evalai project.\n\nGenerated by 'django-admin startproject' using Django 1.10.2.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.10/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.10/ref/settings/\n\"\"\"\n\nimport datetime\nimport os\nimport sys\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nAPPS_DIR = os.path.join(BASE_DIR, 'apps')\n\nsys.path.append(APPS_DIR)\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = os.environ.get('SECRET_KEY', 'random_secret_key')\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = True\n\nTEST = False\n\nALLOWED_HOSTS = []\n\n\n# Application definition\n\nDEFAULT_APPS = [\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'django.contrib.sites',\n]\n\nOUR_APPS = [\n 'accounts',\n 'analytics',\n 'base',\n 'challenges',\n 'hosts',\n 'jobs',\n 'participants',\n 'web',\n]\n\nTHIRD_PARTY_APPS = [\n 'allauth',\n 'allauth.account',\n 'corsheaders',\n 'django_ses',\n 'import_export',\n 'rest_auth',\n 'rest_auth.registration',\n 'rest_framework.authtoken',\n 'rest_framework',\n 'rest_framework_docs',\n 'rest_framework_expiring_authtoken',\n 'drf_yasg',\n]\n\nINSTALLED_APPS = DEFAULT_APPS + OUR_APPS + THIRD_PARTY_APPS\n\nMIDDLEWARE = [\n 'corsheaders.middleware.CorsMiddleware',\n 'django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n]\n\nROOT_URLCONF = 'evalai.urls'\n\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n ],\n },\n },\n]\n\nWSGI_APPLICATION = 'evalai.wsgi.application'\n\n\n# Password validation\n# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa\n },\n]\n\n\n# Internationalization\n# https://docs.djangoproject.com/en/1.10/topics/i18n/\n\nLANGUAGE_CODE = 'en-us'\n\nTIME_ZONE = 'UTC'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/1.10/howto/static-files/\n\nSTATIC_URL = '/static/'\nSTATIC_ROOT = os.path.join(BASE_DIR, 'static')\nMEDIA_ROOT = os.path.join(BASE_DIR, 'media')\nMEDIA_URL = \"/media/\"\n\nSITE_ID = 1\n\nREST_FRAMEWORK = {\n 'DEFAULT_PAGINATION_CLASS': (\n 'rest_framework.pagination.LimitOffsetPagination'),\n 'PAGE_SIZE': 10,\n 'DEFAULT_PERMISSION_CLASSES': [\n 'rest_framework.permissions.IsAuthenticatedOrReadOnly'\n ],\n 'DEFAULT_AUTHENTICATION_CLASSES': [\n 'rest_framework_expiring_authtoken.authentication.ExpiringTokenAuthentication',\n ],\n 'TEST_REQUEST_DEFAULT_FORMAT': 'json',\n 'DEFAULT_THROTTLE_CLASSES': (\n 'rest_framework.throttling.AnonRateThrottle',\n 'rest_framework.throttling.UserRateThrottle'\n ),\n 'DEFAULT_THROTTLE_RATES': {\n 'anon': '100/minute',\n 'user': '100/minute'\n },\n 'DEFAULT_RENDERER_CLASSES': (\n 'rest_framework.renderers.JSONRenderer',\n )\n}\n\n# ALLAUTH SETTINGS\nACCOUNT_EMAIL_REQUIRED = True\nOLD_PASSWORD_FIELD_ENABLED = True\nACCOUNT_CONFIRM_EMAIL_ON_GET = True\nACCOUNT_EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL = '/api/auth/email-confirmed/'\nACCOUNT_EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL = '/api/auth/email-confirmed/'\n\nAUTHENTICATION_BACKENDS = (\n # Needed to login by username in Django admin, regardless of `allauth`\n 'django.contrib.auth.backends.ModelBackend',\n # `allauth` specific authentication methods, such as login by e-mail\n 'allauth.account.auth_backends.AuthenticationBackend',\n)\n\n# CORS Settings\nCORS_ORIGIN_ALLOW_ALL = True\n\n# REST Framework Expiring Tokens Configuration\nEXPIRING_TOKEN_LIFESPAN = datetime.timedelta(days=365)\n\n# Logging\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'root': {\n 'level': 'INFO',\n 'handlers': ['console'],\n },\n 'filters': {\n 'require_debug_false': {\n '()': 'django.utils.log.RequireDebugFalse',\n },\n 'require_debug_true': {\n '()': 'django.utils.log.RequireDebugTrue',\n }\n },\n 'formatters': {\n 'simple': {\n 'format': '[%(asctime)s] %(levelname)s %(message)s',\n 'datefmt': '%Y-%m-%d %H:%M:%S'\n },\n 'verbose': {\n 'format': '[%(asctime)s] %(levelname)s %(module)s %(message)s',\n 'datefmt': '%Y-%m-%d %H:%M:%S'\n }\n },\n 'handlers': {\n 'console': {\n 'level': 'INFO',\n 'filters': ['require_debug_true'],\n 'class': 'logging.StreamHandler',\n 'formatter': 'simple'\n },\n 'logfile': {\n 'level': 'DEBUG',\n 'class': 'logging.handlers.RotatingFileHandler',\n 'filename': os.path.join(BASE_DIR, 'django.log'),\n 'maxBytes': 50000,\n 'backupCount': 10,\n 'formatter': 'verbose'\n },\n 'mail_admins': {\n 'level': 'ERROR',\n 'class': 'django.utils.log.AdminEmailHandler',\n 'filters': ['require_debug_false'],\n }\n },\n 'loggers': {\n 'django': {\n 'handlers': ['console'],\n 'propagate': False,\n },\n 'django.request': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': False,\n },\n 'django.security': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': False,\n },\n 'django.db.backends': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': False,\n }\n }\n}\n\nCACHES = {\n 'default': {\n 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',\n }\n}\n\n# The maximum size in bytes for request body\n# https://docs.djangoproject.com/en/1.10/ref/settings/#data-upload-max-memory-size\nFILE_UPLOAD_MAX_MEMORY_SIZE = 524288000 # 500 MB\nDATA_UPLOAD_MAX_MEMORY_SIZE = 524288000 # 500 MB\n\n# To make usermame field read-only, customized serializer is defined.\nREST_AUTH_SERIALIZERS = {\n 'USER_DETAILS_SERIALIZER': 'accounts.serializers.ProfileSerializer',\n}\n\n# For inviting users to participant and host teams.\nADMIN_EMAIL = \"[email protected]\"\nCLOUDCV_TEAM_EMAIL = \"EvalAI Team <[email protected]>\"\n\nSWAGGER_SETTINGS = {\n 'DEFAULT_INFO': 'evalai.urls.swagger_api_info',\n 'SECURITY_DEFINITIONS': {\n 'Token Authentication': {\n 'type': 'apiKey',\n 'name': 'Authorization',\n 'in': 'header'\n },\n }\n}\n\nREDOC_SETTINGS = {\n 'SPEC_URL': ('docs.yaml', {'format': '.yaml'}),\n}\n", "path": "settings/common.py"}], "after_files": [{"content": "\"\"\"\nDjango settings for evalai project.\n\nGenerated by 'django-admin startproject' using Django 1.10.2.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.10/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.10/ref/settings/\n\"\"\"\n\nimport datetime\nimport os\nimport sys\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nAPPS_DIR = os.path.join(BASE_DIR, 'apps')\n\nsys.path.append(APPS_DIR)\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = os.environ.get('SECRET_KEY', 'random_secret_key')\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = True\n\nTEST = False\n\nALLOWED_HOSTS = []\n\n\n# Application definition\n\nDEFAULT_APPS = [\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'django.contrib.sites',\n]\n\nOUR_APPS = [\n 'accounts',\n 'analytics',\n 'base',\n 'challenges',\n 'hosts',\n 'jobs',\n 'participants',\n 'web',\n]\n\nTHIRD_PARTY_APPS = [\n 'allauth',\n 'allauth.account',\n 'corsheaders',\n 'django_ses',\n 'import_export',\n 'rest_auth',\n 'rest_auth.registration',\n 'rest_framework.authtoken',\n 'rest_framework',\n 'rest_framework_docs',\n 'rest_framework_expiring_authtoken',\n 'drf_yasg',\n]\n\nINSTALLED_APPS = DEFAULT_APPS + OUR_APPS + THIRD_PARTY_APPS\n\nMIDDLEWARE = [\n 'corsheaders.middleware.CorsMiddleware',\n 'django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n]\n\nROOT_URLCONF = 'evalai.urls'\n\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n ],\n },\n },\n]\n\nWSGI_APPLICATION = 'evalai.wsgi.application'\n\n\n# Password validation\n# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa\n },\n]\n\n\n# Internationalization\n# https://docs.djangoproject.com/en/1.10/topics/i18n/\n\nLANGUAGE_CODE = 'en-us'\n\nTIME_ZONE = 'UTC'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/1.10/howto/static-files/\n\nSTATIC_URL = '/static/'\nSTATIC_ROOT = os.path.join(BASE_DIR, 'static')\nMEDIA_ROOT = os.path.join(BASE_DIR, 'media')\nMEDIA_URL = \"/media/\"\n\nSITE_ID = 1\n\nREST_FRAMEWORK = {\n 'DEFAULT_PAGINATION_CLASS': (\n 'rest_framework.pagination.LimitOffsetPagination'),\n 'PAGE_SIZE': 100,\n 'DEFAULT_PERMISSION_CLASSES': [\n 'rest_framework.permissions.IsAuthenticatedOrReadOnly'\n ],\n 'DEFAULT_AUTHENTICATION_CLASSES': [\n 'rest_framework_expiring_authtoken.authentication.ExpiringTokenAuthentication',\n ],\n 'TEST_REQUEST_DEFAULT_FORMAT': 'json',\n 'DEFAULT_THROTTLE_CLASSES': (\n 'rest_framework.throttling.AnonRateThrottle',\n 'rest_framework.throttling.UserRateThrottle'\n ),\n 'DEFAULT_THROTTLE_RATES': {\n 'anon': '100/minute',\n 'user': '100/minute'\n },\n 'DEFAULT_RENDERER_CLASSES': (\n 'rest_framework.renderers.JSONRenderer',\n )\n}\n\n# ALLAUTH SETTINGS\nACCOUNT_EMAIL_REQUIRED = True\nOLD_PASSWORD_FIELD_ENABLED = True\nACCOUNT_CONFIRM_EMAIL_ON_GET = True\nACCOUNT_EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL = '/api/auth/email-confirmed/'\nACCOUNT_EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL = '/api/auth/email-confirmed/'\n\nAUTHENTICATION_BACKENDS = (\n # Needed to login by username in Django admin, regardless of `allauth`\n 'django.contrib.auth.backends.ModelBackend',\n # `allauth` specific authentication methods, such as login by e-mail\n 'allauth.account.auth_backends.AuthenticationBackend',\n)\n\n# CORS Settings\nCORS_ORIGIN_ALLOW_ALL = True\n\n# REST Framework Expiring Tokens Configuration\nEXPIRING_TOKEN_LIFESPAN = datetime.timedelta(days=365)\n\n# Logging\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'root': {\n 'level': 'INFO',\n 'handlers': ['console'],\n },\n 'filters': {\n 'require_debug_false': {\n '()': 'django.utils.log.RequireDebugFalse',\n },\n 'require_debug_true': {\n '()': 'django.utils.log.RequireDebugTrue',\n }\n },\n 'formatters': {\n 'simple': {\n 'format': '[%(asctime)s] %(levelname)s %(message)s',\n 'datefmt': '%Y-%m-%d %H:%M:%S'\n },\n 'verbose': {\n 'format': '[%(asctime)s] %(levelname)s %(module)s %(message)s',\n 'datefmt': '%Y-%m-%d %H:%M:%S'\n }\n },\n 'handlers': {\n 'console': {\n 'level': 'INFO',\n 'filters': ['require_debug_true'],\n 'class': 'logging.StreamHandler',\n 'formatter': 'simple'\n },\n 'logfile': {\n 'level': 'DEBUG',\n 'class': 'logging.handlers.RotatingFileHandler',\n 'filename': os.path.join(BASE_DIR, 'django.log'),\n 'maxBytes': 50000,\n 'backupCount': 10,\n 'formatter': 'verbose'\n },\n 'mail_admins': {\n 'level': 'ERROR',\n 'class': 'django.utils.log.AdminEmailHandler',\n 'filters': ['require_debug_false'],\n }\n },\n 'loggers': {\n 'django': {\n 'handlers': ['console'],\n 'propagate': False,\n },\n 'django.request': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': False,\n },\n 'django.security': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': False,\n },\n 'django.db.backends': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': False,\n }\n }\n}\n\nCACHES = {\n 'default': {\n 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',\n }\n}\n\n# The maximum size in bytes for request body\n# https://docs.djangoproject.com/en/1.10/ref/settings/#data-upload-max-memory-size\nFILE_UPLOAD_MAX_MEMORY_SIZE = 524288000 # 500 MB\nDATA_UPLOAD_MAX_MEMORY_SIZE = 524288000 # 500 MB\n\n# To make usermame field read-only, customized serializer is defined.\nREST_AUTH_SERIALIZERS = {\n 'USER_DETAILS_SERIALIZER': 'accounts.serializers.ProfileSerializer',\n}\n\n# For inviting users to participant and host teams.\nADMIN_EMAIL = \"[email protected]\"\nCLOUDCV_TEAM_EMAIL = \"EvalAI Team <[email protected]>\"\n\nSWAGGER_SETTINGS = {\n 'DEFAULT_INFO': 'evalai.urls.swagger_api_info',\n 'SECURITY_DEFINITIONS': {\n 'Token Authentication': {\n 'type': 'apiKey',\n 'name': 'Authorization',\n 'in': 'header'\n },\n }\n}\n\nREDOC_SETTINGS = {\n 'SPEC_URL': ('docs.yaml', {'format': '.yaml'}),\n}\n", "path": "settings/common.py"}]}
| 3,134 | 102 |
gh_patches_debug_21725
|
rasdani/github-patches
|
git_diff
|
getpelican__pelican-1937
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Atom feed could/should use <published> date
I've noticed that my atom feed will use `:date:` in the `<updated>` field in feed.xml, and will override that with `:modified:` if modified is specified. This results in an unwanted email re-send when using MailChimp's RSS --> Email feature. Going by this page, I think they would use `<published>` first if it existed:
http://kb.mailchimp.com/campaigns/rss-in-campaigns/troubleshooting-rss-in-campaigns
And I think in general it would be nice to call out both `<published>` to mark the original date, and `<updated>` for modifications. (And it aligns better with what I expect with `:date:` and `:modified:` meta in my .rst files.)
Not a big problem -- I'll just have to refrain from using :modified: to avoid this behavior, but I figured it wouldn't hurt to ask. :-)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 from os import walk
3 from os.path import join, relpath, dirname
4
5 from setuptools import setup
6
7 requires = ['feedgenerator >= 1.6', 'jinja2 >= 2.7', 'pygments', 'docutils',
8 'pytz >= 0a', 'blinker', 'unidecode', 'six >= 1.4',
9 'python-dateutil']
10
11 entry_points = {
12 'console_scripts': [
13 'pelican = pelican:main',
14 'pelican-import = pelican.tools.pelican_import:main',
15 'pelican-quickstart = pelican.tools.pelican_quickstart:main',
16 'pelican-themes = pelican.tools.pelican_themes:main'
17 ]
18 }
19
20 README = open('README.rst').read()
21 CHANGELOG = open('docs/changelog.rst').read()
22
23 setup(
24 name="pelican",
25 version="3.6.4.dev0",
26 url='http://getpelican.com/',
27 author='Alexis Metaireau',
28 author_email='[email protected]',
29 description="A tool to generate a static blog from reStructuredText or "
30 "Markdown input files.",
31 long_description=README + '\n' + CHANGELOG,
32 packages=['pelican', 'pelican.tools'],
33 package_data={
34 # we manually collect the package data, as opposed to using include_package_data=True
35 # because we don't want the tests to be included automatically as package data
36 # (MANIFEST.in is too greedy)
37 'pelican': [
38 relpath(join(root, name), 'pelican')
39 for root, _, names in walk(join('pelican', 'themes')) for name in names
40 ],
41 'pelican.tools': [
42 relpath(join(root, name), join('pelican', 'tools'))
43 for root, _, names in walk(join('pelican', 'tools', 'templates')) for name in names
44 ],
45 },
46 install_requires=requires,
47 entry_points=entry_points,
48 classifiers=[
49 'Development Status :: 5 - Production/Stable',
50 'Environment :: Console',
51 'License :: OSI Approved :: GNU Affero General Public License v3',
52 'Operating System :: OS Independent',
53 'Programming Language :: Python :: 2',
54 'Programming Language :: Python :: 2.7',
55 'Programming Language :: Python :: 3',
56 'Programming Language :: Python :: 3.3',
57 'Programming Language :: Python :: 3.4',
58 'Programming Language :: Python :: 3.5',
59 'Topic :: Internet :: WWW/HTTP',
60 'Topic :: Software Development :: Libraries :: Python Modules',
61 ],
62 test_suite='pelican.tests',
63 )
64
```
Path: `pelican/writers.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 from __future__ import print_function, unicode_literals, with_statement
3
4 import logging
5 import os
6
7 from feedgenerator import Atom1Feed, Rss201rev2Feed
8
9 from jinja2 import Markup
10
11 import six
12 from six.moves.urllib.parse import urlparse
13
14 from pelican import signals
15 from pelican.paginator import Paginator
16 from pelican.utils import (get_relative_path, is_selected_for_writing,
17 path_to_url, set_date_tzinfo)
18
19 if not six.PY3:
20 from codecs import open
21
22 logger = logging.getLogger(__name__)
23
24
25 class Writer(object):
26
27 def __init__(self, output_path, settings=None):
28 self.output_path = output_path
29 self.reminder = dict()
30 self.settings = settings or {}
31 self._written_files = set()
32 self._overridden_files = set()
33
34 def _create_new_feed(self, feed_type, context):
35 feed_class = Rss201rev2Feed if feed_type == 'rss' else Atom1Feed
36 sitename = Markup(context['SITENAME']).striptags()
37 feed = feed_class(
38 title=sitename,
39 link=(self.site_url + '/'),
40 feed_url=self.feed_url,
41 description=context.get('SITESUBTITLE', ''))
42 return feed
43
44 def _add_item_to_the_feed(self, feed, item):
45
46 title = Markup(item.title).striptags()
47 link = '%s/%s' % (self.site_url, item.url)
48 feed.add_item(
49 title=title,
50 link=link,
51 unique_id='tag:%s,%s:%s' % (urlparse(link).netloc,
52 item.date.date(),
53 urlparse(link).path.lstrip('/')),
54 description=item.get_content(self.site_url),
55 categories=item.tags if hasattr(item, 'tags') else None,
56 author_name=getattr(item, 'author', ''),
57 pubdate=set_date_tzinfo(
58 item.modified if hasattr(item, 'modified') else item.date,
59 self.settings.get('TIMEZONE', None)))
60
61 def _open_w(self, filename, encoding, override=False):
62 """Open a file to write some content to it.
63
64 Exit if we have already written to that file, unless one (and no more
65 than one) of the writes has the override parameter set to True.
66 """
67 if filename in self._overridden_files:
68 if override:
69 raise RuntimeError('File %s is set to be overridden twice'
70 % filename)
71 else:
72 logger.info('Skipping %s', filename)
73 filename = os.devnull
74 elif filename in self._written_files:
75 if override:
76 logger.info('Overwriting %s', filename)
77 else:
78 raise RuntimeError('File %s is to be overwritten' % filename)
79 if override:
80 self._overridden_files.add(filename)
81 self._written_files.add(filename)
82 return open(filename, 'w', encoding=encoding)
83
84 def write_feed(self, elements, context, path=None, feed_type='atom',
85 override_output=False):
86 """Generate a feed with the list of articles provided
87
88 Return the feed. If no path or output_path is specified, just
89 return the feed object.
90
91 :param elements: the articles to put on the feed.
92 :param context: the context to get the feed metadata.
93 :param path: the path to output.
94 :param feed_type: the feed type to use (atom or rss)
95 :param override_output: boolean telling if we can override previous
96 output with the same name (and if next files written with the same
97 name should be skipped to keep that one)
98 """
99 if not is_selected_for_writing(self.settings, path):
100 return
101
102 self.site_url = context.get(
103 'SITEURL', path_to_url(get_relative_path(path)))
104
105 self.feed_domain = context.get('FEED_DOMAIN')
106 self.feed_url = '{}/{}'.format(self.feed_domain, path)
107
108 feed = self._create_new_feed(feed_type, context)
109
110 max_items = len(elements)
111 if self.settings['FEED_MAX_ITEMS']:
112 max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)
113 for i in range(max_items):
114 self._add_item_to_the_feed(feed, elements[i])
115
116 if path:
117 complete_path = os.path.join(self.output_path, path)
118 try:
119 os.makedirs(os.path.dirname(complete_path))
120 except Exception:
121 pass
122
123 encoding = 'utf-8' if six.PY3 else None
124 with self._open_w(complete_path, encoding, override_output) as fp:
125 feed.write(fp, 'utf-8')
126 logger.info('Writing %s', complete_path)
127
128 signals.feed_written.send(
129 complete_path, context=context, feed=feed)
130 return feed
131
132 def write_file(self, name, template, context, relative_urls=False,
133 paginated=None, override_output=False, **kwargs):
134 """Render the template and write the file.
135
136 :param name: name of the file to output
137 :param template: template to use to generate the content
138 :param context: dict to pass to the templates.
139 :param relative_urls: use relative urls or absolutes ones
140 :param paginated: dict of article list to paginate - must have the
141 same length (same list in different orders)
142 :param override_output: boolean telling if we can override previous
143 output with the same name (and if next files written with the same
144 name should be skipped to keep that one)
145 :param **kwargs: additional variables to pass to the templates
146 """
147
148 if name is False or \
149 name == "" or \
150 not is_selected_for_writing(self.settings,
151 os.path.join(self.output_path, name)):
152 return
153 elif not name:
154 # other stuff, just return for now
155 return
156
157 def _write_file(template, localcontext, output_path, name, override):
158 """Render the template write the file."""
159 # set localsiteurl for context so that Contents can adjust links
160 if localcontext['localsiteurl']:
161 context['localsiteurl'] = localcontext['localsiteurl']
162 output = template.render(localcontext)
163 path = os.path.join(output_path, name)
164 try:
165 os.makedirs(os.path.dirname(path))
166 except Exception:
167 pass
168
169 with self._open_w(path, 'utf-8', override=override) as f:
170 f.write(output)
171 logger.info('Writing %s', path)
172
173 # Send a signal to say we're writing a file with some specific
174 # local context.
175 signals.content_written.send(path, context=localcontext)
176
177 def _get_localcontext(context, name, kwargs, relative_urls):
178 localcontext = context.copy()
179 localcontext['localsiteurl'] = localcontext.get(
180 'localsiteurl', None)
181 if relative_urls:
182 relative_url = path_to_url(get_relative_path(name))
183 localcontext['SITEURL'] = relative_url
184 localcontext['localsiteurl'] = relative_url
185 localcontext['output_file'] = name
186 localcontext.update(kwargs)
187 return localcontext
188
189 # pagination
190 if paginated:
191
192 # pagination needed, init paginators
193 paginators = {key: Paginator(name, val, self.settings)
194 for key, val in paginated.items()}
195
196 # generated pages, and write
197 for page_num in range(list(paginators.values())[0].num_pages):
198 paginated_kwargs = kwargs.copy()
199 for key in paginators.keys():
200 paginator = paginators[key]
201 previous_page = paginator.page(page_num) \
202 if page_num > 0 else None
203 page = paginator.page(page_num + 1)
204 next_page = paginator.page(page_num + 2) \
205 if page_num + 1 < paginator.num_pages else None
206 paginated_kwargs.update(
207 {'%s_paginator' % key: paginator,
208 '%s_page' % key: page,
209 '%s_previous_page' % key: previous_page,
210 '%s_next_page' % key: next_page})
211
212 localcontext = _get_localcontext(
213 context, page.save_as, paginated_kwargs, relative_urls)
214 _write_file(template, localcontext, self.output_path,
215 page.save_as, override_output)
216 else:
217 # no pagination
218 localcontext = _get_localcontext(
219 context, name, kwargs, relative_urls)
220 _write_file(template, localcontext, self.output_path, name,
221 override_output)
222
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pelican/writers.py b/pelican/writers.py
--- a/pelican/writers.py
+++ b/pelican/writers.py
@@ -55,8 +55,10 @@
categories=item.tags if hasattr(item, 'tags') else None,
author_name=getattr(item, 'author', ''),
pubdate=set_date_tzinfo(
- item.modified if hasattr(item, 'modified') else item.date,
- self.settings.get('TIMEZONE', None)))
+ item.date, self.settings.get('TIMEZONE', None)),
+ updateddate=set_date_tzinfo(
+ item.modified, self.settings.get('TIMEZONE', None)
+ ) if hasattr(item, 'modified') else None)
def _open_w(self, filename, encoding, override=False):
"""Open a file to write some content to it.
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@
from setuptools import setup
-requires = ['feedgenerator >= 1.6', 'jinja2 >= 2.7', 'pygments', 'docutils',
+requires = ['feedgenerator >= 1.8', 'jinja2 >= 2.7', 'pygments', 'docutils',
'pytz >= 0a', 'blinker', 'unidecode', 'six >= 1.4',
'python-dateutil']
|
{"golden_diff": "diff --git a/pelican/writers.py b/pelican/writers.py\n--- a/pelican/writers.py\n+++ b/pelican/writers.py\n@@ -55,8 +55,10 @@\n categories=item.tags if hasattr(item, 'tags') else None,\n author_name=getattr(item, 'author', ''),\n pubdate=set_date_tzinfo(\n- item.modified if hasattr(item, 'modified') else item.date,\n- self.settings.get('TIMEZONE', None)))\n+ item.date, self.settings.get('TIMEZONE', None)),\n+ updateddate=set_date_tzinfo(\n+ item.modified, self.settings.get('TIMEZONE', None)\n+ ) if hasattr(item, 'modified') else None)\n \n def _open_w(self, filename, encoding, override=False):\n \"\"\"Open a file to write some content to it.\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -4,7 +4,7 @@\n \n from setuptools import setup\n \n-requires = ['feedgenerator >= 1.6', 'jinja2 >= 2.7', 'pygments', 'docutils',\n+requires = ['feedgenerator >= 1.8', 'jinja2 >= 2.7', 'pygments', 'docutils',\n 'pytz >= 0a', 'blinker', 'unidecode', 'six >= 1.4',\n 'python-dateutil']\n", "issue": "Atom feed could/should use <published> date\nI've noticed that my atom feed will use `:date:` in the `<updated>` field in feed.xml, and will override that with `:modified:` if modified is specified. This results in an unwanted email re-send when using MailChimp's RSS --> Email feature. Going by this page, I think they would use `<published>` first if it existed:\n\nhttp://kb.mailchimp.com/campaigns/rss-in-campaigns/troubleshooting-rss-in-campaigns\n\nAnd I think in general it would be nice to call out both `<published>` to mark the original date, and `<updated>` for modifications. (And it aligns better with what I expect with `:date:` and `:modified:` meta in my .rst files.)\n\nNot a big problem -- I'll just have to refrain from using :modified: to avoid this behavior, but I figured it wouldn't hurt to ask. :-)\n\n", "before_files": [{"content": "#!/usr/bin/env python\nfrom os import walk\nfrom os.path import join, relpath, dirname\n\nfrom setuptools import setup\n\nrequires = ['feedgenerator >= 1.6', 'jinja2 >= 2.7', 'pygments', 'docutils',\n 'pytz >= 0a', 'blinker', 'unidecode', 'six >= 1.4',\n 'python-dateutil']\n\nentry_points = {\n 'console_scripts': [\n 'pelican = pelican:main',\n 'pelican-import = pelican.tools.pelican_import:main',\n 'pelican-quickstart = pelican.tools.pelican_quickstart:main',\n 'pelican-themes = pelican.tools.pelican_themes:main'\n ]\n}\n\nREADME = open('README.rst').read()\nCHANGELOG = open('docs/changelog.rst').read()\n\nsetup(\n name=\"pelican\",\n version=\"3.6.4.dev0\",\n url='http://getpelican.com/',\n author='Alexis Metaireau',\n author_email='[email protected]',\n description=\"A tool to generate a static blog from reStructuredText or \"\n \"Markdown input files.\",\n long_description=README + '\\n' + CHANGELOG,\n packages=['pelican', 'pelican.tools'],\n package_data={\n # we manually collect the package data, as opposed to using include_package_data=True\n # because we don't want the tests to be included automatically as package data\n # (MANIFEST.in is too greedy)\n 'pelican': [\n relpath(join(root, name), 'pelican')\n for root, _, names in walk(join('pelican', 'themes')) for name in names\n ],\n 'pelican.tools': [\n relpath(join(root, name), join('pelican', 'tools'))\n for root, _, names in walk(join('pelican', 'tools', 'templates')) for name in names\n ],\n },\n install_requires=requires,\n entry_points=entry_points,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'License :: OSI Approved :: GNU Affero General Public License v3',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Topic :: Internet :: WWW/HTTP',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n test_suite='pelican.tests',\n)\n", "path": "setup.py"}, {"content": "# -*- coding: utf-8 -*-\nfrom __future__ import print_function, unicode_literals, with_statement\n\nimport logging\nimport os\n\nfrom feedgenerator import Atom1Feed, Rss201rev2Feed\n\nfrom jinja2 import Markup\n\nimport six\nfrom six.moves.urllib.parse import urlparse\n\nfrom pelican import signals\nfrom pelican.paginator import Paginator\nfrom pelican.utils import (get_relative_path, is_selected_for_writing,\n path_to_url, set_date_tzinfo)\n\nif not six.PY3:\n from codecs import open\n\nlogger = logging.getLogger(__name__)\n\n\nclass Writer(object):\n\n def __init__(self, output_path, settings=None):\n self.output_path = output_path\n self.reminder = dict()\n self.settings = settings or {}\n self._written_files = set()\n self._overridden_files = set()\n\n def _create_new_feed(self, feed_type, context):\n feed_class = Rss201rev2Feed if feed_type == 'rss' else Atom1Feed\n sitename = Markup(context['SITENAME']).striptags()\n feed = feed_class(\n title=sitename,\n link=(self.site_url + '/'),\n feed_url=self.feed_url,\n description=context.get('SITESUBTITLE', ''))\n return feed\n\n def _add_item_to_the_feed(self, feed, item):\n\n title = Markup(item.title).striptags()\n link = '%s/%s' % (self.site_url, item.url)\n feed.add_item(\n title=title,\n link=link,\n unique_id='tag:%s,%s:%s' % (urlparse(link).netloc,\n item.date.date(),\n urlparse(link).path.lstrip('/')),\n description=item.get_content(self.site_url),\n categories=item.tags if hasattr(item, 'tags') else None,\n author_name=getattr(item, 'author', ''),\n pubdate=set_date_tzinfo(\n item.modified if hasattr(item, 'modified') else item.date,\n self.settings.get('TIMEZONE', None)))\n\n def _open_w(self, filename, encoding, override=False):\n \"\"\"Open a file to write some content to it.\n\n Exit if we have already written to that file, unless one (and no more\n than one) of the writes has the override parameter set to True.\n \"\"\"\n if filename in self._overridden_files:\n if override:\n raise RuntimeError('File %s is set to be overridden twice'\n % filename)\n else:\n logger.info('Skipping %s', filename)\n filename = os.devnull\n elif filename in self._written_files:\n if override:\n logger.info('Overwriting %s', filename)\n else:\n raise RuntimeError('File %s is to be overwritten' % filename)\n if override:\n self._overridden_files.add(filename)\n self._written_files.add(filename)\n return open(filename, 'w', encoding=encoding)\n\n def write_feed(self, elements, context, path=None, feed_type='atom',\n override_output=False):\n \"\"\"Generate a feed with the list of articles provided\n\n Return the feed. If no path or output_path is specified, just\n return the feed object.\n\n :param elements: the articles to put on the feed.\n :param context: the context to get the feed metadata.\n :param path: the path to output.\n :param feed_type: the feed type to use (atom or rss)\n :param override_output: boolean telling if we can override previous\n output with the same name (and if next files written with the same\n name should be skipped to keep that one)\n \"\"\"\n if not is_selected_for_writing(self.settings, path):\n return\n\n self.site_url = context.get(\n 'SITEURL', path_to_url(get_relative_path(path)))\n\n self.feed_domain = context.get('FEED_DOMAIN')\n self.feed_url = '{}/{}'.format(self.feed_domain, path)\n\n feed = self._create_new_feed(feed_type, context)\n\n max_items = len(elements)\n if self.settings['FEED_MAX_ITEMS']:\n max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)\n for i in range(max_items):\n self._add_item_to_the_feed(feed, elements[i])\n\n if path:\n complete_path = os.path.join(self.output_path, path)\n try:\n os.makedirs(os.path.dirname(complete_path))\n except Exception:\n pass\n\n encoding = 'utf-8' if six.PY3 else None\n with self._open_w(complete_path, encoding, override_output) as fp:\n feed.write(fp, 'utf-8')\n logger.info('Writing %s', complete_path)\n\n signals.feed_written.send(\n complete_path, context=context, feed=feed)\n return feed\n\n def write_file(self, name, template, context, relative_urls=False,\n paginated=None, override_output=False, **kwargs):\n \"\"\"Render the template and write the file.\n\n :param name: name of the file to output\n :param template: template to use to generate the content\n :param context: dict to pass to the templates.\n :param relative_urls: use relative urls or absolutes ones\n :param paginated: dict of article list to paginate - must have the\n same length (same list in different orders)\n :param override_output: boolean telling if we can override previous\n output with the same name (and if next files written with the same\n name should be skipped to keep that one)\n :param **kwargs: additional variables to pass to the templates\n \"\"\"\n\n if name is False or \\\n name == \"\" or \\\n not is_selected_for_writing(self.settings,\n os.path.join(self.output_path, name)):\n return\n elif not name:\n # other stuff, just return for now\n return\n\n def _write_file(template, localcontext, output_path, name, override):\n \"\"\"Render the template write the file.\"\"\"\n # set localsiteurl for context so that Contents can adjust links\n if localcontext['localsiteurl']:\n context['localsiteurl'] = localcontext['localsiteurl']\n output = template.render(localcontext)\n path = os.path.join(output_path, name)\n try:\n os.makedirs(os.path.dirname(path))\n except Exception:\n pass\n\n with self._open_w(path, 'utf-8', override=override) as f:\n f.write(output)\n logger.info('Writing %s', path)\n\n # Send a signal to say we're writing a file with some specific\n # local context.\n signals.content_written.send(path, context=localcontext)\n\n def _get_localcontext(context, name, kwargs, relative_urls):\n localcontext = context.copy()\n localcontext['localsiteurl'] = localcontext.get(\n 'localsiteurl', None)\n if relative_urls:\n relative_url = path_to_url(get_relative_path(name))\n localcontext['SITEURL'] = relative_url\n localcontext['localsiteurl'] = relative_url\n localcontext['output_file'] = name\n localcontext.update(kwargs)\n return localcontext\n\n # pagination\n if paginated:\n\n # pagination needed, init paginators\n paginators = {key: Paginator(name, val, self.settings)\n for key, val in paginated.items()}\n\n # generated pages, and write\n for page_num in range(list(paginators.values())[0].num_pages):\n paginated_kwargs = kwargs.copy()\n for key in paginators.keys():\n paginator = paginators[key]\n previous_page = paginator.page(page_num) \\\n if page_num > 0 else None\n page = paginator.page(page_num + 1)\n next_page = paginator.page(page_num + 2) \\\n if page_num + 1 < paginator.num_pages else None\n paginated_kwargs.update(\n {'%s_paginator' % key: paginator,\n '%s_page' % key: page,\n '%s_previous_page' % key: previous_page,\n '%s_next_page' % key: next_page})\n\n localcontext = _get_localcontext(\n context, page.save_as, paginated_kwargs, relative_urls)\n _write_file(template, localcontext, self.output_path,\n page.save_as, override_output)\n else:\n # no pagination\n localcontext = _get_localcontext(\n context, name, kwargs, relative_urls)\n _write_file(template, localcontext, self.output_path, name,\n override_output)\n", "path": "pelican/writers.py"}], "after_files": [{"content": "#!/usr/bin/env python\nfrom os import walk\nfrom os.path import join, relpath, dirname\n\nfrom setuptools import setup\n\nrequires = ['feedgenerator >= 1.8', 'jinja2 >= 2.7', 'pygments', 'docutils',\n 'pytz >= 0a', 'blinker', 'unidecode', 'six >= 1.4',\n 'python-dateutil']\n\nentry_points = {\n 'console_scripts': [\n 'pelican = pelican:main',\n 'pelican-import = pelican.tools.pelican_import:main',\n 'pelican-quickstart = pelican.tools.pelican_quickstart:main',\n 'pelican-themes = pelican.tools.pelican_themes:main'\n ]\n}\n\nREADME = open('README.rst').read()\nCHANGELOG = open('docs/changelog.rst').read()\n\nsetup(\n name=\"pelican\",\n version=\"3.6.4.dev0\",\n url='http://getpelican.com/',\n author='Alexis Metaireau',\n author_email='[email protected]',\n description=\"A tool to generate a static blog from reStructuredText or \"\n \"Markdown input files.\",\n long_description=README + '\\n' + CHANGELOG,\n packages=['pelican', 'pelican.tools'],\n package_data={\n # we manually collect the package data, as opposed to using include_package_data=True\n # because we don't want the tests to be included automatically as package data\n # (MANIFEST.in is too greedy)\n 'pelican': [\n relpath(join(root, name), 'pelican')\n for root, _, names in walk(join('pelican', 'themes')) for name in names\n ],\n 'pelican.tools': [\n relpath(join(root, name), join('pelican', 'tools'))\n for root, _, names in walk(join('pelican', 'tools', 'templates')) for name in names\n ],\n },\n install_requires=requires,\n entry_points=entry_points,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'License :: OSI Approved :: GNU Affero General Public License v3',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Topic :: Internet :: WWW/HTTP',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n test_suite='pelican.tests',\n)\n", "path": "setup.py"}, {"content": "# -*- coding: utf-8 -*-\nfrom __future__ import print_function, unicode_literals, with_statement\n\nimport logging\nimport os\n\nfrom feedgenerator import Atom1Feed, Rss201rev2Feed\n\nfrom jinja2 import Markup\n\nimport six\nfrom six.moves.urllib.parse import urlparse\n\nfrom pelican import signals\nfrom pelican.paginator import Paginator\nfrom pelican.utils import (get_relative_path, is_selected_for_writing,\n path_to_url, set_date_tzinfo)\n\nif not six.PY3:\n from codecs import open\n\nlogger = logging.getLogger(__name__)\n\n\nclass Writer(object):\n\n def __init__(self, output_path, settings=None):\n self.output_path = output_path\n self.reminder = dict()\n self.settings = settings or {}\n self._written_files = set()\n self._overridden_files = set()\n\n def _create_new_feed(self, feed_type, context):\n feed_class = Rss201rev2Feed if feed_type == 'rss' else Atom1Feed\n sitename = Markup(context['SITENAME']).striptags()\n feed = feed_class(\n title=sitename,\n link=(self.site_url + '/'),\n feed_url=self.feed_url,\n description=context.get('SITESUBTITLE', ''))\n return feed\n\n def _add_item_to_the_feed(self, feed, item):\n\n title = Markup(item.title).striptags()\n link = '%s/%s' % (self.site_url, item.url)\n feed.add_item(\n title=title,\n link=link,\n unique_id='tag:%s,%s:%s' % (urlparse(link).netloc,\n item.date.date(),\n urlparse(link).path.lstrip('/')),\n description=item.get_content(self.site_url),\n categories=item.tags if hasattr(item, 'tags') else None,\n author_name=getattr(item, 'author', ''),\n pubdate=set_date_tzinfo(\n item.date, self.settings.get('TIMEZONE', None)),\n updateddate=set_date_tzinfo(\n item.modified, self.settings.get('TIMEZONE', None)\n ) if hasattr(item, 'modified') else None)\n\n def _open_w(self, filename, encoding, override=False):\n \"\"\"Open a file to write some content to it.\n\n Exit if we have already written to that file, unless one (and no more\n than one) of the writes has the override parameter set to True.\n \"\"\"\n if filename in self._overridden_files:\n if override:\n raise RuntimeError('File %s is set to be overridden twice'\n % filename)\n else:\n logger.info('Skipping %s', filename)\n filename = os.devnull\n elif filename in self._written_files:\n if override:\n logger.info('Overwriting %s', filename)\n else:\n raise RuntimeError('File %s is to be overwritten' % filename)\n if override:\n self._overridden_files.add(filename)\n self._written_files.add(filename)\n return open(filename, 'w', encoding=encoding)\n\n def write_feed(self, elements, context, path=None, feed_type='atom',\n override_output=False):\n \"\"\"Generate a feed with the list of articles provided\n\n Return the feed. If no path or output_path is specified, just\n return the feed object.\n\n :param elements: the articles to put on the feed.\n :param context: the context to get the feed metadata.\n :param path: the path to output.\n :param feed_type: the feed type to use (atom or rss)\n :param override_output: boolean telling if we can override previous\n output with the same name (and if next files written with the same\n name should be skipped to keep that one)\n \"\"\"\n if not is_selected_for_writing(self.settings, path):\n return\n\n self.site_url = context.get(\n 'SITEURL', path_to_url(get_relative_path(path)))\n\n self.feed_domain = context.get('FEED_DOMAIN')\n self.feed_url = '{}/{}'.format(self.feed_domain, path)\n\n feed = self._create_new_feed(feed_type, context)\n\n max_items = len(elements)\n if self.settings['FEED_MAX_ITEMS']:\n max_items = min(self.settings['FEED_MAX_ITEMS'], max_items)\n for i in range(max_items):\n self._add_item_to_the_feed(feed, elements[i])\n\n if path:\n complete_path = os.path.join(self.output_path, path)\n try:\n os.makedirs(os.path.dirname(complete_path))\n except Exception:\n pass\n\n encoding = 'utf-8' if six.PY3 else None\n with self._open_w(complete_path, encoding, override_output) as fp:\n feed.write(fp, 'utf-8')\n logger.info('Writing %s', complete_path)\n\n signals.feed_written.send(\n complete_path, context=context, feed=feed)\n return feed\n\n def write_file(self, name, template, context, relative_urls=False,\n paginated=None, override_output=False, **kwargs):\n \"\"\"Render the template and write the file.\n\n :param name: name of the file to output\n :param template: template to use to generate the content\n :param context: dict to pass to the templates.\n :param relative_urls: use relative urls or absolutes ones\n :param paginated: dict of article list to paginate - must have the\n same length (same list in different orders)\n :param override_output: boolean telling if we can override previous\n output with the same name (and if next files written with the same\n name should be skipped to keep that one)\n :param **kwargs: additional variables to pass to the templates\n \"\"\"\n\n if name is False or \\\n name == \"\" or \\\n not is_selected_for_writing(self.settings,\n os.path.join(self.output_path, name)):\n return\n elif not name:\n # other stuff, just return for now\n return\n\n def _write_file(template, localcontext, output_path, name, override):\n \"\"\"Render the template write the file.\"\"\"\n # set localsiteurl for context so that Contents can adjust links\n if localcontext['localsiteurl']:\n context['localsiteurl'] = localcontext['localsiteurl']\n output = template.render(localcontext)\n path = os.path.join(output_path, name)\n try:\n os.makedirs(os.path.dirname(path))\n except Exception:\n pass\n\n with self._open_w(path, 'utf-8', override=override) as f:\n f.write(output)\n logger.info('Writing %s', path)\n\n # Send a signal to say we're writing a file with some specific\n # local context.\n signals.content_written.send(path, context=localcontext)\n\n def _get_localcontext(context, name, kwargs, relative_urls):\n localcontext = context.copy()\n localcontext['localsiteurl'] = localcontext.get(\n 'localsiteurl', None)\n if relative_urls:\n relative_url = path_to_url(get_relative_path(name))\n localcontext['SITEURL'] = relative_url\n localcontext['localsiteurl'] = relative_url\n localcontext['output_file'] = name\n localcontext.update(kwargs)\n return localcontext\n\n # pagination\n if paginated:\n\n # pagination needed, init paginators\n paginators = {key: Paginator(name, val, self.settings)\n for key, val in paginated.items()}\n\n # generated pages, and write\n for page_num in range(list(paginators.values())[0].num_pages):\n paginated_kwargs = kwargs.copy()\n for key in paginators.keys():\n paginator = paginators[key]\n previous_page = paginator.page(page_num) \\\n if page_num > 0 else None\n page = paginator.page(page_num + 1)\n next_page = paginator.page(page_num + 2) \\\n if page_num + 1 < paginator.num_pages else None\n paginated_kwargs.update(\n {'%s_paginator' % key: paginator,\n '%s_page' % key: page,\n '%s_previous_page' % key: previous_page,\n '%s_next_page' % key: next_page})\n\n localcontext = _get_localcontext(\n context, page.save_as, paginated_kwargs, relative_urls)\n _write_file(template, localcontext, self.output_path,\n page.save_as, override_output)\n else:\n # no pagination\n localcontext = _get_localcontext(\n context, name, kwargs, relative_urls)\n _write_file(template, localcontext, self.output_path, name,\n override_output)\n", "path": "pelican/writers.py"}]}
| 3,583 | 315 |
gh_patches_debug_13099
|
rasdani/github-patches
|
git_diff
|
python-gitlab__python-gitlab-941
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Feature idea: autocompletion for CLI mode
## Description of the problem, including code/CLI snippet
Feature/improve suggestion: in many of my cases, I would like to have an autocompletion to have less time to spend at docs and source code to find out all possible combination of commands/object/args.
## Expected Behavior
Would be great to have autocompletion for bash or zsh.
## Actual Behavior
No completion right now.
## Specifications
- python-gitlab version: 1.6.0
- API version you are using (v3/v4): 4
- Gitlab server version (or gitlab.com): gitlab.com
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/python
2 # -*- coding: utf-8 -*-
3
4 from setuptools import setup
5 from setuptools import find_packages
6
7
8 def get_version():
9 with open("gitlab/__init__.py") as f:
10 for line in f:
11 if line.startswith("__version__"):
12 return eval(line.split("=")[-1])
13
14
15 with open("README.rst", "r") as readme_file:
16 readme = readme_file.read()
17
18 setup(
19 name="python-gitlab",
20 version=get_version(),
21 description="Interact with GitLab API",
22 long_description=readme,
23 author="Gauvain Pocentek",
24 author_email="[email protected]",
25 license="LGPLv3",
26 url="https://github.com/python-gitlab/python-gitlab",
27 packages=find_packages(),
28 install_requires=["requests>=2.4.2", "six"],
29 entry_points={"console_scripts": ["gitlab = gitlab.cli:main"]},
30 classifiers=[
31 "Development Status :: 5 - Production/Stable",
32 "Environment :: Console",
33 "Intended Audience :: System Administrators",
34 "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
35 "Natural Language :: English",
36 "Operating System :: POSIX",
37 "Operating System :: Microsoft :: Windows",
38 "Programming Language :: Python",
39 "Programming Language :: Python :: 2",
40 "Programming Language :: Python :: 2.7",
41 "Programming Language :: Python :: 3",
42 "Programming Language :: Python :: 3.4",
43 "Programming Language :: Python :: 3.5",
44 "Programming Language :: Python :: 3.6",
45 "Programming Language :: Python :: 3.7",
46 "Programming Language :: Python :: 3.8",
47 ],
48 )
49
```
Path: `gitlab/cli.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Copyright (C) 2013-2017 Gauvain Pocentek <[email protected]>
5 #
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Lesser General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU Lesser General Public License for more details.
15 #
16 # You should have received a copy of the GNU Lesser General Public License
17 # along with this program. If not, see <http://www.gnu.org/licenses/>.
18
19 from __future__ import print_function
20
21 import argparse
22 import functools
23 import importlib
24 import re
25 import sys
26
27 import gitlab.config
28
29 camel_re = re.compile("(.)([A-Z])")
30
31 # custom_actions = {
32 # cls: {
33 # action: (mandatory_args, optional_args, in_obj),
34 # },
35 # }
36 custom_actions = {}
37
38
39 def register_custom_action(cls_names, mandatory=tuple(), optional=tuple()):
40 def wrap(f):
41 @functools.wraps(f)
42 def wrapped_f(*args, **kwargs):
43 return f(*args, **kwargs)
44
45 # in_obj defines whether the method belongs to the obj or the manager
46 in_obj = True
47 classes = cls_names
48 if type(cls_names) != tuple:
49 classes = (cls_names,)
50
51 for cls_name in classes:
52 final_name = cls_name
53 if cls_name.endswith("Manager"):
54 final_name = cls_name.replace("Manager", "")
55 in_obj = False
56 if final_name not in custom_actions:
57 custom_actions[final_name] = {}
58
59 action = f.__name__.replace("_", "-")
60 custom_actions[final_name][action] = (mandatory, optional, in_obj)
61
62 return wrapped_f
63
64 return wrap
65
66
67 def die(msg, e=None):
68 if e:
69 msg = "%s (%s)" % (msg, e)
70 sys.stderr.write(msg + "\n")
71 sys.exit(1)
72
73
74 def what_to_cls(what):
75 return "".join([s.capitalize() for s in what.split("-")])
76
77
78 def cls_to_what(cls):
79 return camel_re.sub(r"\1-\2", cls.__name__).lower()
80
81
82 def _get_base_parser(add_help=True):
83 parser = argparse.ArgumentParser(
84 add_help=add_help, description="GitLab API Command Line Interface"
85 )
86 parser.add_argument("--version", help="Display the version.", action="store_true")
87 parser.add_argument(
88 "-v",
89 "--verbose",
90 "--fancy",
91 help="Verbose mode (legacy format only)",
92 action="store_true",
93 )
94 parser.add_argument(
95 "-d", "--debug", help="Debug mode (display HTTP requests)", action="store_true"
96 )
97 parser.add_argument(
98 "-c",
99 "--config-file",
100 action="append",
101 help="Configuration file to use. Can be used multiple times.",
102 )
103 parser.add_argument(
104 "-g",
105 "--gitlab",
106 help=(
107 "Which configuration section should "
108 "be used. If not defined, the default selection "
109 "will be used."
110 ),
111 required=False,
112 )
113 parser.add_argument(
114 "-o",
115 "--output",
116 help="Output format (v4 only): json|legacy|yaml",
117 required=False,
118 choices=["json", "legacy", "yaml"],
119 default="legacy",
120 )
121 parser.add_argument(
122 "-f",
123 "--fields",
124 help=(
125 "Fields to display in the output (comma "
126 "separated). Not used with legacy output"
127 ),
128 required=False,
129 )
130
131 return parser
132
133
134 def _get_parser(cli_module):
135 parser = _get_base_parser()
136 return cli_module.extend_parser(parser)
137
138
139 def _parse_value(v):
140 if isinstance(v, str) and v.startswith("@"):
141 # If the user-provided value starts with @, we try to read the file
142 # path provided after @ as the real value. Exit on any error.
143 try:
144 with open(v[1:]) as fl:
145 return fl.read()
146 except Exception as e:
147 sys.stderr.write("%s\n" % e)
148 sys.exit(1)
149
150 return v
151
152
153 def main():
154 if "--version" in sys.argv:
155 print(gitlab.__version__)
156 sys.exit(0)
157
158 parser = _get_base_parser(add_help=False)
159
160 # This first parsing step is used to find the gitlab config to use, and
161 # load the propermodule (v3 or v4) accordingly. At that point we don't have
162 # any subparser setup
163 (options, args) = parser.parse_known_args(sys.argv)
164 try:
165 config = gitlab.config.GitlabConfigParser(options.gitlab, options.config_file)
166 except gitlab.config.ConfigError as e:
167 if "--help" in sys.argv or "-h" in sys.argv:
168 parser.print_help()
169 sys.exit(0)
170 sys.exit(e)
171 cli_module = importlib.import_module("gitlab.v%s.cli" % config.api_version)
172
173 # Now we build the entire set of subcommands and do the complete parsing
174 parser = _get_parser(cli_module)
175 args = parser.parse_args(sys.argv[1:])
176
177 config_files = args.config_file
178 gitlab_id = args.gitlab
179 verbose = args.verbose
180 output = args.output
181 fields = []
182 if args.fields:
183 fields = [x.strip() for x in args.fields.split(",")]
184 debug = args.debug
185 action = args.whaction
186 what = args.what
187
188 args = args.__dict__
189 # Remove CLI behavior-related args
190 for item in (
191 "gitlab",
192 "config_file",
193 "verbose",
194 "debug",
195 "what",
196 "whaction",
197 "version",
198 "output",
199 ):
200 args.pop(item)
201 args = {k: _parse_value(v) for k, v in args.items() if v is not None}
202
203 try:
204 gl = gitlab.Gitlab.from_config(gitlab_id, config_files)
205 if gl.private_token or gl.oauth_token or gl.job_token:
206 gl.auth()
207 except Exception as e:
208 die(str(e))
209
210 if debug:
211 gl.enable_debug()
212
213 cli_module.run(gl, what, action, args, verbose, output, fields)
214
215 sys.exit(0)
216
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/gitlab/cli.py b/gitlab/cli.py
--- a/gitlab/cli.py
+++ b/gitlab/cli.py
@@ -172,6 +172,12 @@
# Now we build the entire set of subcommands and do the complete parsing
parser = _get_parser(cli_module)
+ try:
+ import argcomplete
+
+ argcomplete.autocomplete(parser)
+ except Exception:
+ pass
args = parser.parse_args(sys.argv[1:])
config_files = args.config_file
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -45,4 +45,5 @@
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
+ extras_require={"autocompletion": ["argcomplete>=1.10.0,<2"]},
)
|
{"golden_diff": "diff --git a/gitlab/cli.py b/gitlab/cli.py\n--- a/gitlab/cli.py\n+++ b/gitlab/cli.py\n@@ -172,6 +172,12 @@\n \n # Now we build the entire set of subcommands and do the complete parsing\n parser = _get_parser(cli_module)\n+ try:\n+ import argcomplete\n+\n+ argcomplete.autocomplete(parser)\n+ except Exception:\n+ pass\n args = parser.parse_args(sys.argv[1:])\n \n config_files = args.config_file\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -45,4 +45,5 @@\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n ],\n+ extras_require={\"autocompletion\": [\"argcomplete>=1.10.0,<2\"]},\n )\n", "issue": "Feature idea: autocompletion for CLI mode\n## Description of the problem, including code/CLI snippet\r\n\r\nFeature/improve suggestion: in many of my cases, I would like to have an autocompletion to have less time to spend at docs and source code to find out all possible combination of commands/object/args.\r\n\r\n## Expected Behavior\r\n\r\nWould be great to have autocompletion for bash or zsh.\r\n\r\n## Actual Behavior\r\n\r\nNo completion right now.\r\n\r\n## Specifications\r\n\r\n - python-gitlab version: 1.6.0\r\n - API version you are using (v3/v4): 4\r\n - Gitlab server version (or gitlab.com): gitlab.com\r\n\n", "before_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nfrom setuptools import setup\nfrom setuptools import find_packages\n\n\ndef get_version():\n with open(\"gitlab/__init__.py\") as f:\n for line in f:\n if line.startswith(\"__version__\"):\n return eval(line.split(\"=\")[-1])\n\n\nwith open(\"README.rst\", \"r\") as readme_file:\n readme = readme_file.read()\n\nsetup(\n name=\"python-gitlab\",\n version=get_version(),\n description=\"Interact with GitLab API\",\n long_description=readme,\n author=\"Gauvain Pocentek\",\n author_email=\"[email protected]\",\n license=\"LGPLv3\",\n url=\"https://github.com/python-gitlab/python-gitlab\",\n packages=find_packages(),\n install_requires=[\"requests>=2.4.2\", \"six\"],\n entry_points={\"console_scripts\": [\"gitlab = gitlab.cli:main\"]},\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: System Administrators\",\n \"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)\",\n \"Natural Language :: English\",\n \"Operating System :: POSIX\",\n \"Operating System :: Microsoft :: Windows\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n ],\n)\n", "path": "setup.py"}, {"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) 2013-2017 Gauvain Pocentek <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n\nfrom __future__ import print_function\n\nimport argparse\nimport functools\nimport importlib\nimport re\nimport sys\n\nimport gitlab.config\n\ncamel_re = re.compile(\"(.)([A-Z])\")\n\n# custom_actions = {\n# cls: {\n# action: (mandatory_args, optional_args, in_obj),\n# },\n# }\ncustom_actions = {}\n\n\ndef register_custom_action(cls_names, mandatory=tuple(), optional=tuple()):\n def wrap(f):\n @functools.wraps(f)\n def wrapped_f(*args, **kwargs):\n return f(*args, **kwargs)\n\n # in_obj defines whether the method belongs to the obj or the manager\n in_obj = True\n classes = cls_names\n if type(cls_names) != tuple:\n classes = (cls_names,)\n\n for cls_name in classes:\n final_name = cls_name\n if cls_name.endswith(\"Manager\"):\n final_name = cls_name.replace(\"Manager\", \"\")\n in_obj = False\n if final_name not in custom_actions:\n custom_actions[final_name] = {}\n\n action = f.__name__.replace(\"_\", \"-\")\n custom_actions[final_name][action] = (mandatory, optional, in_obj)\n\n return wrapped_f\n\n return wrap\n\n\ndef die(msg, e=None):\n if e:\n msg = \"%s (%s)\" % (msg, e)\n sys.stderr.write(msg + \"\\n\")\n sys.exit(1)\n\n\ndef what_to_cls(what):\n return \"\".join([s.capitalize() for s in what.split(\"-\")])\n\n\ndef cls_to_what(cls):\n return camel_re.sub(r\"\\1-\\2\", cls.__name__).lower()\n\n\ndef _get_base_parser(add_help=True):\n parser = argparse.ArgumentParser(\n add_help=add_help, description=\"GitLab API Command Line Interface\"\n )\n parser.add_argument(\"--version\", help=\"Display the version.\", action=\"store_true\")\n parser.add_argument(\n \"-v\",\n \"--verbose\",\n \"--fancy\",\n help=\"Verbose mode (legacy format only)\",\n action=\"store_true\",\n )\n parser.add_argument(\n \"-d\", \"--debug\", help=\"Debug mode (display HTTP requests)\", action=\"store_true\"\n )\n parser.add_argument(\n \"-c\",\n \"--config-file\",\n action=\"append\",\n help=\"Configuration file to use. Can be used multiple times.\",\n )\n parser.add_argument(\n \"-g\",\n \"--gitlab\",\n help=(\n \"Which configuration section should \"\n \"be used. If not defined, the default selection \"\n \"will be used.\"\n ),\n required=False,\n )\n parser.add_argument(\n \"-o\",\n \"--output\",\n help=\"Output format (v4 only): json|legacy|yaml\",\n required=False,\n choices=[\"json\", \"legacy\", \"yaml\"],\n default=\"legacy\",\n )\n parser.add_argument(\n \"-f\",\n \"--fields\",\n help=(\n \"Fields to display in the output (comma \"\n \"separated). Not used with legacy output\"\n ),\n required=False,\n )\n\n return parser\n\n\ndef _get_parser(cli_module):\n parser = _get_base_parser()\n return cli_module.extend_parser(parser)\n\n\ndef _parse_value(v):\n if isinstance(v, str) and v.startswith(\"@\"):\n # If the user-provided value starts with @, we try to read the file\n # path provided after @ as the real value. Exit on any error.\n try:\n with open(v[1:]) as fl:\n return fl.read()\n except Exception as e:\n sys.stderr.write(\"%s\\n\" % e)\n sys.exit(1)\n\n return v\n\n\ndef main():\n if \"--version\" in sys.argv:\n print(gitlab.__version__)\n sys.exit(0)\n\n parser = _get_base_parser(add_help=False)\n\n # This first parsing step is used to find the gitlab config to use, and\n # load the propermodule (v3 or v4) accordingly. At that point we don't have\n # any subparser setup\n (options, args) = parser.parse_known_args(sys.argv)\n try:\n config = gitlab.config.GitlabConfigParser(options.gitlab, options.config_file)\n except gitlab.config.ConfigError as e:\n if \"--help\" in sys.argv or \"-h\" in sys.argv:\n parser.print_help()\n sys.exit(0)\n sys.exit(e)\n cli_module = importlib.import_module(\"gitlab.v%s.cli\" % config.api_version)\n\n # Now we build the entire set of subcommands and do the complete parsing\n parser = _get_parser(cli_module)\n args = parser.parse_args(sys.argv[1:])\n\n config_files = args.config_file\n gitlab_id = args.gitlab\n verbose = args.verbose\n output = args.output\n fields = []\n if args.fields:\n fields = [x.strip() for x in args.fields.split(\",\")]\n debug = args.debug\n action = args.whaction\n what = args.what\n\n args = args.__dict__\n # Remove CLI behavior-related args\n for item in (\n \"gitlab\",\n \"config_file\",\n \"verbose\",\n \"debug\",\n \"what\",\n \"whaction\",\n \"version\",\n \"output\",\n ):\n args.pop(item)\n args = {k: _parse_value(v) for k, v in args.items() if v is not None}\n\n try:\n gl = gitlab.Gitlab.from_config(gitlab_id, config_files)\n if gl.private_token or gl.oauth_token or gl.job_token:\n gl.auth()\n except Exception as e:\n die(str(e))\n\n if debug:\n gl.enable_debug()\n\n cli_module.run(gl, what, action, args, verbose, output, fields)\n\n sys.exit(0)\n", "path": "gitlab/cli.py"}], "after_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nfrom setuptools import setup\nfrom setuptools import find_packages\n\n\ndef get_version():\n with open(\"gitlab/__init__.py\") as f:\n for line in f:\n if line.startswith(\"__version__\"):\n return eval(line.split(\"=\")[-1])\n\n\nwith open(\"README.rst\", \"r\") as readme_file:\n readme = readme_file.read()\n\nsetup(\n name=\"python-gitlab\",\n version=get_version(),\n description=\"Interact with GitLab API\",\n long_description=readme,\n author=\"Gauvain Pocentek\",\n author_email=\"[email protected]\",\n license=\"LGPLv3\",\n url=\"https://github.com/python-gitlab/python-gitlab\",\n packages=find_packages(),\n install_requires=[\"requests>=2.4.2\", \"six\"],\n entry_points={\"console_scripts\": [\"gitlab = gitlab.cli:main\"]},\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: System Administrators\",\n \"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)\",\n \"Natural Language :: English\",\n \"Operating System :: POSIX\",\n \"Operating System :: Microsoft :: Windows\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n ],\n extras_require={\"autocompletion\": [\"argcomplete>=1.10.0,<2\"]},\n)\n", "path": "setup.py"}, {"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) 2013-2017 Gauvain Pocentek <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n\nfrom __future__ import print_function\n\nimport argparse\nimport functools\nimport importlib\nimport re\nimport sys\n\nimport gitlab.config\n\ncamel_re = re.compile(\"(.)([A-Z])\")\n\n# custom_actions = {\n# cls: {\n# action: (mandatory_args, optional_args, in_obj),\n# },\n# }\ncustom_actions = {}\n\n\ndef register_custom_action(cls_names, mandatory=tuple(), optional=tuple()):\n def wrap(f):\n @functools.wraps(f)\n def wrapped_f(*args, **kwargs):\n return f(*args, **kwargs)\n\n # in_obj defines whether the method belongs to the obj or the manager\n in_obj = True\n classes = cls_names\n if type(cls_names) != tuple:\n classes = (cls_names,)\n\n for cls_name in classes:\n final_name = cls_name\n if cls_name.endswith(\"Manager\"):\n final_name = cls_name.replace(\"Manager\", \"\")\n in_obj = False\n if final_name not in custom_actions:\n custom_actions[final_name] = {}\n\n action = f.__name__.replace(\"_\", \"-\")\n custom_actions[final_name][action] = (mandatory, optional, in_obj)\n\n return wrapped_f\n\n return wrap\n\n\ndef die(msg, e=None):\n if e:\n msg = \"%s (%s)\" % (msg, e)\n sys.stderr.write(msg + \"\\n\")\n sys.exit(1)\n\n\ndef what_to_cls(what):\n return \"\".join([s.capitalize() for s in what.split(\"-\")])\n\n\ndef cls_to_what(cls):\n return camel_re.sub(r\"\\1-\\2\", cls.__name__).lower()\n\n\ndef _get_base_parser(add_help=True):\n parser = argparse.ArgumentParser(\n add_help=add_help, description=\"GitLab API Command Line Interface\"\n )\n parser.add_argument(\"--version\", help=\"Display the version.\", action=\"store_true\")\n parser.add_argument(\n \"-v\",\n \"--verbose\",\n \"--fancy\",\n help=\"Verbose mode (legacy format only)\",\n action=\"store_true\",\n )\n parser.add_argument(\n \"-d\", \"--debug\", help=\"Debug mode (display HTTP requests)\", action=\"store_true\"\n )\n parser.add_argument(\n \"-c\",\n \"--config-file\",\n action=\"append\",\n help=\"Configuration file to use. Can be used multiple times.\",\n )\n parser.add_argument(\n \"-g\",\n \"--gitlab\",\n help=(\n \"Which configuration section should \"\n \"be used. If not defined, the default selection \"\n \"will be used.\"\n ),\n required=False,\n )\n parser.add_argument(\n \"-o\",\n \"--output\",\n help=\"Output format (v4 only): json|legacy|yaml\",\n required=False,\n choices=[\"json\", \"legacy\", \"yaml\"],\n default=\"legacy\",\n )\n parser.add_argument(\n \"-f\",\n \"--fields\",\n help=(\n \"Fields to display in the output (comma \"\n \"separated). Not used with legacy output\"\n ),\n required=False,\n )\n\n return parser\n\n\ndef _get_parser(cli_module):\n parser = _get_base_parser()\n return cli_module.extend_parser(parser)\n\n\ndef _parse_value(v):\n if isinstance(v, str) and v.startswith(\"@\"):\n # If the user-provided value starts with @, we try to read the file\n # path provided after @ as the real value. Exit on any error.\n try:\n with open(v[1:]) as fl:\n return fl.read()\n except Exception as e:\n sys.stderr.write(\"%s\\n\" % e)\n sys.exit(1)\n\n return v\n\n\ndef main():\n if \"--version\" in sys.argv:\n print(gitlab.__version__)\n sys.exit(0)\n\n parser = _get_base_parser(add_help=False)\n\n # This first parsing step is used to find the gitlab config to use, and\n # load the propermodule (v3 or v4) accordingly. At that point we don't have\n # any subparser setup\n (options, args) = parser.parse_known_args(sys.argv)\n try:\n config = gitlab.config.GitlabConfigParser(options.gitlab, options.config_file)\n except gitlab.config.ConfigError as e:\n if \"--help\" in sys.argv or \"-h\" in sys.argv:\n parser.print_help()\n sys.exit(0)\n sys.exit(e)\n cli_module = importlib.import_module(\"gitlab.v%s.cli\" % config.api_version)\n\n # Now we build the entire set of subcommands and do the complete parsing\n parser = _get_parser(cli_module)\n try:\n import argcomplete\n\n argcomplete.autocomplete(parser)\n except Exception:\n pass\n args = parser.parse_args(sys.argv[1:])\n\n config_files = args.config_file\n gitlab_id = args.gitlab\n verbose = args.verbose\n output = args.output\n fields = []\n if args.fields:\n fields = [x.strip() for x in args.fields.split(\",\")]\n debug = args.debug\n action = args.whaction\n what = args.what\n\n args = args.__dict__\n # Remove CLI behavior-related args\n for item in (\n \"gitlab\",\n \"config_file\",\n \"verbose\",\n \"debug\",\n \"what\",\n \"whaction\",\n \"version\",\n \"output\",\n ):\n args.pop(item)\n args = {k: _parse_value(v) for k, v in args.items() if v is not None}\n\n try:\n gl = gitlab.Gitlab.from_config(gitlab_id, config_files)\n if gl.private_token or gl.oauth_token or gl.job_token:\n gl.auth()\n except Exception as e:\n die(str(e))\n\n if debug:\n gl.enable_debug()\n\n cli_module.run(gl, what, action, args, verbose, output, fields)\n\n sys.exit(0)\n", "path": "gitlab/cli.py"}]}
| 2,895 | 200 |
gh_patches_debug_15670
|
rasdani/github-patches
|
git_diff
|
Mailu__Mailu-1925
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Optimize Rainloop image
## Before you open your issue
- [X] Check if no issue or pull-request for this already exists.
- [X] Check [documentation](https://mailu.io/master/) and [FAQ](https://mailu.io/master/faq.html). (Tip, use the search function on the documentation page)
- [X] You understand `Mailu` is made by volunteers in their **free time** — be conscise, civil and accept that delays can occur.
- [X] The title of the issue should be short and simple. It should contain specific terms related to the actual issue. Be specific while writing the title.
## Environment & Versions
### Environment
- [X] docker-compose
- [X] kubernetes
- [X] docker swarm
### Versions
Master
```
$> docker images
mailu/rainloop master 2ad8d1c29ff3 45 hours ago 607MB
```
Optimized version using NGINX:
```
user/rainloop master 7de9dee9286d 2 seconds ago 116MB
```
## Description
In order to resolve issues #1830 and #1200, it is required to reduce build time, complexity and image size of images before multi-architecture builds can be supported. The current Rainloop image size is 607MB and can be optimized when changing from Apache to NGINX with officially supported Docker images. This can also increase overall web performance.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `webmails/rainloop/start.py`
Content:
```
1 #!/usr/bin/python3
2
3 import os
4 import shutil
5 import logging as log
6 import sys
7 from socrate import system, conf
8
9 log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING"))
10
11 # Actual startup script
12 os.environ["FRONT_ADDRESS"] = system.resolve_address(os.environ.get("HOST_FRONT", "front"))
13 os.environ["IMAP_ADDRESS"] = system.resolve_address(os.environ.get("HOST_IMAP", "imap"))
14
15 os.environ["MAX_FILESIZE"] = str(int(int(os.environ.get("MESSAGE_SIZE_LIMIT"))*0.66/1048576))
16
17 base = "/data/_data_/_default_/"
18 shutil.rmtree(base + "domains/", ignore_errors=True)
19 os.makedirs(base + "domains", exist_ok=True)
20 os.makedirs(base + "configs", exist_ok=True)
21
22 conf.jinja("/default.ini", os.environ, "/data/_data_/_default_/domains/default.ini")
23 conf.jinja("/application.ini", os.environ, "/data/_data_/_default_/configs/application.ini")
24 conf.jinja("/php.ini", os.environ, "/usr/local/etc/php/conf.d/rainloop.ini")
25
26 os.system("chown -R www-data:www-data /data")
27 os.system("chmod -R a+rX /var/www/html/")
28
29 os.execv("/usr/local/bin/apache2-foreground", ["apache2-foreground"])
30
31
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/webmails/rainloop/start.py b/webmails/rainloop/start.py
--- a/webmails/rainloop/start.py
+++ b/webmails/rainloop/start.py
@@ -19,12 +19,11 @@
os.makedirs(base + "domains", exist_ok=True)
os.makedirs(base + "configs", exist_ok=True)
-conf.jinja("/default.ini", os.environ, "/data/_data_/_default_/domains/default.ini")
-conf.jinja("/application.ini", os.environ, "/data/_data_/_default_/configs/application.ini")
-conf.jinja("/php.ini", os.environ, "/usr/local/etc/php/conf.d/rainloop.ini")
+conf.jinja("/defaults/default.ini", os.environ, "/data/_data_/_default_/domains/default.ini")
+conf.jinja("/defaults/application.ini", os.environ, "/data/_data_/_default_/configs/application.ini")
+conf.jinja("/defaults/php.ini", os.environ, "/etc/php7/php.ini")
-os.system("chown -R www-data:www-data /data")
-os.system("chmod -R a+rX /var/www/html/")
-
-os.execv("/usr/local/bin/apache2-foreground", ["apache2-foreground"])
+os.system("chown -R nginx:nginx /data")
+os.system("chmod -R a+rX /var/www/rainloop/")
+os.execv("/usr/sbin/nginx", ["nginx", "-g", "daemon off;"])
|
{"golden_diff": "diff --git a/webmails/rainloop/start.py b/webmails/rainloop/start.py\n--- a/webmails/rainloop/start.py\n+++ b/webmails/rainloop/start.py\n@@ -19,12 +19,11 @@\n os.makedirs(base + \"domains\", exist_ok=True)\n os.makedirs(base + \"configs\", exist_ok=True)\n \n-conf.jinja(\"/default.ini\", os.environ, \"/data/_data_/_default_/domains/default.ini\")\n-conf.jinja(\"/application.ini\", os.environ, \"/data/_data_/_default_/configs/application.ini\")\n-conf.jinja(\"/php.ini\", os.environ, \"/usr/local/etc/php/conf.d/rainloop.ini\")\n+conf.jinja(\"/defaults/default.ini\", os.environ, \"/data/_data_/_default_/domains/default.ini\")\n+conf.jinja(\"/defaults/application.ini\", os.environ, \"/data/_data_/_default_/configs/application.ini\")\n+conf.jinja(\"/defaults/php.ini\", os.environ, \"/etc/php7/php.ini\")\n \n-os.system(\"chown -R www-data:www-data /data\")\n-os.system(\"chmod -R a+rX /var/www/html/\")\n-\n-os.execv(\"/usr/local/bin/apache2-foreground\", [\"apache2-foreground\"])\n+os.system(\"chown -R nginx:nginx /data\")\n+os.system(\"chmod -R a+rX /var/www/rainloop/\")\n \n+os.execv(\"/usr/sbin/nginx\", [\"nginx\", \"-g\", \"daemon off;\"])\n", "issue": "Optimize Rainloop image\n## Before you open your issue\r\n- [X] Check if no issue or pull-request for this already exists.\r\n- [X] Check [documentation](https://mailu.io/master/) and [FAQ](https://mailu.io/master/faq.html). (Tip, use the search function on the documentation page)\r\n- [X] You understand `Mailu` is made by volunteers in their **free time** \u2014 be conscise, civil and accept that delays can occur.\r\n- [X] The title of the issue should be short and simple. It should contain specific terms related to the actual issue. Be specific while writing the title.\r\n\r\n## Environment & Versions\r\n### Environment\r\n - [X] docker-compose\r\n - [X] kubernetes\r\n - [X] docker swarm\r\n\r\n### Versions\r\nMaster\r\n```\r\n$> docker images\r\nmailu/rainloop master 2ad8d1c29ff3 45 hours ago 607MB\r\n```\r\nOptimized version using NGINX:\r\n```\r\nuser/rainloop master 7de9dee9286d 2 seconds ago 116MB\r\n```\r\n\r\n## Description\r\nIn order to resolve issues #1830 and #1200, it is required to reduce build time, complexity and image size of images before multi-architecture builds can be supported. The current Rainloop image size is 607MB and can be optimized when changing from Apache to NGINX with officially supported Docker images. This can also increase overall web performance.\n", "before_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport shutil\nimport logging as log\nimport sys\nfrom socrate import system, conf\n\nlog.basicConfig(stream=sys.stderr, level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n\n# Actual startup script\nos.environ[\"FRONT_ADDRESS\"] = system.resolve_address(os.environ.get(\"HOST_FRONT\", \"front\"))\nos.environ[\"IMAP_ADDRESS\"] = system.resolve_address(os.environ.get(\"HOST_IMAP\", \"imap\"))\n\nos.environ[\"MAX_FILESIZE\"] = str(int(int(os.environ.get(\"MESSAGE_SIZE_LIMIT\"))*0.66/1048576))\n\nbase = \"/data/_data_/_default_/\"\nshutil.rmtree(base + \"domains/\", ignore_errors=True)\nos.makedirs(base + \"domains\", exist_ok=True)\nos.makedirs(base + \"configs\", exist_ok=True)\n\nconf.jinja(\"/default.ini\", os.environ, \"/data/_data_/_default_/domains/default.ini\")\nconf.jinja(\"/application.ini\", os.environ, \"/data/_data_/_default_/configs/application.ini\")\nconf.jinja(\"/php.ini\", os.environ, \"/usr/local/etc/php/conf.d/rainloop.ini\")\n\nos.system(\"chown -R www-data:www-data /data\")\nos.system(\"chmod -R a+rX /var/www/html/\")\n\nos.execv(\"/usr/local/bin/apache2-foreground\", [\"apache2-foreground\"])\n\n", "path": "webmails/rainloop/start.py"}], "after_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport shutil\nimport logging as log\nimport sys\nfrom socrate import system, conf\n\nlog.basicConfig(stream=sys.stderr, level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n\n# Actual startup script\nos.environ[\"FRONT_ADDRESS\"] = system.resolve_address(os.environ.get(\"HOST_FRONT\", \"front\"))\nos.environ[\"IMAP_ADDRESS\"] = system.resolve_address(os.environ.get(\"HOST_IMAP\", \"imap\"))\n\nos.environ[\"MAX_FILESIZE\"] = str(int(int(os.environ.get(\"MESSAGE_SIZE_LIMIT\"))*0.66/1048576))\n\nbase = \"/data/_data_/_default_/\"\nshutil.rmtree(base + \"domains/\", ignore_errors=True)\nos.makedirs(base + \"domains\", exist_ok=True)\nos.makedirs(base + \"configs\", exist_ok=True)\n\nconf.jinja(\"/defaults/default.ini\", os.environ, \"/data/_data_/_default_/domains/default.ini\")\nconf.jinja(\"/defaults/application.ini\", os.environ, \"/data/_data_/_default_/configs/application.ini\")\nconf.jinja(\"/defaults/php.ini\", os.environ, \"/etc/php7/php.ini\")\n\nos.system(\"chown -R nginx:nginx /data\")\nos.system(\"chmod -R a+rX /var/www/rainloop/\")\n\nos.execv(\"/usr/sbin/nginx\", [\"nginx\", \"-g\", \"daemon off;\"])\n", "path": "webmails/rainloop/start.py"}]}
| 942 | 315 |
gh_patches_debug_28752
|
rasdani/github-patches
|
git_diff
|
fidals__shopelectro-174
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove Canonical html tag on CategoryPage
[trello src](https://trello.com/c/QRjRPDKN/295-se-%D1%83%D0%B1%D0%B8%D1%80%D0%B0%D0%B9-canonical-url-%D0%BD%D0%B0-%D1%81%D1%82%D1%80%D0%B0%D0%BD%D0%B8%D1%86%D0%B0%D1%85-%D1%81%D0%B2%D0%BE%D0%B9%D1%81%D1%82)
Сейчас [любая](https://www.shopelectro.ru/catalog/categories/zariadnye-ustroistva-242/tags/robiton/) страница с фильтрами, считается каноничной странице раздела.
Убирай тег `<link rel="canonical" href="">` на всех страницах с фильтрами(по характеристикам. По цене, наличию,популярности и пагинации - оставь).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `shopelectro/views/catalog.py`
Content:
```
1 from functools import partial
2
3 from django.conf import settings
4 from django.http import HttpResponse, HttpResponseForbidden
5 from django.shortcuts import render, get_object_or_404
6 from django.views.decorators.http import require_POST
7 from django_user_agents.utils import get_user_agent
8
9 from catalog.views import catalog
10 from images.models import Image
11 from pages import views as pages_views
12
13 from shopelectro import config
14 from shopelectro import models
15 from shopelectro.views.helpers import set_csrf_cookie
16
17 PRODUCTS_ON_PAGE_PC = 48
18 PRODUCTS_ON_PAGE_MOB = 10
19
20
21 def get_products_count(request):
22 """Get Products count for response context depends on the `user_agent`."""
23 mobile_view = get_user_agent(request).is_mobile
24 return PRODUCTS_ON_PAGE_MOB if mobile_view else PRODUCTS_ON_PAGE_PC
25
26
27 # CATALOG VIEWS
28 class CategoryTree(catalog.CategoryTree):
29 category_model = models.Category
30
31
32 @set_csrf_cookie
33 class ProductPage(catalog.ProductPage):
34 pk_url_kwarg = None
35 slug_url_kwarg = 'product_vendor_code'
36 slug_field = 'vendor_code'
37
38 queryset = (
39 models.Product.objects
40 .filter(category__isnull=False)
41 .prefetch_related('product_feedbacks', 'page__images')
42 .select_related('page')
43 )
44
45 def get_context_data(self, **kwargs):
46 context = super(ProductPage, self).get_context_data(**kwargs)
47
48 group_tags_pairs = (
49 models.Tag.objects
50 .filter(products=self.object)
51 .get_group_tags_pairs()
52 )
53
54 return {
55 **context,
56 'price_bounds': config.PRICE_BOUNDS,
57 'group_tags_pairs': group_tags_pairs
58 }
59
60
61 # SHOPELECTRO-SPECIFIC VIEWS
62 @set_csrf_cookie
63 class IndexPage(pages_views.CustomPageView):
64
65 def get_context_data(self, **kwargs):
66 """Extended method. Add product's images to context."""
67 context = super(IndexPage, self).get_context_data(**kwargs)
68 mobile_view = get_user_agent(self.request).is_mobile
69
70 top_products = (
71 models.Product.objects
72 .filter(id__in=settings.TOP_PRODUCTS)
73 .prefetch_related('category')
74 .select_related('page')
75 )
76
77 images = Image.objects.get_main_images_by_pages(
78 models.ProductPage.objects.filter(
79 shopelectro_product__in=top_products
80 )
81 )
82
83 categories = models.Category.objects.get_root_categories_by_products(
84 top_products)
85
86 prepared_top_products = []
87 if not mobile_view:
88 prepared_top_products = [
89 (product, images.get(product.page), categories.get(product))
90 for product in top_products
91 ]
92
93 return {
94 **context,
95 'category_tile': config.MAIN_PAGE_TILE,
96 'prepared_top_products': prepared_top_products,
97 }
98
99
100 def merge_products_and_images(products):
101 images = Image.objects.get_main_images_by_pages(
102 models.ProductPage.objects.filter(shopelectro_product__in=products)
103 )
104
105 return [
106 (product, images.get(product.page))
107 for product in products
108 ]
109
110
111 @set_csrf_cookie
112 class CategoryPage(catalog.CategoryPage):
113
114 def get_context_data(self, **kwargs):
115 """Add sorting options and view_types in context."""
116 context = super(CategoryPage, self).get_context_data(**kwargs)
117 products_on_page = get_products_count(self.request)
118
119 # tile is default view_type
120 view_type = self.request.session.get('view_type', 'tile')
121
122 category = context['category']
123
124 sorting = int(self.kwargs.get('sorting', 0))
125 sorting_option = config.category_sorting(sorting)
126
127 all_products = (
128 models.Product.objects
129 .prefetch_related('page__images')
130 .select_related('page')
131 .get_by_category(category, ordering=(sorting_option, ))
132 )
133
134 group_tags_pairs = (
135 models.Tag.objects
136 .filter(products__in=all_products)
137 .get_group_tags_pairs()
138 )
139
140 tags = self.kwargs.get('tags')
141 tags_metadata = {
142 'titles': '',
143 }
144
145 if tags:
146 slugs = models.Tag.parse_url_tags(tags)
147 tags = models.Tag.objects.filter(slug__in=slugs)
148
149 all_products = (
150 all_products
151 .filter(tags__in=tags)
152 # Use distinct because filtering by QuerySet tags,
153 # that related with products by many-to-many relation.
154 .distinct(sorting_option.lstrip('-'))
155 )
156
157 tags_titles = models.Tag.serialize_title_tags(
158 tags.get_group_tags_pairs()
159 )
160
161 tags_metadata['titles'] = tags_titles
162
163 def template_context(page, tags):
164 return {
165 'page': page,
166 'tags': tags,
167 }
168
169 page = context['page']
170 page.get_template_render_context = partial(
171 template_context, page, tags_metadata)
172
173 products = all_products.get_offset(0, products_on_page)
174
175 return {
176 **context,
177 'product_image_pairs': merge_products_and_images(products),
178 'group_tags_pairs': group_tags_pairs,
179 'total_products': all_products.count(),
180 'sorting_options': config.category_sorting(),
181 'sort': sorting,
182 'tags': tags,
183 'view_type': view_type,
184 'tags_metadata': tags_metadata,
185 }
186
187
188 def load_more(request, category_slug, offset=0, sorting=0, tags=None):
189 """
190 Load more products of a given category.
191
192 :param sorting: preferred sorting index from CATEGORY_SORTING tuple
193 :param request: HttpRequest object
194 :param category_slug: Slug for a given category
195 :param offset: used for slicing QuerySet.
196 :return:
197 """
198 products_on_page = get_products_count(request)
199
200 category = get_object_or_404(models.CategoryPage, slug=category_slug).model
201 sorting_option = config.category_sorting(int(sorting))
202
203 products = (
204 models.Product.objects
205 .prefetch_related('page__images')
206 .select_related('page')
207 .get_by_category(category, ordering=(sorting_option,))
208 )
209
210 if tags:
211 tag_entities = models.Tag.objects.filter(
212 slug__in=models.Tag.parse_url_tags(tags)
213 )
214
215 products = (
216 products
217 .filter(tags__in=tag_entities)
218 # Use distinct because filtering by QuerySet tags,
219 # that related with products by many-to-many relation.
220 .distinct(sorting_option.lstrip('-'))
221 )
222
223 products = products.get_offset(int(offset), products_on_page)
224 view = request.session.get('view_type', 'tile')
225
226 return render(request, 'catalog/category_products.html', {
227 'product_image_pairs': merge_products_and_images(products),
228 'view_type': view,
229 'prods': products_on_page,
230 })
231
232
233 @require_POST
234 def save_feedback(request):
235 def get_keys_from_post(*args):
236 return {arg: request.POST.get(arg, '') for arg in args}
237
238 product_id = request.POST.get('id')
239 product = models.Product.objects.filter(id=product_id).first()
240 if not (product_id and product):
241 return HttpResponse(status=422)
242
243 fields = ['rating', 'name', 'dignities', 'limitations', 'general']
244 feedback_data = get_keys_from_post(*fields)
245
246 models.ProductFeedback.objects.create(product=product, **feedback_data)
247 return HttpResponse('ok')
248
249
250 @require_POST
251 def delete_feedback(request):
252 if not request.user.is_authenticated:
253 return HttpResponseForbidden('Not today, sly guy...')
254
255 feedback_id = request.POST.get('id')
256 feedback = models.ProductFeedback.objects.filter(id=feedback_id).first()
257 if not (feedback_id and feedback):
258 return HttpResponse(status=422)
259
260 feedback.delete()
261 return HttpResponse('Feedback with id={} was deleted.'.format(feedback_id))
262
263
264 class ProductsWithoutImages(catalog.ProductsWithoutImages):
265 model = models.Product
266
267
268 class ProductsWithoutText(catalog.ProductsWithoutText):
269 model = models.Product
270
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/shopelectro/views/catalog.py b/shopelectro/views/catalog.py
--- a/shopelectro/views/catalog.py
+++ b/shopelectro/views/catalog.py
@@ -4,6 +4,7 @@
from django.http import HttpResponse, HttpResponseForbidden
from django.shortcuts import render, get_object_or_404
from django.views.decorators.http import require_POST
+from django.urls import reverse
from django_user_agents.utils import get_user_agent
from catalog.views import catalog
@@ -112,6 +113,17 @@
class CategoryPage(catalog.CategoryPage):
def get_context_data(self, **kwargs):
+
+ def get_category_canonical_url(slug, tags):
+ # patch canonical url for page.
+ # Using reverse(), not request.path to avoid "sorting" url option
+ return reverse(
+ 'category', kwargs=dict(
+ slug=slug,
+ **tags and dict(tags=tags) or {}
+ )
+ )
+
"""Add sorting options and view_types in context."""
context = super(CategoryPage, self).get_context_data(**kwargs)
products_on_page = get_products_count(self.request)
@@ -182,6 +194,9 @@
'tags': tags,
'view_type': view_type,
'tags_metadata': tags_metadata,
+ 'canonical_url': get_category_canonical_url(
+ slug=page.slug, tags=self.kwargs.get('tags')
+ ),
}
|
{"golden_diff": "diff --git a/shopelectro/views/catalog.py b/shopelectro/views/catalog.py\n--- a/shopelectro/views/catalog.py\n+++ b/shopelectro/views/catalog.py\n@@ -4,6 +4,7 @@\n from django.http import HttpResponse, HttpResponseForbidden\n from django.shortcuts import render, get_object_or_404\n from django.views.decorators.http import require_POST\n+from django.urls import reverse\n from django_user_agents.utils import get_user_agent\n \n from catalog.views import catalog\n@@ -112,6 +113,17 @@\n class CategoryPage(catalog.CategoryPage):\n \n def get_context_data(self, **kwargs):\n+\n+ def get_category_canonical_url(slug, tags):\n+ # patch canonical url for page.\n+ # Using reverse(), not request.path to avoid \"sorting\" url option\n+ return reverse(\n+ 'category', kwargs=dict(\n+ slug=slug,\n+ **tags and dict(tags=tags) or {}\n+ )\n+ )\n+\n \"\"\"Add sorting options and view_types in context.\"\"\"\n context = super(CategoryPage, self).get_context_data(**kwargs)\n products_on_page = get_products_count(self.request)\n@@ -182,6 +194,9 @@\n 'tags': tags,\n 'view_type': view_type,\n 'tags_metadata': tags_metadata,\n+ 'canonical_url': get_category_canonical_url(\n+ slug=page.slug, tags=self.kwargs.get('tags')\n+ ),\n }\n", "issue": "Remove Canonical html tag on CategoryPage\n[trello src](https://trello.com/c/QRjRPDKN/295-se-%D1%83%D0%B1%D0%B8%D1%80%D0%B0%D0%B9-canonical-url-%D0%BD%D0%B0-%D1%81%D1%82%D1%80%D0%B0%D0%BD%D0%B8%D1%86%D0%B0%D1%85-%D1%81%D0%B2%D0%BE%D0%B9%D1%81%D1%82)\r\n\r\n\u0421\u0435\u0439\u0447\u0430\u0441 [\u043b\u044e\u0431\u0430\u044f](https://www.shopelectro.ru/catalog/categories/zariadnye-ustroistva-242/tags/robiton/) \u0441\u0442\u0440\u0430\u043d\u0438\u0446\u0430 \u0441 \u0444\u0438\u043b\u044c\u0442\u0440\u0430\u043c\u0438, \u0441\u0447\u0438\u0442\u0430\u0435\u0442\u0441\u044f \u043a\u0430\u043d\u043e\u043d\u0438\u0447\u043d\u043e\u0439 \u0441\u0442\u0440\u0430\u043d\u0438\u0446\u0435 \u0440\u0430\u0437\u0434\u0435\u043b\u0430.\r\n\u0423\u0431\u0438\u0440\u0430\u0439 \u0442\u0435\u0433 `<link rel=\"canonical\" href=\"\">` \u043d\u0430 \u0432\u0441\u0435\u0445 \u0441\u0442\u0440\u0430\u043d\u0438\u0446\u0430\u0445 \u0441 \u0444\u0438\u043b\u044c\u0442\u0440\u0430\u043c\u0438(\u043f\u043e \u0445\u0430\u0440\u0430\u043a\u0442\u0435\u0440\u0438\u0441\u0442\u0438\u043a\u0430\u043c. \u041f\u043e \u0446\u0435\u043d\u0435, \u043d\u0430\u043b\u0438\u0447\u0438\u044e,\u043f\u043e\u043f\u0443\u043b\u044f\u0440\u043d\u043e\u0441\u0442\u0438 \u0438 \u043f\u0430\u0433\u0438\u043d\u0430\u0446\u0438\u0438 - \u043e\u0441\u0442\u0430\u0432\u044c).\n", "before_files": [{"content": "from functools import partial\n\nfrom django.conf import settings\nfrom django.http import HttpResponse, HttpResponseForbidden\nfrom django.shortcuts import render, get_object_or_404\nfrom django.views.decorators.http import require_POST\nfrom django_user_agents.utils import get_user_agent\n\nfrom catalog.views import catalog\nfrom images.models import Image\nfrom pages import views as pages_views\n\nfrom shopelectro import config\nfrom shopelectro import models\nfrom shopelectro.views.helpers import set_csrf_cookie\n\nPRODUCTS_ON_PAGE_PC = 48\nPRODUCTS_ON_PAGE_MOB = 10\n\n\ndef get_products_count(request):\n \"\"\"Get Products count for response context depends on the `user_agent`.\"\"\"\n mobile_view = get_user_agent(request).is_mobile\n return PRODUCTS_ON_PAGE_MOB if mobile_view else PRODUCTS_ON_PAGE_PC\n\n\n# CATALOG VIEWS\nclass CategoryTree(catalog.CategoryTree):\n category_model = models.Category\n\n\n@set_csrf_cookie\nclass ProductPage(catalog.ProductPage):\n pk_url_kwarg = None\n slug_url_kwarg = 'product_vendor_code'\n slug_field = 'vendor_code'\n\n queryset = (\n models.Product.objects\n .filter(category__isnull=False)\n .prefetch_related('product_feedbacks', 'page__images')\n .select_related('page')\n )\n\n def get_context_data(self, **kwargs):\n context = super(ProductPage, self).get_context_data(**kwargs)\n\n group_tags_pairs = (\n models.Tag.objects\n .filter(products=self.object)\n .get_group_tags_pairs()\n )\n\n return {\n **context,\n 'price_bounds': config.PRICE_BOUNDS,\n 'group_tags_pairs': group_tags_pairs\n }\n\n\n# SHOPELECTRO-SPECIFIC VIEWS\n@set_csrf_cookie\nclass IndexPage(pages_views.CustomPageView):\n\n def get_context_data(self, **kwargs):\n \"\"\"Extended method. Add product's images to context.\"\"\"\n context = super(IndexPage, self).get_context_data(**kwargs)\n mobile_view = get_user_agent(self.request).is_mobile\n\n top_products = (\n models.Product.objects\n .filter(id__in=settings.TOP_PRODUCTS)\n .prefetch_related('category')\n .select_related('page')\n )\n\n images = Image.objects.get_main_images_by_pages(\n models.ProductPage.objects.filter(\n shopelectro_product__in=top_products\n )\n )\n\n categories = models.Category.objects.get_root_categories_by_products(\n top_products)\n\n prepared_top_products = []\n if not mobile_view:\n prepared_top_products = [\n (product, images.get(product.page), categories.get(product))\n for product in top_products\n ]\n\n return {\n **context,\n 'category_tile': config.MAIN_PAGE_TILE,\n 'prepared_top_products': prepared_top_products,\n }\n\n\ndef merge_products_and_images(products):\n images = Image.objects.get_main_images_by_pages(\n models.ProductPage.objects.filter(shopelectro_product__in=products)\n )\n\n return [\n (product, images.get(product.page))\n for product in products\n ]\n\n\n@set_csrf_cookie\nclass CategoryPage(catalog.CategoryPage):\n\n def get_context_data(self, **kwargs):\n \"\"\"Add sorting options and view_types in context.\"\"\"\n context = super(CategoryPage, self).get_context_data(**kwargs)\n products_on_page = get_products_count(self.request)\n\n # tile is default view_type\n view_type = self.request.session.get('view_type', 'tile')\n\n category = context['category']\n\n sorting = int(self.kwargs.get('sorting', 0))\n sorting_option = config.category_sorting(sorting)\n\n all_products = (\n models.Product.objects\n .prefetch_related('page__images')\n .select_related('page')\n .get_by_category(category, ordering=(sorting_option, ))\n )\n\n group_tags_pairs = (\n models.Tag.objects\n .filter(products__in=all_products)\n .get_group_tags_pairs()\n )\n\n tags = self.kwargs.get('tags')\n tags_metadata = {\n 'titles': '',\n }\n\n if tags:\n slugs = models.Tag.parse_url_tags(tags)\n tags = models.Tag.objects.filter(slug__in=slugs)\n\n all_products = (\n all_products\n .filter(tags__in=tags)\n # Use distinct because filtering by QuerySet tags,\n # that related with products by many-to-many relation.\n .distinct(sorting_option.lstrip('-'))\n )\n\n tags_titles = models.Tag.serialize_title_tags(\n tags.get_group_tags_pairs()\n )\n\n tags_metadata['titles'] = tags_titles\n\n def template_context(page, tags):\n return {\n 'page': page,\n 'tags': tags,\n }\n\n page = context['page']\n page.get_template_render_context = partial(\n template_context, page, tags_metadata)\n\n products = all_products.get_offset(0, products_on_page)\n\n return {\n **context,\n 'product_image_pairs': merge_products_and_images(products),\n 'group_tags_pairs': group_tags_pairs,\n 'total_products': all_products.count(),\n 'sorting_options': config.category_sorting(),\n 'sort': sorting,\n 'tags': tags,\n 'view_type': view_type,\n 'tags_metadata': tags_metadata,\n }\n\n\ndef load_more(request, category_slug, offset=0, sorting=0, tags=None):\n \"\"\"\n Load more products of a given category.\n\n :param sorting: preferred sorting index from CATEGORY_SORTING tuple\n :param request: HttpRequest object\n :param category_slug: Slug for a given category\n :param offset: used for slicing QuerySet.\n :return:\n \"\"\"\n products_on_page = get_products_count(request)\n\n category = get_object_or_404(models.CategoryPage, slug=category_slug).model\n sorting_option = config.category_sorting(int(sorting))\n\n products = (\n models.Product.objects\n .prefetch_related('page__images')\n .select_related('page')\n .get_by_category(category, ordering=(sorting_option,))\n )\n\n if tags:\n tag_entities = models.Tag.objects.filter(\n slug__in=models.Tag.parse_url_tags(tags)\n )\n\n products = (\n products\n .filter(tags__in=tag_entities)\n # Use distinct because filtering by QuerySet tags,\n # that related with products by many-to-many relation.\n .distinct(sorting_option.lstrip('-'))\n )\n\n products = products.get_offset(int(offset), products_on_page)\n view = request.session.get('view_type', 'tile')\n\n return render(request, 'catalog/category_products.html', {\n 'product_image_pairs': merge_products_and_images(products),\n 'view_type': view,\n 'prods': products_on_page,\n })\n\n\n@require_POST\ndef save_feedback(request):\n def get_keys_from_post(*args):\n return {arg: request.POST.get(arg, '') for arg in args}\n\n product_id = request.POST.get('id')\n product = models.Product.objects.filter(id=product_id).first()\n if not (product_id and product):\n return HttpResponse(status=422)\n\n fields = ['rating', 'name', 'dignities', 'limitations', 'general']\n feedback_data = get_keys_from_post(*fields)\n\n models.ProductFeedback.objects.create(product=product, **feedback_data)\n return HttpResponse('ok')\n\n\n@require_POST\ndef delete_feedback(request):\n if not request.user.is_authenticated:\n return HttpResponseForbidden('Not today, sly guy...')\n\n feedback_id = request.POST.get('id')\n feedback = models.ProductFeedback.objects.filter(id=feedback_id).first()\n if not (feedback_id and feedback):\n return HttpResponse(status=422)\n\n feedback.delete()\n return HttpResponse('Feedback with id={} was deleted.'.format(feedback_id))\n\n\nclass ProductsWithoutImages(catalog.ProductsWithoutImages):\n model = models.Product\n\n\nclass ProductsWithoutText(catalog.ProductsWithoutText):\n model = models.Product\n", "path": "shopelectro/views/catalog.py"}], "after_files": [{"content": "from functools import partial\n\nfrom django.conf import settings\nfrom django.http import HttpResponse, HttpResponseForbidden\nfrom django.shortcuts import render, get_object_or_404\nfrom django.views.decorators.http import require_POST\nfrom django.urls import reverse\nfrom django_user_agents.utils import get_user_agent\n\nfrom catalog.views import catalog\nfrom images.models import Image\nfrom pages import views as pages_views\n\nfrom shopelectro import config\nfrom shopelectro import models\nfrom shopelectro.views.helpers import set_csrf_cookie\n\nPRODUCTS_ON_PAGE_PC = 48\nPRODUCTS_ON_PAGE_MOB = 10\n\n\ndef get_products_count(request):\n \"\"\"Get Products count for response context depends on the `user_agent`.\"\"\"\n mobile_view = get_user_agent(request).is_mobile\n return PRODUCTS_ON_PAGE_MOB if mobile_view else PRODUCTS_ON_PAGE_PC\n\n\n# CATALOG VIEWS\nclass CategoryTree(catalog.CategoryTree):\n category_model = models.Category\n\n\n@set_csrf_cookie\nclass ProductPage(catalog.ProductPage):\n pk_url_kwarg = None\n slug_url_kwarg = 'product_vendor_code'\n slug_field = 'vendor_code'\n\n queryset = (\n models.Product.objects\n .filter(category__isnull=False)\n .prefetch_related('product_feedbacks', 'page__images')\n .select_related('page')\n )\n\n def get_context_data(self, **kwargs):\n context = super(ProductPage, self).get_context_data(**kwargs)\n\n group_tags_pairs = (\n models.Tag.objects\n .filter(products=self.object)\n .get_group_tags_pairs()\n )\n\n return {\n **context,\n 'price_bounds': config.PRICE_BOUNDS,\n 'group_tags_pairs': group_tags_pairs\n }\n\n\n# SHOPELECTRO-SPECIFIC VIEWS\n@set_csrf_cookie\nclass IndexPage(pages_views.CustomPageView):\n\n def get_context_data(self, **kwargs):\n \"\"\"Extended method. Add product's images to context.\"\"\"\n context = super(IndexPage, self).get_context_data(**kwargs)\n mobile_view = get_user_agent(self.request).is_mobile\n\n top_products = (\n models.Product.objects\n .filter(id__in=settings.TOP_PRODUCTS)\n .prefetch_related('category')\n .select_related('page')\n )\n\n images = Image.objects.get_main_images_by_pages(\n models.ProductPage.objects.filter(\n shopelectro_product__in=top_products\n )\n )\n\n categories = models.Category.objects.get_root_categories_by_products(\n top_products)\n\n prepared_top_products = []\n if not mobile_view:\n prepared_top_products = [\n (product, images.get(product.page), categories.get(product))\n for product in top_products\n ]\n\n return {\n **context,\n 'category_tile': config.MAIN_PAGE_TILE,\n 'prepared_top_products': prepared_top_products,\n }\n\n\ndef merge_products_and_images(products):\n images = Image.objects.get_main_images_by_pages(\n models.ProductPage.objects.filter(shopelectro_product__in=products)\n )\n\n return [\n (product, images.get(product.page))\n for product in products\n ]\n\n\n@set_csrf_cookie\nclass CategoryPage(catalog.CategoryPage):\n\n def get_context_data(self, **kwargs):\n\n def get_category_canonical_url(slug, tags):\n # patch canonical url for page.\n # Using reverse(), not request.path to avoid \"sorting\" url option\n return reverse(\n 'category', kwargs=dict(\n slug=slug,\n **tags and dict(tags=tags) or {}\n )\n )\n\n \"\"\"Add sorting options and view_types in context.\"\"\"\n context = super(CategoryPage, self).get_context_data(**kwargs)\n products_on_page = get_products_count(self.request)\n\n # tile is default view_type\n view_type = self.request.session.get('view_type', 'tile')\n\n category = context['category']\n\n sorting = int(self.kwargs.get('sorting', 0))\n sorting_option = config.category_sorting(sorting)\n\n all_products = (\n models.Product.objects\n .prefetch_related('page__images')\n .select_related('page')\n .get_by_category(category, ordering=(sorting_option, ))\n )\n\n group_tags_pairs = (\n models.Tag.objects\n .filter(products__in=all_products)\n .get_group_tags_pairs()\n )\n\n tags = self.kwargs.get('tags')\n tags_metadata = {\n 'titles': '',\n }\n\n if tags:\n slugs = models.Tag.parse_url_tags(tags)\n tags = models.Tag.objects.filter(slug__in=slugs)\n\n all_products = (\n all_products\n .filter(tags__in=tags)\n # Use distinct because filtering by QuerySet tags,\n # that related with products by many-to-many relation.\n .distinct(sorting_option.lstrip('-'))\n )\n\n tags_titles = models.Tag.serialize_title_tags(\n tags.get_group_tags_pairs()\n )\n\n tags_metadata['titles'] = tags_titles\n\n def template_context(page, tags):\n return {\n 'page': page,\n 'tags': tags,\n }\n\n page = context['page']\n page.get_template_render_context = partial(\n template_context, page, tags_metadata)\n\n products = all_products.get_offset(0, products_on_page)\n\n return {\n **context,\n 'product_image_pairs': merge_products_and_images(products),\n 'group_tags_pairs': group_tags_pairs,\n 'total_products': all_products.count(),\n 'sorting_options': config.category_sorting(),\n 'sort': sorting,\n 'tags': tags,\n 'view_type': view_type,\n 'tags_metadata': tags_metadata,\n 'canonical_url': get_category_canonical_url(\n slug=page.slug, tags=self.kwargs.get('tags')\n ),\n }\n\n\ndef load_more(request, category_slug, offset=0, sorting=0, tags=None):\n \"\"\"\n Load more products of a given category.\n\n :param sorting: preferred sorting index from CATEGORY_SORTING tuple\n :param request: HttpRequest object\n :param category_slug: Slug for a given category\n :param offset: used for slicing QuerySet.\n :return:\n \"\"\"\n products_on_page = get_products_count(request)\n\n category = get_object_or_404(models.CategoryPage, slug=category_slug).model\n sorting_option = config.category_sorting(int(sorting))\n\n products = (\n models.Product.objects\n .prefetch_related('page__images')\n .select_related('page')\n .get_by_category(category, ordering=(sorting_option,))\n )\n\n if tags:\n tag_entities = models.Tag.objects.filter(\n slug__in=models.Tag.parse_url_tags(tags)\n )\n\n products = (\n products\n .filter(tags__in=tag_entities)\n # Use distinct because filtering by QuerySet tags,\n # that related with products by many-to-many relation.\n .distinct(sorting_option.lstrip('-'))\n )\n\n products = products.get_offset(int(offset), products_on_page)\n view = request.session.get('view_type', 'tile')\n\n return render(request, 'catalog/category_products.html', {\n 'product_image_pairs': merge_products_and_images(products),\n 'view_type': view,\n 'prods': products_on_page,\n })\n\n\n@require_POST\ndef save_feedback(request):\n def get_keys_from_post(*args):\n return {arg: request.POST.get(arg, '') for arg in args}\n\n product_id = request.POST.get('id')\n product = models.Product.objects.filter(id=product_id).first()\n if not (product_id and product):\n return HttpResponse(status=422)\n\n fields = ['rating', 'name', 'dignities', 'limitations', 'general']\n feedback_data = get_keys_from_post(*fields)\n\n models.ProductFeedback.objects.create(product=product, **feedback_data)\n return HttpResponse('ok')\n\n\n@require_POST\ndef delete_feedback(request):\n if not request.user.is_authenticated:\n return HttpResponseForbidden('Not today, sly guy...')\n\n feedback_id = request.POST.get('id')\n feedback = models.ProductFeedback.objects.filter(id=feedback_id).first()\n if not (feedback_id and feedback):\n return HttpResponse(status=422)\n\n feedback.delete()\n return HttpResponse('Feedback with id={} was deleted.'.format(feedback_id))\n\n\nclass ProductsWithoutImages(catalog.ProductsWithoutImages):\n model = models.Product\n\n\nclass ProductsWithoutText(catalog.ProductsWithoutText):\n model = models.Product\n", "path": "shopelectro/views/catalog.py"}]}
| 2,941 | 320 |
gh_patches_debug_56203
|
rasdani/github-patches
|
git_diff
|
pypi__warehouse-3130
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Change "Edit" to "Manage" in "Your Projects"
Change the button/link text "Edit" to "Manage" in "Your Projects".
From IRC conversation with @alanbato, @ewdurbin and @nlhkabu .
~~~
<EWDurbin> I think perhaps “Manage” might be a better name for the button that currently says “Edit"
<EWDurbin> Just right off the bat. Since well you can’t really Edit anything, just delete files/releases/projects
<di_codes> ^ agreed
<alanbato> Makes sense to me, Edit misguides people into thinking they can change project attributes imho
<nlh> yep 100% agree
<sumanah> nlh: and I agree with them but I want to hear your thoughts -- you're the one who's done user testing, so do you think people would understand "manage"?
<nlh> i'll open a PR :)
<nlh> yes
<sumanah> thanks nlh!
<nlh> it's also more consistent with the URL structure
~~~
But I do not see a pull request from Nicole yet, so I declare this a:
**Good First Issue**: This issue is good for first time contributors. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, please feel free to ask them here, in [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or on the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).
Fix for #3118
Fixes #3118.
Updated the projects.html and the relating sass snippet to show **manage** in stead of **edit** when in **Your projects**.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `warehouse/packaging/views.py`
Content:
```
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13 from first import first
14 from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
15 from pyramid.view import view_config
16 from sqlalchemy.orm.exc import NoResultFound
17
18 from warehouse.accounts.models import User
19 from warehouse.cache.origin import origin_cache
20 from warehouse.packaging.models import Release, Role
21
22
23 @view_config(
24 route_name="packaging.project",
25 renderer="packaging/detail.html",
26 decorator=[
27 origin_cache(
28 1 * 24 * 60 * 60, # 1 day
29 stale_while_revalidate=1 * 24 * 60 * 60, # 1 day
30 stale_if_error=5 * 24 * 60 * 60, # 5 days
31 ),
32 ],
33 )
34 def project_detail(project, request):
35 if project.name != request.matchdict.get("name", project.name):
36 return HTTPMovedPermanently(
37 request.current_route_path(name=project.name),
38 )
39
40 try:
41 release = (
42 request.db.query(Release)
43 .filter(Release.project == project)
44 .order_by(
45 Release.is_prerelease.nullslast(),
46 Release._pypi_ordering.desc())
47 .limit(1)
48 .one()
49 )
50 except NoResultFound:
51 return HTTPNotFound()
52
53 return release_detail(release, request)
54
55
56 @view_config(
57 route_name="packaging.release",
58 renderer="packaging/detail.html",
59 decorator=[
60 origin_cache(
61 1 * 24 * 60 * 60, # 1 day
62 stale_while_revalidate=1 * 24 * 60 * 60, # 1 day
63 stale_if_error=5 * 24 * 60 * 60, # 5 days
64 ),
65 ],
66 )
67 def release_detail(release, request):
68 project = release.project
69
70 if not {project.name, release.version} <= set(request.matchdict.values()):
71 return HTTPMovedPermanently(
72 request.current_route_path(
73 name=project.name, version=release.version,
74 ),
75 )
76
77 # Get all of the registered versions for this Project, in order of newest
78 # to oldest.
79 all_releases = (
80 request.db.query(Release)
81 .filter(Release.project == project)
82 .with_entities(
83 Release.version,
84 Release.is_prerelease,
85 Release.created)
86 .order_by(Release._pypi_ordering.desc())
87 .all()
88 )
89
90 # Get the latest non-prerelease of this Project, or the latest release if
91 # all releases are prereleases.
92 latest_release = first(
93 all_releases,
94 key=lambda r: not r.is_prerelease,
95 default=all_releases[0],
96 )
97
98 # Get all of the maintainers for this project.
99 maintainers = [
100 r.user
101 for r in (
102 request.db.query(Role)
103 .join(User)
104 .filter(Role.project == project)
105 .distinct(User.username)
106 .order_by(User.username)
107 .all()
108 )
109 ]
110
111 # Get the license from the classifiers or metadata, preferring classifiers.
112 license = None
113 if release.license:
114 # Make a best effort when the entire license text is given
115 # by using the first line only.
116 license = release.license.split('\n')[0]
117 license_classifiers = [c.split(" :: ")[-1] for c in release.classifiers
118 if c.startswith("License")]
119 if license_classifiers:
120 license = ', '.join(license_classifiers)
121
122 return {
123 "project": project,
124 "release": release,
125 "files": release.files.all(),
126 "latest_release": latest_release,
127 "all_releases": all_releases,
128 "maintainers": maintainers,
129 "license": license,
130 }
131
132
133 @view_config(
134 route_name="includes.edit-project-button",
135 renderer="includes/edit-project-button.html",
136 uses_session=True,
137 permission="manage",
138 )
139 def edit_project_button(project, request):
140 return {'project': project}
141
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -132,7 +132,7 @@
@view_config(
route_name="includes.edit-project-button",
- renderer="includes/edit-project-button.html",
+ renderer="includes/manage-project-button.html",
uses_session=True,
permission="manage",
)
|
{"golden_diff": "diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py\n--- a/warehouse/packaging/views.py\n+++ b/warehouse/packaging/views.py\n@@ -132,7 +132,7 @@\n \n @view_config(\n route_name=\"includes.edit-project-button\",\n- renderer=\"includes/edit-project-button.html\",\n+ renderer=\"includes/manage-project-button.html\",\n uses_session=True,\n permission=\"manage\",\n )\n", "issue": "Change \"Edit\" to \"Manage\" in \"Your Projects\"\nChange the button/link text \"Edit\" to \"Manage\" in \"Your Projects\".\r\n\r\nFrom IRC conversation with @alanbato, @ewdurbin and @nlhkabu .\r\n\r\n~~~\r\n<EWDurbin> I think perhaps \u201cManage\u201d might be a better name for the button that currently says \u201cEdit\"\r\n<EWDurbin> Just right off the bat. Since well you can\u2019t really Edit anything, just delete files/releases/projects\r\n<di_codes> ^ agreed\r\n<alanbato> Makes sense to me, Edit misguides people into thinking they can change project attributes imho\r\n<nlh> yep 100% agree\r\n<sumanah> nlh: and I agree with them but I want to hear your thoughts -- you're the one who's done user testing, so do you think people would understand \"manage\"?\r\n<nlh> i'll open a PR :)\r\n<nlh> yes\r\n<sumanah> thanks nlh!\r\n<nlh> it's also more consistent with the URL structure\r\n~~~\r\n\r\nBut I do not see a pull request from Nicole yet, so I declare this a:\r\n\r\n**Good First Issue**: This issue is good for first time contributors. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, please feel free to ask them here, in [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or on the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).\nFix for #3118\nFixes #3118.\r\n\r\nUpdated the projects.html and the relating sass snippet to show **manage** in stead of **edit** when in **Your projects**.\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom first import first\nfrom pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound\nfrom pyramid.view import view_config\nfrom sqlalchemy.orm.exc import NoResultFound\n\nfrom warehouse.accounts.models import User\nfrom warehouse.cache.origin import origin_cache\nfrom warehouse.packaging.models import Release, Role\n\n\n@view_config(\n route_name=\"packaging.project\",\n renderer=\"packaging/detail.html\",\n decorator=[\n origin_cache(\n 1 * 24 * 60 * 60, # 1 day\n stale_while_revalidate=1 * 24 * 60 * 60, # 1 day\n stale_if_error=5 * 24 * 60 * 60, # 5 days\n ),\n ],\n)\ndef project_detail(project, request):\n if project.name != request.matchdict.get(\"name\", project.name):\n return HTTPMovedPermanently(\n request.current_route_path(name=project.name),\n )\n\n try:\n release = (\n request.db.query(Release)\n .filter(Release.project == project)\n .order_by(\n Release.is_prerelease.nullslast(),\n Release._pypi_ordering.desc())\n .limit(1)\n .one()\n )\n except NoResultFound:\n return HTTPNotFound()\n\n return release_detail(release, request)\n\n\n@view_config(\n route_name=\"packaging.release\",\n renderer=\"packaging/detail.html\",\n decorator=[\n origin_cache(\n 1 * 24 * 60 * 60, # 1 day\n stale_while_revalidate=1 * 24 * 60 * 60, # 1 day\n stale_if_error=5 * 24 * 60 * 60, # 5 days\n ),\n ],\n)\ndef release_detail(release, request):\n project = release.project\n\n if not {project.name, release.version} <= set(request.matchdict.values()):\n return HTTPMovedPermanently(\n request.current_route_path(\n name=project.name, version=release.version,\n ),\n )\n\n # Get all of the registered versions for this Project, in order of newest\n # to oldest.\n all_releases = (\n request.db.query(Release)\n .filter(Release.project == project)\n .with_entities(\n Release.version,\n Release.is_prerelease,\n Release.created)\n .order_by(Release._pypi_ordering.desc())\n .all()\n )\n\n # Get the latest non-prerelease of this Project, or the latest release if\n # all releases are prereleases.\n latest_release = first(\n all_releases,\n key=lambda r: not r.is_prerelease,\n default=all_releases[0],\n )\n\n # Get all of the maintainers for this project.\n maintainers = [\n r.user\n for r in (\n request.db.query(Role)\n .join(User)\n .filter(Role.project == project)\n .distinct(User.username)\n .order_by(User.username)\n .all()\n )\n ]\n\n # Get the license from the classifiers or metadata, preferring classifiers.\n license = None\n if release.license:\n # Make a best effort when the entire license text is given\n # by using the first line only.\n license = release.license.split('\\n')[0]\n license_classifiers = [c.split(\" :: \")[-1] for c in release.classifiers\n if c.startswith(\"License\")]\n if license_classifiers:\n license = ', '.join(license_classifiers)\n\n return {\n \"project\": project,\n \"release\": release,\n \"files\": release.files.all(),\n \"latest_release\": latest_release,\n \"all_releases\": all_releases,\n \"maintainers\": maintainers,\n \"license\": license,\n }\n\n\n@view_config(\n route_name=\"includes.edit-project-button\",\n renderer=\"includes/edit-project-button.html\",\n uses_session=True,\n permission=\"manage\",\n)\ndef edit_project_button(project, request):\n return {'project': project}\n", "path": "warehouse/packaging/views.py"}], "after_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom first import first\nfrom pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound\nfrom pyramid.view import view_config\nfrom sqlalchemy.orm.exc import NoResultFound\n\nfrom warehouse.accounts.models import User\nfrom warehouse.cache.origin import origin_cache\nfrom warehouse.packaging.models import Release, Role\n\n\n@view_config(\n route_name=\"packaging.project\",\n renderer=\"packaging/detail.html\",\n decorator=[\n origin_cache(\n 1 * 24 * 60 * 60, # 1 day\n stale_while_revalidate=1 * 24 * 60 * 60, # 1 day\n stale_if_error=5 * 24 * 60 * 60, # 5 days\n ),\n ],\n)\ndef project_detail(project, request):\n if project.name != request.matchdict.get(\"name\", project.name):\n return HTTPMovedPermanently(\n request.current_route_path(name=project.name),\n )\n\n try:\n release = (\n request.db.query(Release)\n .filter(Release.project == project)\n .order_by(\n Release.is_prerelease.nullslast(),\n Release._pypi_ordering.desc())\n .limit(1)\n .one()\n )\n except NoResultFound:\n return HTTPNotFound()\n\n return release_detail(release, request)\n\n\n@view_config(\n route_name=\"packaging.release\",\n renderer=\"packaging/detail.html\",\n decorator=[\n origin_cache(\n 1 * 24 * 60 * 60, # 1 day\n stale_while_revalidate=1 * 24 * 60 * 60, # 1 day\n stale_if_error=5 * 24 * 60 * 60, # 5 days\n ),\n ],\n)\ndef release_detail(release, request):\n project = release.project\n\n if not {project.name, release.version} <= set(request.matchdict.values()):\n return HTTPMovedPermanently(\n request.current_route_path(\n name=project.name, version=release.version,\n ),\n )\n\n # Get all of the registered versions for this Project, in order of newest\n # to oldest.\n all_releases = (\n request.db.query(Release)\n .filter(Release.project == project)\n .with_entities(\n Release.version,\n Release.is_prerelease,\n Release.created)\n .order_by(Release._pypi_ordering.desc())\n .all()\n )\n\n # Get the latest non-prerelease of this Project, or the latest release if\n # all releases are prereleases.\n latest_release = first(\n all_releases,\n key=lambda r: not r.is_prerelease,\n default=all_releases[0],\n )\n\n # Get all of the maintainers for this project.\n maintainers = [\n r.user\n for r in (\n request.db.query(Role)\n .join(User)\n .filter(Role.project == project)\n .distinct(User.username)\n .order_by(User.username)\n .all()\n )\n ]\n\n # Get the license from the classifiers or metadata, preferring classifiers.\n license = None\n if release.license:\n # Make a best effort when the entire license text is given\n # by using the first line only.\n license = release.license.split('\\n')[0]\n license_classifiers = [c.split(\" :: \")[-1] for c in release.classifiers\n if c.startswith(\"License\")]\n if license_classifiers:\n license = ', '.join(license_classifiers)\n\n return {\n \"project\": project,\n \"release\": release,\n \"files\": release.files.all(),\n \"latest_release\": latest_release,\n \"all_releases\": all_releases,\n \"maintainers\": maintainers,\n \"license\": license,\n }\n\n\n@view_config(\n route_name=\"includes.edit-project-button\",\n renderer=\"includes/manage-project-button.html\",\n uses_session=True,\n permission=\"manage\",\n)\ndef edit_project_button(project, request):\n return {'project': project}\n", "path": "warehouse/packaging/views.py"}]}
| 2,012 | 99 |
gh_patches_debug_31146
|
rasdani/github-patches
|
git_diff
|
mathesar-foundation__mathesar-2594
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
install.sh fails with empty secret_key on Mac OS Ventura
## Description
Mathesar fails to start because secret_key is empty in .env file after running `install.sh`. The script also fails due to that and steps after that do not run.
Note: This happens on Mac OS Ventura, but seems to work fine on Big Sur
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `db/engine.py`
Content:
```
1 import copy
2
3 from sqlalchemy import create_engine as sa_create_engine
4
5 from db.types.custom.base import CUSTOM_DB_TYPE_TO_SA_CLASS
6
7
8 def get_connection_string(username, password, hostname, database, port='5432'):
9 return f"postgresql://{username}:{password}@{hostname}:{port}/{database}"
10
11
12 def create_future_engine_with_custom_types(
13 username, password, hostname, database, port, *args, **kwargs
14 ):
15 engine = create_future_engine(
16 username, password, hostname, database, port, *args, **kwargs
17 )
18 # We need to add our custom types to any engine created for SQLALchemy use
19 # so that they can be used for reflection
20 add_custom_types_to_ischema_names(engine)
21 return engine
22
23
24 # TODO would an engine without ischema names updated ever be used? make it private if not
25 def create_future_engine(
26 username, password, hostname, database, port, *args, **kwargs
27 ):
28 conn_str = get_connection_string(
29 username, password, hostname, database, port
30 )
31 kwargs.update(future=True)
32 return create_engine(conn_str, *args, **kwargs)
33
34
35 # NOTE: used in testing, hence public
36 def create_engine(conn_str, *args, **kwargs):
37 """
38 Wrapper over sqlalchemy.create_engine that stops SA from propagating changes to ischema_names
39 across all engines. This is important for testing: without this intervention, fixtures become
40 randomly corrupted.
41 """
42 engine = sa_create_engine(conn_str, *args, **kwargs)
43 _make_ischema_names_unique(engine)
44 return engine
45
46
47 # TODO should refactor for this to be private
48 def add_custom_types_to_ischema_names(engine):
49 """
50 Updating the ischema_names dict changes which Postgres types are reflected into which SA
51 classes.
52 """
53 for db_type, sa_class in CUSTOM_DB_TYPE_TO_SA_CLASS.items():
54 db_type_id = db_type.id
55 engine.dialect.ischema_names[db_type_id] = sa_class
56
57
58 def get_dummy_engine():
59 """
60 In some cases we only need an engine to access the Postgres dialect. E.g. when examining the
61 ischema_names dict. In those cases, following is enough:
62 """
63 engine = create_engine("postgresql://", future=True)
64 add_custom_types_to_ischema_names(engine)
65 return engine
66
67
68 def _make_ischema_names_unique(engine):
69 """
70 For some reason, engine.dialect.ischema_names reference the same dict across different engines.
71 This resets it to a referentially unique copy of itself.
72 """
73 ischema_names = engine.dialect.ischema_names
74 ischema_names_copy = copy.deepcopy(ischema_names)
75 setattr(engine.dialect, "ischema_names", ischema_names_copy)
76
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/db/engine.py b/db/engine.py
--- a/db/engine.py
+++ b/db/engine.py
@@ -1,14 +1,11 @@
import copy
from sqlalchemy import create_engine as sa_create_engine
+from sqlalchemy.engine import URL
from db.types.custom.base import CUSTOM_DB_TYPE_TO_SA_CLASS
-def get_connection_string(username, password, hostname, database, port='5432'):
- return f"postgresql://{username}:{password}@{hostname}:{port}/{database}"
-
-
def create_future_engine_with_custom_types(
username, password, hostname, database, port, *args, **kwargs
):
@@ -25,21 +22,26 @@
def create_future_engine(
username, password, hostname, database, port, *args, **kwargs
):
- conn_str = get_connection_string(
- username, password, hostname, database, port
+ conn_url = URL.create(
+ "postgresql",
+ username=username,
+ password=password,
+ host=hostname,
+ database=database,
+ port=port,
)
kwargs.update(future=True)
- return create_engine(conn_str, *args, **kwargs)
+ return create_engine(conn_url, *args, **kwargs)
# NOTE: used in testing, hence public
-def create_engine(conn_str, *args, **kwargs):
+def create_engine(conn_url, *args, **kwargs):
"""
Wrapper over sqlalchemy.create_engine that stops SA from propagating changes to ischema_names
across all engines. This is important for testing: without this intervention, fixtures become
randomly corrupted.
"""
- engine = sa_create_engine(conn_str, *args, **kwargs)
+ engine = sa_create_engine(conn_url, *args, **kwargs)
_make_ischema_names_unique(engine)
return engine
|
{"golden_diff": "diff --git a/db/engine.py b/db/engine.py\n--- a/db/engine.py\n+++ b/db/engine.py\n@@ -1,14 +1,11 @@\n import copy\n \n from sqlalchemy import create_engine as sa_create_engine\n+from sqlalchemy.engine import URL\n \n from db.types.custom.base import CUSTOM_DB_TYPE_TO_SA_CLASS\n \n \n-def get_connection_string(username, password, hostname, database, port='5432'):\n- return f\"postgresql://{username}:{password}@{hostname}:{port}/{database}\"\n-\n-\n def create_future_engine_with_custom_types(\n username, password, hostname, database, port, *args, **kwargs\n ):\n@@ -25,21 +22,26 @@\n def create_future_engine(\n username, password, hostname, database, port, *args, **kwargs\n ):\n- conn_str = get_connection_string(\n- username, password, hostname, database, port\n+ conn_url = URL.create(\n+ \"postgresql\",\n+ username=username,\n+ password=password,\n+ host=hostname,\n+ database=database,\n+ port=port,\n )\n kwargs.update(future=True)\n- return create_engine(conn_str, *args, **kwargs)\n+ return create_engine(conn_url, *args, **kwargs)\n \n \n # NOTE: used in testing, hence public\n-def create_engine(conn_str, *args, **kwargs):\n+def create_engine(conn_url, *args, **kwargs):\n \"\"\"\n Wrapper over sqlalchemy.create_engine that stops SA from propagating changes to ischema_names\n across all engines. This is important for testing: without this intervention, fixtures become\n randomly corrupted.\n \"\"\"\n- engine = sa_create_engine(conn_str, *args, **kwargs)\n+ engine = sa_create_engine(conn_url, *args, **kwargs)\n _make_ischema_names_unique(engine)\n return engine\n", "issue": "install.sh fails with empty secret_key on Mac OS Ventura\n## Description\r\n\r\nMathesar fails to start because secret_key is empty in .env file after running `install.sh`. The script also fails due to that and steps after that do not run.\r\n\r\nNote: This happens on Mac OS Ventura, but seems to work fine on Big Sur\n", "before_files": [{"content": "import copy\n\nfrom sqlalchemy import create_engine as sa_create_engine\n\nfrom db.types.custom.base import CUSTOM_DB_TYPE_TO_SA_CLASS\n\n\ndef get_connection_string(username, password, hostname, database, port='5432'):\n return f\"postgresql://{username}:{password}@{hostname}:{port}/{database}\"\n\n\ndef create_future_engine_with_custom_types(\n username, password, hostname, database, port, *args, **kwargs\n):\n engine = create_future_engine(\n username, password, hostname, database, port, *args, **kwargs\n )\n # We need to add our custom types to any engine created for SQLALchemy use\n # so that they can be used for reflection\n add_custom_types_to_ischema_names(engine)\n return engine\n\n\n# TODO would an engine without ischema names updated ever be used? make it private if not\ndef create_future_engine(\n username, password, hostname, database, port, *args, **kwargs\n):\n conn_str = get_connection_string(\n username, password, hostname, database, port\n )\n kwargs.update(future=True)\n return create_engine(conn_str, *args, **kwargs)\n\n\n# NOTE: used in testing, hence public\ndef create_engine(conn_str, *args, **kwargs):\n \"\"\"\n Wrapper over sqlalchemy.create_engine that stops SA from propagating changes to ischema_names\n across all engines. This is important for testing: without this intervention, fixtures become\n randomly corrupted.\n \"\"\"\n engine = sa_create_engine(conn_str, *args, **kwargs)\n _make_ischema_names_unique(engine)\n return engine\n\n\n# TODO should refactor for this to be private\ndef add_custom_types_to_ischema_names(engine):\n \"\"\"\n Updating the ischema_names dict changes which Postgres types are reflected into which SA\n classes.\n \"\"\"\n for db_type, sa_class in CUSTOM_DB_TYPE_TO_SA_CLASS.items():\n db_type_id = db_type.id\n engine.dialect.ischema_names[db_type_id] = sa_class\n\n\ndef get_dummy_engine():\n \"\"\"\n In some cases we only need an engine to access the Postgres dialect. E.g. when examining the\n ischema_names dict. In those cases, following is enough:\n \"\"\"\n engine = create_engine(\"postgresql://\", future=True)\n add_custom_types_to_ischema_names(engine)\n return engine\n\n\ndef _make_ischema_names_unique(engine):\n \"\"\"\n For some reason, engine.dialect.ischema_names reference the same dict across different engines.\n This resets it to a referentially unique copy of itself.\n \"\"\"\n ischema_names = engine.dialect.ischema_names\n ischema_names_copy = copy.deepcopy(ischema_names)\n setattr(engine.dialect, \"ischema_names\", ischema_names_copy)\n", "path": "db/engine.py"}], "after_files": [{"content": "import copy\n\nfrom sqlalchemy import create_engine as sa_create_engine\nfrom sqlalchemy.engine import URL\n\nfrom db.types.custom.base import CUSTOM_DB_TYPE_TO_SA_CLASS\n\n\ndef create_future_engine_with_custom_types(\n username, password, hostname, database, port, *args, **kwargs\n):\n engine = create_future_engine(\n username, password, hostname, database, port, *args, **kwargs\n )\n # We need to add our custom types to any engine created for SQLALchemy use\n # so that they can be used for reflection\n add_custom_types_to_ischema_names(engine)\n return engine\n\n\n# TODO would an engine without ischema names updated ever be used? make it private if not\ndef create_future_engine(\n username, password, hostname, database, port, *args, **kwargs\n):\n conn_url = URL.create(\n \"postgresql\",\n username=username,\n password=password,\n host=hostname,\n database=database,\n port=port,\n )\n kwargs.update(future=True)\n return create_engine(conn_url, *args, **kwargs)\n\n\n# NOTE: used in testing, hence public\ndef create_engine(conn_url, *args, **kwargs):\n \"\"\"\n Wrapper over sqlalchemy.create_engine that stops SA from propagating changes to ischema_names\n across all engines. This is important for testing: without this intervention, fixtures become\n randomly corrupted.\n \"\"\"\n engine = sa_create_engine(conn_url, *args, **kwargs)\n _make_ischema_names_unique(engine)\n return engine\n\n\n# TODO should refactor for this to be private\ndef add_custom_types_to_ischema_names(engine):\n \"\"\"\n Updating the ischema_names dict changes which Postgres types are reflected into which SA\n classes.\n \"\"\"\n for db_type, sa_class in CUSTOM_DB_TYPE_TO_SA_CLASS.items():\n db_type_id = db_type.id\n engine.dialect.ischema_names[db_type_id] = sa_class\n\n\ndef get_dummy_engine():\n \"\"\"\n In some cases we only need an engine to access the Postgres dialect. E.g. when examining the\n ischema_names dict. In those cases, following is enough:\n \"\"\"\n engine = create_engine(\"postgresql://\", future=True)\n add_custom_types_to_ischema_names(engine)\n return engine\n\n\ndef _make_ischema_names_unique(engine):\n \"\"\"\n For some reason, engine.dialect.ischema_names reference the same dict across different engines.\n This resets it to a referentially unique copy of itself.\n \"\"\"\n ischema_names = engine.dialect.ischema_names\n ischema_names_copy = copy.deepcopy(ischema_names)\n setattr(engine.dialect, \"ischema_names\", ischema_names_copy)\n", "path": "db/engine.py"}]}
| 1,063 | 396 |
gh_patches_debug_27518
|
rasdani/github-patches
|
git_diff
|
PaddlePaddle__PaddleOCR-9898
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
paddleocr中gen_lable.py对icdar2015数据转换有误

我用gen_lable.py转换的数据是这样子的,我感觉好像是错误的
不应该是:ch4_training_images/img_1.jpg [{"transcription": "Genaxis Theatre", "points": [[377, 117], [463, 117], [465, 130], [378, 130]
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ppocr/modeling/heads/__init__.py`
Content:
```
1 # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 __all__ = ['build_head']
16
17
18 def build_head(config):
19 # det head
20 from .det_db_head import DBHead, CBNHeadLocal
21 from .det_east_head import EASTHead
22 from .det_sast_head import SASTHead
23 from .det_pse_head import PSEHead
24 from .det_fce_head import FCEHead
25 from .e2e_pg_head import PGHead
26 from .det_ct_head import CT_Head
27 # rec head
28 from .rec_ctc_head import CTCHead
29 from .rec_att_head import AttentionHead
30 from .rec_srn_head import SRNHead
31 from .rec_nrtr_head import Transformer
32 from .rec_sar_head import SARHead
33 from .rec_aster_head import AsterHead
34 from .rec_pren_head import PRENHead
35 from .rec_multi_head import MultiHead
36 from .rec_spin_att_head import SPINAttentionHead
37 from .rec_abinet_head import ABINetHead
38 from .rec_robustscanner_head import RobustScannerHead
39 from .rec_visionlan_head import VLHead
40 from .rec_rfl_head import RFLHead
41 from .rec_can_head import CANHead
42 from .rec_satrn_head import SATRNHead
43
44 # cls head
45 from .cls_head import ClsHead
46
47 #kie head
48 from .kie_sdmgr_head import SDMGRHead
49
50 from .table_att_head import TableAttentionHead, SLAHead
51 from .table_master_head import TableMasterHead
52
53 support_dict = [
54 'DBHead', 'PSEHead', 'FCEHead', 'EASTHead', 'SASTHead', 'CTCHead',
55 'ClsHead', 'AttentionHead', 'SRNHead', 'PGHead', 'Transformer',
56 'TableAttentionHead', 'SARHead', 'AsterHead', 'SDMGRHead', 'PRENHead',
57 'MultiHead', 'ABINetHead', 'TableMasterHead', 'SPINAttentionHead',
58 'VLHead', 'SLAHead', 'RobustScannerHead', 'CT_Head', 'RFLHead',
59 'DRRGHead', 'CANHead', 'SATRNHead', 'CBNHeadLocal'
60 ]
61
62 if config['name'] == 'DRRGHead':
63 from .det_drrg_head import DRRGHead
64 support_dict.append('DRRGHead')
65
66 #table head
67
68 module_name = config.pop('name')
69 assert module_name in support_dict, Exception('head only support {}'.format(
70 support_dict))
71 module_class = eval(module_name)(**config)
72 return module_class
73
```
Path: `ppocr/modeling/heads/det_db_head.py`
Content:
```
1 # copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from __future__ import absolute_import
16 from __future__ import division
17 from __future__ import print_function
18
19 import math
20 import paddle
21 from paddle import nn
22 import paddle.nn.functional as F
23 from paddle import ParamAttr
24 from ppocr.modeling.backbones.det_mobilenet_v3 import ConvBNLayer
25
26
27 def get_bias_attr(k):
28 stdv = 1.0 / math.sqrt(k * 1.0)
29 initializer = paddle.nn.initializer.Uniform(-stdv, stdv)
30 bias_attr = ParamAttr(initializer=initializer)
31 return bias_attr
32
33
34 class Head(nn.Layer):
35 def __init__(self, in_channels, kernel_list=[3, 2, 2], **kwargs):
36 super(Head, self).__init__()
37
38 self.conv1 = nn.Conv2D(
39 in_channels=in_channels,
40 out_channels=in_channels // 4,
41 kernel_size=kernel_list[0],
42 padding=int(kernel_list[0] // 2),
43 weight_attr=ParamAttr(),
44 bias_attr=False)
45 self.conv_bn1 = nn.BatchNorm(
46 num_channels=in_channels // 4,
47 param_attr=ParamAttr(
48 initializer=paddle.nn.initializer.Constant(value=1.0)),
49 bias_attr=ParamAttr(
50 initializer=paddle.nn.initializer.Constant(value=1e-4)),
51 act='relu')
52
53 self.conv2 = nn.Conv2DTranspose(
54 in_channels=in_channels // 4,
55 out_channels=in_channels // 4,
56 kernel_size=kernel_list[1],
57 stride=2,
58 weight_attr=ParamAttr(
59 initializer=paddle.nn.initializer.KaimingUniform()),
60 bias_attr=get_bias_attr(in_channels // 4))
61 self.conv_bn2 = nn.BatchNorm(
62 num_channels=in_channels // 4,
63 param_attr=ParamAttr(
64 initializer=paddle.nn.initializer.Constant(value=1.0)),
65 bias_attr=ParamAttr(
66 initializer=paddle.nn.initializer.Constant(value=1e-4)),
67 act="relu")
68 self.conv3 = nn.Conv2DTranspose(
69 in_channels=in_channels // 4,
70 out_channels=1,
71 kernel_size=kernel_list[2],
72 stride=2,
73 weight_attr=ParamAttr(
74 initializer=paddle.nn.initializer.KaimingUniform()),
75 bias_attr=get_bias_attr(in_channels // 4), )
76
77 def forward(self, x, return_f=False):
78 x = self.conv1(x)
79 x = self.conv_bn1(x)
80 x = self.conv2(x)
81 x = self.conv_bn2(x)
82 if return_f is True:
83 f = x
84 x = self.conv3(x)
85 x = F.sigmoid(x)
86 if return_f is True:
87 return x, f
88 return x
89
90
91 class DBHead(nn.Layer):
92 """
93 Differentiable Binarization (DB) for text detection:
94 see https://arxiv.org/abs/1911.08947
95 args:
96 params(dict): super parameters for build DB network
97 """
98
99 def __init__(self, in_channels, k=50, **kwargs):
100 super(DBHead, self).__init__()
101 self.k = k
102 self.binarize = Head(in_channels, **kwargs)
103 self.thresh = Head(in_channels, **kwargs)
104
105 def step_function(self, x, y):
106 return paddle.reciprocal(1 + paddle.exp(-self.k * (x - y)))
107
108 def forward(self, x, targets=None):
109 shrink_maps = self.binarize(x)
110 if not self.training:
111 return {'maps': shrink_maps}
112
113 threshold_maps = self.thresh(x)
114 binary_maps = self.step_function(shrink_maps, threshold_maps)
115 y = paddle.concat([shrink_maps, threshold_maps, binary_maps], axis=1)
116 return {'maps': y}
117
118
119 class LocalModule(nn.Layer):
120 def __init__(self, in_c, mid_c, use_distance=True):
121 super(self.__class__, self).__init__()
122 self.last_3 = ConvBNLayer(in_c + 1, mid_c, 3, 1, 1, act='relu')
123 self.last_1 = nn.Conv2D(mid_c, 1, 1, 1, 0)
124
125 def forward(self, x, init_map, distance_map):
126 outf = paddle.concat([init_map, x], axis=1)
127 # last Conv
128 out = self.last_1(self.last_3(outf))
129 return out
130
131
132 class CBNHeadLocal(DBHead):
133 def __init__(self, in_channels, k=50, mode='small', **kwargs):
134 super(CBNHeadLocal, self).__init__(in_channels, k, **kwargs)
135 self.mode = mode
136
137 self.up_conv = nn.Upsample(scale_factor=2, mode="nearest", align_mode=1)
138 if self.mode == 'large':
139 self.cbn_layer = LocalModule(in_channels // 4, in_channels // 4)
140 elif self.mode == 'small':
141 self.cbn_layer = LocalModule(in_channels // 4, in_channels // 8)
142
143 def forward(self, x, targets=None):
144 shrink_maps, f = self.binarize(x, return_f=True)
145 base_maps = shrink_maps
146 cbn_maps = self.cbn_layer(self.up_conv(f), shrink_maps, None)
147 cbn_maps = F.sigmoid(cbn_maps)
148 if not self.training:
149 return {'maps': 0.5 * (base_maps + cbn_maps), 'cbn_maps': cbn_maps}
150
151 threshold_maps = self.thresh(x)
152 binary_maps = self.step_function(shrink_maps, threshold_maps)
153 y = paddle.concat([cbn_maps, threshold_maps, binary_maps], axis=1)
154 return {'maps': y, 'distance_maps': cbn_maps, 'cbn_maps': binary_maps}
155
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/ppocr/modeling/heads/__init__.py b/ppocr/modeling/heads/__init__.py
--- a/ppocr/modeling/heads/__init__.py
+++ b/ppocr/modeling/heads/__init__.py
@@ -17,7 +17,7 @@
def build_head(config):
# det head
- from .det_db_head import DBHead, CBNHeadLocal
+ from .det_db_head import DBHead, PFHeadLocal
from .det_east_head import EASTHead
from .det_sast_head import SASTHead
from .det_pse_head import PSEHead
@@ -56,7 +56,7 @@
'TableAttentionHead', 'SARHead', 'AsterHead', 'SDMGRHead', 'PRENHead',
'MultiHead', 'ABINetHead', 'TableMasterHead', 'SPINAttentionHead',
'VLHead', 'SLAHead', 'RobustScannerHead', 'CT_Head', 'RFLHead',
- 'DRRGHead', 'CANHead', 'SATRNHead', 'CBNHeadLocal'
+ 'DRRGHead', 'CANHead', 'SATRNHead', 'PFHeadLocal'
]
if config['name'] == 'DRRGHead':
diff --git a/ppocr/modeling/heads/det_db_head.py b/ppocr/modeling/heads/det_db_head.py
--- a/ppocr/modeling/heads/det_db_head.py
+++ b/ppocr/modeling/heads/det_db_head.py
@@ -129,9 +129,9 @@
return out
-class CBNHeadLocal(DBHead):
+class PFHeadLocal(DBHead):
def __init__(self, in_channels, k=50, mode='small', **kwargs):
- super(CBNHeadLocal, self).__init__(in_channels, k, **kwargs)
+ super(PFHeadLocal, self).__init__(in_channels, k, **kwargs)
self.mode = mode
self.up_conv = nn.Upsample(scale_factor=2, mode="nearest", align_mode=1)
|
{"golden_diff": "diff --git a/ppocr/modeling/heads/__init__.py b/ppocr/modeling/heads/__init__.py\n--- a/ppocr/modeling/heads/__init__.py\n+++ b/ppocr/modeling/heads/__init__.py\n@@ -17,7 +17,7 @@\n \n def build_head(config):\n # det head\n- from .det_db_head import DBHead, CBNHeadLocal\n+ from .det_db_head import DBHead, PFHeadLocal\n from .det_east_head import EASTHead\n from .det_sast_head import SASTHead\n from .det_pse_head import PSEHead\n@@ -56,7 +56,7 @@\n 'TableAttentionHead', 'SARHead', 'AsterHead', 'SDMGRHead', 'PRENHead',\n 'MultiHead', 'ABINetHead', 'TableMasterHead', 'SPINAttentionHead',\n 'VLHead', 'SLAHead', 'RobustScannerHead', 'CT_Head', 'RFLHead',\n- 'DRRGHead', 'CANHead', 'SATRNHead', 'CBNHeadLocal'\n+ 'DRRGHead', 'CANHead', 'SATRNHead', 'PFHeadLocal'\n ]\n \n if config['name'] == 'DRRGHead':\ndiff --git a/ppocr/modeling/heads/det_db_head.py b/ppocr/modeling/heads/det_db_head.py\n--- a/ppocr/modeling/heads/det_db_head.py\n+++ b/ppocr/modeling/heads/det_db_head.py\n@@ -129,9 +129,9 @@\n return out\n \n \n-class CBNHeadLocal(DBHead):\n+class PFHeadLocal(DBHead):\n def __init__(self, in_channels, k=50, mode='small', **kwargs):\n- super(CBNHeadLocal, self).__init__(in_channels, k, **kwargs)\n+ super(PFHeadLocal, self).__init__(in_channels, k, **kwargs)\n self.mode = mode\n \n self.up_conv = nn.Upsample(scale_factor=2, mode=\"nearest\", align_mode=1)\n", "issue": "paddleocr\u4e2dgen_lable.py\u5bf9icdar2015\u6570\u636e\u8f6c\u6362\u6709\u8bef\n\r\n\r\n\u6211\u7528gen_lable.py\u8f6c\u6362\u7684\u6570\u636e\u662f\u8fd9\u6837\u5b50\u7684\uff0c\u6211\u611f\u89c9\u597d\u50cf\u662f\u9519\u8bef\u7684\r\n\r\n\u4e0d\u5e94\u8be5\u662f\uff1ach4_training_images/img_1.jpg\t[{\"transcription\": \"Genaxis Theatre\", \"points\": [[377, 117], [463, 117], [465, 130], [378, 130]\n", "before_files": [{"content": "# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n__all__ = ['build_head']\n\n\ndef build_head(config):\n # det head\n from .det_db_head import DBHead, CBNHeadLocal\n from .det_east_head import EASTHead\n from .det_sast_head import SASTHead\n from .det_pse_head import PSEHead\n from .det_fce_head import FCEHead\n from .e2e_pg_head import PGHead\n from .det_ct_head import CT_Head\n # rec head\n from .rec_ctc_head import CTCHead\n from .rec_att_head import AttentionHead\n from .rec_srn_head import SRNHead\n from .rec_nrtr_head import Transformer\n from .rec_sar_head import SARHead\n from .rec_aster_head import AsterHead\n from .rec_pren_head import PRENHead\n from .rec_multi_head import MultiHead\n from .rec_spin_att_head import SPINAttentionHead\n from .rec_abinet_head import ABINetHead\n from .rec_robustscanner_head import RobustScannerHead\n from .rec_visionlan_head import VLHead\n from .rec_rfl_head import RFLHead\n from .rec_can_head import CANHead\n from .rec_satrn_head import SATRNHead\n\n # cls head\n from .cls_head import ClsHead\n\n #kie head\n from .kie_sdmgr_head import SDMGRHead\n\n from .table_att_head import TableAttentionHead, SLAHead\n from .table_master_head import TableMasterHead\n\n support_dict = [\n 'DBHead', 'PSEHead', 'FCEHead', 'EASTHead', 'SASTHead', 'CTCHead',\n 'ClsHead', 'AttentionHead', 'SRNHead', 'PGHead', 'Transformer',\n 'TableAttentionHead', 'SARHead', 'AsterHead', 'SDMGRHead', 'PRENHead',\n 'MultiHead', 'ABINetHead', 'TableMasterHead', 'SPINAttentionHead',\n 'VLHead', 'SLAHead', 'RobustScannerHead', 'CT_Head', 'RFLHead',\n 'DRRGHead', 'CANHead', 'SATRNHead', 'CBNHeadLocal'\n ]\n\n if config['name'] == 'DRRGHead':\n from .det_drrg_head import DRRGHead\n support_dict.append('DRRGHead')\n\n #table head\n\n module_name = config.pop('name')\n assert module_name in support_dict, Exception('head only support {}'.format(\n support_dict))\n module_class = eval(module_name)(**config)\n return module_class\n", "path": "ppocr/modeling/heads/__init__.py"}, {"content": "# copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport math\nimport paddle\nfrom paddle import nn\nimport paddle.nn.functional as F\nfrom paddle import ParamAttr\nfrom ppocr.modeling.backbones.det_mobilenet_v3 import ConvBNLayer\n\n\ndef get_bias_attr(k):\n stdv = 1.0 / math.sqrt(k * 1.0)\n initializer = paddle.nn.initializer.Uniform(-stdv, stdv)\n bias_attr = ParamAttr(initializer=initializer)\n return bias_attr\n\n\nclass Head(nn.Layer):\n def __init__(self, in_channels, kernel_list=[3, 2, 2], **kwargs):\n super(Head, self).__init__()\n\n self.conv1 = nn.Conv2D(\n in_channels=in_channels,\n out_channels=in_channels // 4,\n kernel_size=kernel_list[0],\n padding=int(kernel_list[0] // 2),\n weight_attr=ParamAttr(),\n bias_attr=False)\n self.conv_bn1 = nn.BatchNorm(\n num_channels=in_channels // 4,\n param_attr=ParamAttr(\n initializer=paddle.nn.initializer.Constant(value=1.0)),\n bias_attr=ParamAttr(\n initializer=paddle.nn.initializer.Constant(value=1e-4)),\n act='relu')\n\n self.conv2 = nn.Conv2DTranspose(\n in_channels=in_channels // 4,\n out_channels=in_channels // 4,\n kernel_size=kernel_list[1],\n stride=2,\n weight_attr=ParamAttr(\n initializer=paddle.nn.initializer.KaimingUniform()),\n bias_attr=get_bias_attr(in_channels // 4))\n self.conv_bn2 = nn.BatchNorm(\n num_channels=in_channels // 4,\n param_attr=ParamAttr(\n initializer=paddle.nn.initializer.Constant(value=1.0)),\n bias_attr=ParamAttr(\n initializer=paddle.nn.initializer.Constant(value=1e-4)),\n act=\"relu\")\n self.conv3 = nn.Conv2DTranspose(\n in_channels=in_channels // 4,\n out_channels=1,\n kernel_size=kernel_list[2],\n stride=2,\n weight_attr=ParamAttr(\n initializer=paddle.nn.initializer.KaimingUniform()),\n bias_attr=get_bias_attr(in_channels // 4), )\n\n def forward(self, x, return_f=False):\n x = self.conv1(x)\n x = self.conv_bn1(x)\n x = self.conv2(x)\n x = self.conv_bn2(x)\n if return_f is True:\n f = x\n x = self.conv3(x)\n x = F.sigmoid(x)\n if return_f is True:\n return x, f\n return x\n\n\nclass DBHead(nn.Layer):\n \"\"\"\n Differentiable Binarization (DB) for text detection:\n see https://arxiv.org/abs/1911.08947\n args:\n params(dict): super parameters for build DB network\n \"\"\"\n\n def __init__(self, in_channels, k=50, **kwargs):\n super(DBHead, self).__init__()\n self.k = k\n self.binarize = Head(in_channels, **kwargs)\n self.thresh = Head(in_channels, **kwargs)\n\n def step_function(self, x, y):\n return paddle.reciprocal(1 + paddle.exp(-self.k * (x - y)))\n\n def forward(self, x, targets=None):\n shrink_maps = self.binarize(x)\n if not self.training:\n return {'maps': shrink_maps}\n\n threshold_maps = self.thresh(x)\n binary_maps = self.step_function(shrink_maps, threshold_maps)\n y = paddle.concat([shrink_maps, threshold_maps, binary_maps], axis=1)\n return {'maps': y}\n\n\nclass LocalModule(nn.Layer):\n def __init__(self, in_c, mid_c, use_distance=True):\n super(self.__class__, self).__init__()\n self.last_3 = ConvBNLayer(in_c + 1, mid_c, 3, 1, 1, act='relu')\n self.last_1 = nn.Conv2D(mid_c, 1, 1, 1, 0)\n\n def forward(self, x, init_map, distance_map):\n outf = paddle.concat([init_map, x], axis=1)\n # last Conv\n out = self.last_1(self.last_3(outf))\n return out\n\n\nclass CBNHeadLocal(DBHead):\n def __init__(self, in_channels, k=50, mode='small', **kwargs):\n super(CBNHeadLocal, self).__init__(in_channels, k, **kwargs)\n self.mode = mode\n\n self.up_conv = nn.Upsample(scale_factor=2, mode=\"nearest\", align_mode=1)\n if self.mode == 'large':\n self.cbn_layer = LocalModule(in_channels // 4, in_channels // 4)\n elif self.mode == 'small':\n self.cbn_layer = LocalModule(in_channels // 4, in_channels // 8)\n\n def forward(self, x, targets=None):\n shrink_maps, f = self.binarize(x, return_f=True)\n base_maps = shrink_maps\n cbn_maps = self.cbn_layer(self.up_conv(f), shrink_maps, None)\n cbn_maps = F.sigmoid(cbn_maps)\n if not self.training:\n return {'maps': 0.5 * (base_maps + cbn_maps), 'cbn_maps': cbn_maps}\n\n threshold_maps = self.thresh(x)\n binary_maps = self.step_function(shrink_maps, threshold_maps)\n y = paddle.concat([cbn_maps, threshold_maps, binary_maps], axis=1)\n return {'maps': y, 'distance_maps': cbn_maps, 'cbn_maps': binary_maps}\n", "path": "ppocr/modeling/heads/det_db_head.py"}], "after_files": [{"content": "# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n__all__ = ['build_head']\n\n\ndef build_head(config):\n # det head\n from .det_db_head import DBHead, PFHeadLocal\n from .det_east_head import EASTHead\n from .det_sast_head import SASTHead\n from .det_pse_head import PSEHead\n from .det_fce_head import FCEHead\n from .e2e_pg_head import PGHead\n from .det_ct_head import CT_Head\n # rec head\n from .rec_ctc_head import CTCHead\n from .rec_att_head import AttentionHead\n from .rec_srn_head import SRNHead\n from .rec_nrtr_head import Transformer\n from .rec_sar_head import SARHead\n from .rec_aster_head import AsterHead\n from .rec_pren_head import PRENHead\n from .rec_multi_head import MultiHead\n from .rec_spin_att_head import SPINAttentionHead\n from .rec_abinet_head import ABINetHead\n from .rec_robustscanner_head import RobustScannerHead\n from .rec_visionlan_head import VLHead\n from .rec_rfl_head import RFLHead\n from .rec_can_head import CANHead\n from .rec_satrn_head import SATRNHead\n\n # cls head\n from .cls_head import ClsHead\n\n #kie head\n from .kie_sdmgr_head import SDMGRHead\n\n from .table_att_head import TableAttentionHead, SLAHead\n from .table_master_head import TableMasterHead\n\n support_dict = [\n 'DBHead', 'PSEHead', 'FCEHead', 'EASTHead', 'SASTHead', 'CTCHead',\n 'ClsHead', 'AttentionHead', 'SRNHead', 'PGHead', 'Transformer',\n 'TableAttentionHead', 'SARHead', 'AsterHead', 'SDMGRHead', 'PRENHead',\n 'MultiHead', 'ABINetHead', 'TableMasterHead', 'SPINAttentionHead',\n 'VLHead', 'SLAHead', 'RobustScannerHead', 'CT_Head', 'RFLHead',\n 'DRRGHead', 'CANHead', 'SATRNHead', 'PFHeadLocal'\n ]\n\n if config['name'] == 'DRRGHead':\n from .det_drrg_head import DRRGHead\n support_dict.append('DRRGHead')\n\n #table head\n\n module_name = config.pop('name')\n assert module_name in support_dict, Exception('head only support {}'.format(\n support_dict))\n module_class = eval(module_name)(**config)\n return module_class\n", "path": "ppocr/modeling/heads/__init__.py"}, {"content": "# copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport math\nimport paddle\nfrom paddle import nn\nimport paddle.nn.functional as F\nfrom paddle import ParamAttr\nfrom ppocr.modeling.backbones.det_mobilenet_v3 import ConvBNLayer\n\n\ndef get_bias_attr(k):\n stdv = 1.0 / math.sqrt(k * 1.0)\n initializer = paddle.nn.initializer.Uniform(-stdv, stdv)\n bias_attr = ParamAttr(initializer=initializer)\n return bias_attr\n\n\nclass Head(nn.Layer):\n def __init__(self, in_channels, kernel_list=[3, 2, 2], **kwargs):\n super(Head, self).__init__()\n\n self.conv1 = nn.Conv2D(\n in_channels=in_channels,\n out_channels=in_channels // 4,\n kernel_size=kernel_list[0],\n padding=int(kernel_list[0] // 2),\n weight_attr=ParamAttr(),\n bias_attr=False)\n self.conv_bn1 = nn.BatchNorm(\n num_channels=in_channels // 4,\n param_attr=ParamAttr(\n initializer=paddle.nn.initializer.Constant(value=1.0)),\n bias_attr=ParamAttr(\n initializer=paddle.nn.initializer.Constant(value=1e-4)),\n act='relu')\n\n self.conv2 = nn.Conv2DTranspose(\n in_channels=in_channels // 4,\n out_channels=in_channels // 4,\n kernel_size=kernel_list[1],\n stride=2,\n weight_attr=ParamAttr(\n initializer=paddle.nn.initializer.KaimingUniform()),\n bias_attr=get_bias_attr(in_channels // 4))\n self.conv_bn2 = nn.BatchNorm(\n num_channels=in_channels // 4,\n param_attr=ParamAttr(\n initializer=paddle.nn.initializer.Constant(value=1.0)),\n bias_attr=ParamAttr(\n initializer=paddle.nn.initializer.Constant(value=1e-4)),\n act=\"relu\")\n self.conv3 = nn.Conv2DTranspose(\n in_channels=in_channels // 4,\n out_channels=1,\n kernel_size=kernel_list[2],\n stride=2,\n weight_attr=ParamAttr(\n initializer=paddle.nn.initializer.KaimingUniform()),\n bias_attr=get_bias_attr(in_channels // 4), )\n\n def forward(self, x, return_f=False):\n x = self.conv1(x)\n x = self.conv_bn1(x)\n x = self.conv2(x)\n x = self.conv_bn2(x)\n if return_f is True:\n f = x\n x = self.conv3(x)\n x = F.sigmoid(x)\n if return_f is True:\n return x, f\n return x\n\n\nclass DBHead(nn.Layer):\n \"\"\"\n Differentiable Binarization (DB) for text detection:\n see https://arxiv.org/abs/1911.08947\n args:\n params(dict): super parameters for build DB network\n \"\"\"\n\n def __init__(self, in_channels, k=50, **kwargs):\n super(DBHead, self).__init__()\n self.k = k\n self.binarize = Head(in_channels, **kwargs)\n self.thresh = Head(in_channels, **kwargs)\n\n def step_function(self, x, y):\n return paddle.reciprocal(1 + paddle.exp(-self.k * (x - y)))\n\n def forward(self, x, targets=None):\n shrink_maps = self.binarize(x)\n if not self.training:\n return {'maps': shrink_maps}\n\n threshold_maps = self.thresh(x)\n binary_maps = self.step_function(shrink_maps, threshold_maps)\n y = paddle.concat([shrink_maps, threshold_maps, binary_maps], axis=1)\n return {'maps': y}\n\n\nclass LocalModule(nn.Layer):\n def __init__(self, in_c, mid_c, use_distance=True):\n super(self.__class__, self).__init__()\n self.last_3 = ConvBNLayer(in_c + 1, mid_c, 3, 1, 1, act='relu')\n self.last_1 = nn.Conv2D(mid_c, 1, 1, 1, 0)\n\n def forward(self, x, init_map, distance_map):\n outf = paddle.concat([init_map, x], axis=1)\n # last Conv\n out = self.last_1(self.last_3(outf))\n return out\n\n\nclass PFHeadLocal(DBHead):\n def __init__(self, in_channels, k=50, mode='small', **kwargs):\n super(PFHeadLocal, self).__init__(in_channels, k, **kwargs)\n self.mode = mode\n\n self.up_conv = nn.Upsample(scale_factor=2, mode=\"nearest\", align_mode=1)\n if self.mode == 'large':\n self.cbn_layer = LocalModule(in_channels // 4, in_channels // 4)\n elif self.mode == 'small':\n self.cbn_layer = LocalModule(in_channels // 4, in_channels // 8)\n\n def forward(self, x, targets=None):\n shrink_maps, f = self.binarize(x, return_f=True)\n base_maps = shrink_maps\n cbn_maps = self.cbn_layer(self.up_conv(f), shrink_maps, None)\n cbn_maps = F.sigmoid(cbn_maps)\n if not self.training:\n return {'maps': 0.5 * (base_maps + cbn_maps), 'cbn_maps': cbn_maps}\n\n threshold_maps = self.thresh(x)\n binary_maps = self.step_function(shrink_maps, threshold_maps)\n y = paddle.concat([cbn_maps, threshold_maps, binary_maps], axis=1)\n return {'maps': y, 'distance_maps': cbn_maps, 'cbn_maps': binary_maps}\n", "path": "ppocr/modeling/heads/det_db_head.py"}]}
| 3,091 | 468 |
gh_patches_debug_21767
|
rasdani/github-patches
|
git_diff
|
Pylons__pyramid-3061
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
update pyramid's default execution policy to attempt to render any uncaught exception
If an exception propagates all the way to the execution policy there should be a last-ditch effort to render a response... this is part of the larger consistency change to pyramid_tm and pyramid_retry. With the addition of `request.invoke_exception_view` it's hopefully clear that exception views should be callable from various parts of the lifecycle and pyramid_retry will attempt to invoke exception views at the end if an exception is uncaught. Also pyramid_tm will attempt to invoke exceptions on commit/abort to handle those specifically.
I've documented the process here in a little ascii diagram[1] reproduced here:
```
pyramid_retry +--------> if exc not retryable, render response
if exc retryable, ignore exc and try again
+
|
|
v
pyramid_debugtoolbar
+
|
|
v
pyramid_tm +--------> if exc caught then abort and reraise
if request.exception then abort and return response
+ if abort or commit raises, render response
|
|
v
excview +--------> if exc caught, render response and set request.exception
if no exception then reraise
+
|
|
v
app
```
[1] https://gist.github.com/mmerickel/2e83f4af6c7d5eae34947b8f1f075f61
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyramid/router.py`
Content:
```
1 from zope.interface import (
2 implementer,
3 providedBy,
4 )
5
6 from pyramid.interfaces import (
7 IDebugLogger,
8 IExecutionPolicy,
9 IRequest,
10 IRequestExtensions,
11 IRootFactory,
12 IRouteRequest,
13 IRouter,
14 IRequestFactory,
15 IRoutesMapper,
16 ITraverser,
17 ITweens,
18 )
19
20 from pyramid.events import (
21 ContextFound,
22 NewRequest,
23 NewResponse,
24 BeforeTraversal,
25 )
26
27 from pyramid.httpexceptions import HTTPNotFound
28 from pyramid.request import Request
29 from pyramid.view import _call_view
30 from pyramid.request import apply_request_extensions
31 from pyramid.threadlocal import manager
32
33 from pyramid.traversal import (
34 DefaultRootFactory,
35 ResourceTreeTraverser,
36 )
37
38 @implementer(IRouter)
39 class Router(object):
40
41 debug_notfound = False
42 debug_routematch = False
43
44 threadlocal_manager = manager
45
46 def __init__(self, registry):
47 q = registry.queryUtility
48 self.logger = q(IDebugLogger)
49 self.root_factory = q(IRootFactory, default=DefaultRootFactory)
50 self.routes_mapper = q(IRoutesMapper)
51 self.request_factory = q(IRequestFactory, default=Request)
52 self.request_extensions = q(IRequestExtensions)
53 self.execution_policy = q(
54 IExecutionPolicy, default=default_execution_policy)
55 self.orig_handle_request = self.handle_request
56 tweens = q(ITweens)
57 if tweens is not None:
58 self.handle_request = tweens(self.handle_request, registry)
59 self.root_policy = self.root_factory # b/w compat
60 self.registry = registry
61 settings = registry.settings
62 if settings is not None:
63 self.debug_notfound = settings['debug_notfound']
64 self.debug_routematch = settings['debug_routematch']
65
66 def handle_request(self, request):
67 attrs = request.__dict__
68 registry = attrs['registry']
69
70 request.request_iface = IRequest
71 context = None
72 routes_mapper = self.routes_mapper
73 debug_routematch = self.debug_routematch
74 adapters = registry.adapters
75 has_listeners = registry.has_listeners
76 notify = registry.notify
77 logger = self.logger
78
79 has_listeners and notify(NewRequest(request))
80 # find the root object
81 root_factory = self.root_factory
82 if routes_mapper is not None:
83 info = routes_mapper(request)
84 match, route = info['match'], info['route']
85 if route is None:
86 if debug_routematch:
87 msg = ('no route matched for url %s' %
88 request.url)
89 logger and logger.debug(msg)
90 else:
91 attrs['matchdict'] = match
92 attrs['matched_route'] = route
93
94 if debug_routematch:
95 msg = (
96 'route matched for url %s; '
97 'route_name: %r, '
98 'path_info: %r, '
99 'pattern: %r, '
100 'matchdict: %r, '
101 'predicates: %r' % (
102 request.url,
103 route.name,
104 request.path_info,
105 route.pattern,
106 match,
107 ', '.join([p.text() for p in route.predicates]))
108 )
109 logger and logger.debug(msg)
110
111 request.request_iface = registry.queryUtility(
112 IRouteRequest,
113 name=route.name,
114 default=IRequest)
115
116 root_factory = route.factory or self.root_factory
117
118 # Notify anyone listening that we are about to start traversal
119 #
120 # Notify before creating root_factory in case we want to do something
121 # special on a route we may have matched. See
122 # https://github.com/Pylons/pyramid/pull/1876 for ideas of what is
123 # possible.
124 has_listeners and notify(BeforeTraversal(request))
125
126 # Create the root factory
127 root = root_factory(request)
128 attrs['root'] = root
129
130 # We are about to traverse and find a context
131 traverser = adapters.queryAdapter(root, ITraverser)
132 if traverser is None:
133 traverser = ResourceTreeTraverser(root)
134 tdict = traverser(request)
135
136 context, view_name, subpath, traversed, vroot, vroot_path = (
137 tdict['context'],
138 tdict['view_name'],
139 tdict['subpath'],
140 tdict['traversed'],
141 tdict['virtual_root'],
142 tdict['virtual_root_path']
143 )
144
145 attrs.update(tdict)
146
147 # Notify anyone listening that we have a context and traversal is
148 # complete
149 has_listeners and notify(ContextFound(request))
150
151 # find a view callable
152 context_iface = providedBy(context)
153 response = _call_view(
154 registry,
155 request,
156 context,
157 context_iface,
158 view_name
159 )
160
161 if response is None:
162 if self.debug_notfound:
163 msg = (
164 'debug_notfound of url %s; path_info: %r, '
165 'context: %r, view_name: %r, subpath: %r, '
166 'traversed: %r, root: %r, vroot: %r, '
167 'vroot_path: %r' % (
168 request.url, request.path_info, context,
169 view_name, subpath, traversed, root, vroot,
170 vroot_path)
171 )
172 logger and logger.debug(msg)
173 else:
174 msg = request.path_info
175 raise HTTPNotFound(msg)
176
177 return response
178
179 def invoke_subrequest(self, request, use_tweens=False):
180 """Obtain a response object from the Pyramid application based on
181 information in the ``request`` object provided. The ``request``
182 object must be an object that implements the Pyramid request
183 interface (such as a :class:`pyramid.request.Request` instance). If
184 ``use_tweens`` is ``True``, the request will be sent to the
185 :term:`tween` in the tween stack closest to the request ingress. If
186 ``use_tweens`` is ``False``, the request will be sent to the main
187 router handler, and no tweens will be invoked.
188
189 See the API for pyramid.request for complete documentation.
190 """
191 request.registry = self.registry
192 request.invoke_subrequest = self.invoke_subrequest
193 return self.invoke_request(
194 request,
195 _use_tweens=use_tweens,
196 _apply_extensions=True,
197 )
198
199 def make_request(self, environ):
200 request = self.request_factory(environ)
201 request.registry = self.registry
202 request.invoke_subrequest = self.invoke_subrequest
203 extensions = self.request_extensions
204 if extensions is not None:
205 apply_request_extensions(request, extensions=extensions)
206 return request
207
208 def invoke_request(self, request,
209 _use_tweens=True, _apply_extensions=False):
210 registry = self.registry
211 has_listeners = self.registry.has_listeners
212 notify = self.registry.notify
213 threadlocals = {'registry': registry, 'request': request}
214 manager = self.threadlocal_manager
215 manager.push(threadlocals)
216
217 if _use_tweens:
218 handle_request = self.handle_request
219 else:
220 handle_request = self.orig_handle_request
221
222 try:
223
224 try:
225 extensions = self.request_extensions
226 if _apply_extensions and extensions is not None:
227 apply_request_extensions(request, extensions=extensions)
228 response = handle_request(request)
229
230 if request.response_callbacks:
231 request._process_response_callbacks(response)
232
233 has_listeners and notify(NewResponse(request, response))
234
235 return response
236
237 finally:
238 if request.finished_callbacks:
239 request._process_finished_callbacks()
240
241 finally:
242 manager.pop()
243
244 def __call__(self, environ, start_response):
245 """
246 Accept ``environ`` and ``start_response``; create a
247 :term:`request` and route the request to a :app:`Pyramid`
248 view based on introspection of :term:`view configuration`
249 within the application registry; call ``start_response`` and
250 return an iterable.
251 """
252 response = self.execution_policy(environ, self)
253 return response(environ, start_response)
254
255
256 def default_execution_policy(environ, router):
257 request = router.make_request(environ)
258 return router.invoke_request(request)
259
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pyramid/router.py b/pyramid/router.py
--- a/pyramid/router.py
+++ b/pyramid/router.py
@@ -1,3 +1,4 @@
+import sys
from zope.interface import (
implementer,
providedBy,
@@ -24,6 +25,7 @@
BeforeTraversal,
)
+from pyramid.compat import reraise
from pyramid.httpexceptions import HTTPNotFound
from pyramid.request import Request
from pyramid.view import _call_view
@@ -252,7 +254,15 @@
response = self.execution_policy(environ, self)
return response(environ, start_response)
-
def default_execution_policy(environ, router):
request = router.make_request(environ)
- return router.invoke_request(request)
+ try:
+ return router.invoke_request(request)
+ except Exception:
+ exc_info = sys.exc_info()
+ try:
+ return request.invoke_exception_view(exc_info)
+ except HTTPNotFound:
+ reraise(*exc_info)
+ finally:
+ del exc_info # avoid local ref cycle
|
{"golden_diff": "diff --git a/pyramid/router.py b/pyramid/router.py\n--- a/pyramid/router.py\n+++ b/pyramid/router.py\n@@ -1,3 +1,4 @@\n+import sys\n from zope.interface import (\n implementer,\n providedBy,\n@@ -24,6 +25,7 @@\n BeforeTraversal,\n )\n \n+from pyramid.compat import reraise\n from pyramid.httpexceptions import HTTPNotFound\n from pyramid.request import Request\n from pyramid.view import _call_view\n@@ -252,7 +254,15 @@\n response = self.execution_policy(environ, self)\n return response(environ, start_response)\n \n-\n def default_execution_policy(environ, router):\n request = router.make_request(environ)\n- return router.invoke_request(request)\n+ try:\n+ return router.invoke_request(request)\n+ except Exception:\n+ exc_info = sys.exc_info()\n+ try:\n+ return request.invoke_exception_view(exc_info)\n+ except HTTPNotFound:\n+ reraise(*exc_info)\n+ finally:\n+ del exc_info # avoid local ref cycle\n", "issue": "update pyramid's default execution policy to attempt to render any uncaught exception\nIf an exception propagates all the way to the execution policy there should be a last-ditch effort to render a response... this is part of the larger consistency change to pyramid_tm and pyramid_retry. With the addition of `request.invoke_exception_view` it's hopefully clear that exception views should be callable from various parts of the lifecycle and pyramid_retry will attempt to invoke exception views at the end if an exception is uncaught. Also pyramid_tm will attempt to invoke exceptions on commit/abort to handle those specifically.\r\n\r\nI've documented the process here in a little ascii diagram[1] reproduced here:\r\n```\r\n pyramid_retry +--------> if exc not retryable, render response\r\n if exc retryable, ignore exc and try again\r\n +\r\n |\r\n |\r\n v\r\n\r\npyramid_debugtoolbar\r\n\r\n +\r\n |\r\n |\r\n v\r\n\r\n pyramid_tm +--------> if exc caught then abort and reraise\r\n if request.exception then abort and return response\r\n + if abort or commit raises, render response\r\n |\r\n |\r\n v\r\n\r\n excview +--------> if exc caught, render response and set request.exception\r\n if no exception then reraise\r\n +\r\n |\r\n |\r\n v\r\n\r\n app\r\n```\r\n\r\n[1] https://gist.github.com/mmerickel/2e83f4af6c7d5eae34947b8f1f075f61\n", "before_files": [{"content": "from zope.interface import (\n implementer,\n providedBy,\n )\n\nfrom pyramid.interfaces import (\n IDebugLogger,\n IExecutionPolicy,\n IRequest,\n IRequestExtensions,\n IRootFactory,\n IRouteRequest,\n IRouter,\n IRequestFactory,\n IRoutesMapper,\n ITraverser,\n ITweens,\n )\n\nfrom pyramid.events import (\n ContextFound,\n NewRequest,\n NewResponse,\n BeforeTraversal,\n )\n\nfrom pyramid.httpexceptions import HTTPNotFound\nfrom pyramid.request import Request\nfrom pyramid.view import _call_view\nfrom pyramid.request import apply_request_extensions\nfrom pyramid.threadlocal import manager\n\nfrom pyramid.traversal import (\n DefaultRootFactory,\n ResourceTreeTraverser,\n )\n\n@implementer(IRouter)\nclass Router(object):\n\n debug_notfound = False\n debug_routematch = False\n\n threadlocal_manager = manager\n\n def __init__(self, registry):\n q = registry.queryUtility\n self.logger = q(IDebugLogger)\n self.root_factory = q(IRootFactory, default=DefaultRootFactory)\n self.routes_mapper = q(IRoutesMapper)\n self.request_factory = q(IRequestFactory, default=Request)\n self.request_extensions = q(IRequestExtensions)\n self.execution_policy = q(\n IExecutionPolicy, default=default_execution_policy)\n self.orig_handle_request = self.handle_request\n tweens = q(ITweens)\n if tweens is not None:\n self.handle_request = tweens(self.handle_request, registry)\n self.root_policy = self.root_factory # b/w compat\n self.registry = registry\n settings = registry.settings\n if settings is not None:\n self.debug_notfound = settings['debug_notfound']\n self.debug_routematch = settings['debug_routematch']\n\n def handle_request(self, request):\n attrs = request.__dict__\n registry = attrs['registry']\n\n request.request_iface = IRequest\n context = None\n routes_mapper = self.routes_mapper\n debug_routematch = self.debug_routematch\n adapters = registry.adapters\n has_listeners = registry.has_listeners\n notify = registry.notify\n logger = self.logger\n\n has_listeners and notify(NewRequest(request))\n # find the root object\n root_factory = self.root_factory\n if routes_mapper is not None:\n info = routes_mapper(request)\n match, route = info['match'], info['route']\n if route is None:\n if debug_routematch:\n msg = ('no route matched for url %s' %\n request.url)\n logger and logger.debug(msg)\n else:\n attrs['matchdict'] = match\n attrs['matched_route'] = route\n\n if debug_routematch:\n msg = (\n 'route matched for url %s; '\n 'route_name: %r, '\n 'path_info: %r, '\n 'pattern: %r, '\n 'matchdict: %r, '\n 'predicates: %r' % (\n request.url,\n route.name,\n request.path_info,\n route.pattern,\n match,\n ', '.join([p.text() for p in route.predicates]))\n )\n logger and logger.debug(msg)\n\n request.request_iface = registry.queryUtility(\n IRouteRequest,\n name=route.name,\n default=IRequest)\n\n root_factory = route.factory or self.root_factory\n\n # Notify anyone listening that we are about to start traversal\n #\n # Notify before creating root_factory in case we want to do something\n # special on a route we may have matched. See\n # https://github.com/Pylons/pyramid/pull/1876 for ideas of what is\n # possible.\n has_listeners and notify(BeforeTraversal(request))\n\n # Create the root factory\n root = root_factory(request)\n attrs['root'] = root\n\n # We are about to traverse and find a context\n traverser = adapters.queryAdapter(root, ITraverser)\n if traverser is None:\n traverser = ResourceTreeTraverser(root)\n tdict = traverser(request)\n\n context, view_name, subpath, traversed, vroot, vroot_path = (\n tdict['context'],\n tdict['view_name'],\n tdict['subpath'],\n tdict['traversed'],\n tdict['virtual_root'],\n tdict['virtual_root_path']\n )\n\n attrs.update(tdict)\n\n # Notify anyone listening that we have a context and traversal is\n # complete\n has_listeners and notify(ContextFound(request))\n\n # find a view callable\n context_iface = providedBy(context)\n response = _call_view(\n registry,\n request,\n context,\n context_iface,\n view_name\n )\n\n if response is None:\n if self.debug_notfound:\n msg = (\n 'debug_notfound of url %s; path_info: %r, '\n 'context: %r, view_name: %r, subpath: %r, '\n 'traversed: %r, root: %r, vroot: %r, '\n 'vroot_path: %r' % (\n request.url, request.path_info, context,\n view_name, subpath, traversed, root, vroot,\n vroot_path)\n )\n logger and logger.debug(msg)\n else:\n msg = request.path_info\n raise HTTPNotFound(msg)\n\n return response\n\n def invoke_subrequest(self, request, use_tweens=False):\n \"\"\"Obtain a response object from the Pyramid application based on\n information in the ``request`` object provided. The ``request``\n object must be an object that implements the Pyramid request\n interface (such as a :class:`pyramid.request.Request` instance). If\n ``use_tweens`` is ``True``, the request will be sent to the\n :term:`tween` in the tween stack closest to the request ingress. If\n ``use_tweens`` is ``False``, the request will be sent to the main\n router handler, and no tweens will be invoked.\n\n See the API for pyramid.request for complete documentation.\n \"\"\"\n request.registry = self.registry\n request.invoke_subrequest = self.invoke_subrequest\n return self.invoke_request(\n request,\n _use_tweens=use_tweens,\n _apply_extensions=True,\n )\n\n def make_request(self, environ):\n request = self.request_factory(environ)\n request.registry = self.registry\n request.invoke_subrequest = self.invoke_subrequest\n extensions = self.request_extensions\n if extensions is not None:\n apply_request_extensions(request, extensions=extensions)\n return request\n\n def invoke_request(self, request,\n _use_tweens=True, _apply_extensions=False):\n registry = self.registry\n has_listeners = self.registry.has_listeners\n notify = self.registry.notify\n threadlocals = {'registry': registry, 'request': request}\n manager = self.threadlocal_manager\n manager.push(threadlocals)\n\n if _use_tweens:\n handle_request = self.handle_request\n else:\n handle_request = self.orig_handle_request\n\n try:\n\n try:\n extensions = self.request_extensions\n if _apply_extensions and extensions is not None:\n apply_request_extensions(request, extensions=extensions)\n response = handle_request(request)\n\n if request.response_callbacks:\n request._process_response_callbacks(response)\n\n has_listeners and notify(NewResponse(request, response))\n\n return response\n\n finally:\n if request.finished_callbacks:\n request._process_finished_callbacks()\n\n finally:\n manager.pop()\n\n def __call__(self, environ, start_response):\n \"\"\"\n Accept ``environ`` and ``start_response``; create a\n :term:`request` and route the request to a :app:`Pyramid`\n view based on introspection of :term:`view configuration`\n within the application registry; call ``start_response`` and\n return an iterable.\n \"\"\"\n response = self.execution_policy(environ, self)\n return response(environ, start_response)\n\n\ndef default_execution_policy(environ, router):\n request = router.make_request(environ)\n return router.invoke_request(request)\n", "path": "pyramid/router.py"}], "after_files": [{"content": "import sys\nfrom zope.interface import (\n implementer,\n providedBy,\n )\n\nfrom pyramid.interfaces import (\n IDebugLogger,\n IExecutionPolicy,\n IRequest,\n IRequestExtensions,\n IRootFactory,\n IRouteRequest,\n IRouter,\n IRequestFactory,\n IRoutesMapper,\n ITraverser,\n ITweens,\n )\n\nfrom pyramid.events import (\n ContextFound,\n NewRequest,\n NewResponse,\n BeforeTraversal,\n )\n\nfrom pyramid.compat import reraise\nfrom pyramid.httpexceptions import HTTPNotFound\nfrom pyramid.request import Request\nfrom pyramid.view import _call_view\nfrom pyramid.request import apply_request_extensions\nfrom pyramid.threadlocal import manager\n\nfrom pyramid.traversal import (\n DefaultRootFactory,\n ResourceTreeTraverser,\n )\n\n@implementer(IRouter)\nclass Router(object):\n\n debug_notfound = False\n debug_routematch = False\n\n threadlocal_manager = manager\n\n def __init__(self, registry):\n q = registry.queryUtility\n self.logger = q(IDebugLogger)\n self.root_factory = q(IRootFactory, default=DefaultRootFactory)\n self.routes_mapper = q(IRoutesMapper)\n self.request_factory = q(IRequestFactory, default=Request)\n self.request_extensions = q(IRequestExtensions)\n self.execution_policy = q(\n IExecutionPolicy, default=default_execution_policy)\n self.orig_handle_request = self.handle_request\n tweens = q(ITweens)\n if tweens is not None:\n self.handle_request = tweens(self.handle_request, registry)\n self.root_policy = self.root_factory # b/w compat\n self.registry = registry\n settings = registry.settings\n if settings is not None:\n self.debug_notfound = settings['debug_notfound']\n self.debug_routematch = settings['debug_routematch']\n\n def handle_request(self, request):\n attrs = request.__dict__\n registry = attrs['registry']\n\n request.request_iface = IRequest\n context = None\n routes_mapper = self.routes_mapper\n debug_routematch = self.debug_routematch\n adapters = registry.adapters\n has_listeners = registry.has_listeners\n notify = registry.notify\n logger = self.logger\n\n has_listeners and notify(NewRequest(request))\n # find the root object\n root_factory = self.root_factory\n if routes_mapper is not None:\n info = routes_mapper(request)\n match, route = info['match'], info['route']\n if route is None:\n if debug_routematch:\n msg = ('no route matched for url %s' %\n request.url)\n logger and logger.debug(msg)\n else:\n attrs['matchdict'] = match\n attrs['matched_route'] = route\n\n if debug_routematch:\n msg = (\n 'route matched for url %s; '\n 'route_name: %r, '\n 'path_info: %r, '\n 'pattern: %r, '\n 'matchdict: %r, '\n 'predicates: %r' % (\n request.url,\n route.name,\n request.path_info,\n route.pattern,\n match,\n ', '.join([p.text() for p in route.predicates]))\n )\n logger and logger.debug(msg)\n\n request.request_iface = registry.queryUtility(\n IRouteRequest,\n name=route.name,\n default=IRequest)\n\n root_factory = route.factory or self.root_factory\n\n # Notify anyone listening that we are about to start traversal\n #\n # Notify before creating root_factory in case we want to do something\n # special on a route we may have matched. See\n # https://github.com/Pylons/pyramid/pull/1876 for ideas of what is\n # possible.\n has_listeners and notify(BeforeTraversal(request))\n\n # Create the root factory\n root = root_factory(request)\n attrs['root'] = root\n\n # We are about to traverse and find a context\n traverser = adapters.queryAdapter(root, ITraverser)\n if traverser is None:\n traverser = ResourceTreeTraverser(root)\n tdict = traverser(request)\n\n context, view_name, subpath, traversed, vroot, vroot_path = (\n tdict['context'],\n tdict['view_name'],\n tdict['subpath'],\n tdict['traversed'],\n tdict['virtual_root'],\n tdict['virtual_root_path']\n )\n\n attrs.update(tdict)\n\n # Notify anyone listening that we have a context and traversal is\n # complete\n has_listeners and notify(ContextFound(request))\n\n # find a view callable\n context_iface = providedBy(context)\n response = _call_view(\n registry,\n request,\n context,\n context_iface,\n view_name\n )\n\n if response is None:\n if self.debug_notfound:\n msg = (\n 'debug_notfound of url %s; path_info: %r, '\n 'context: %r, view_name: %r, subpath: %r, '\n 'traversed: %r, root: %r, vroot: %r, '\n 'vroot_path: %r' % (\n request.url, request.path_info, context,\n view_name, subpath, traversed, root, vroot,\n vroot_path)\n )\n logger and logger.debug(msg)\n else:\n msg = request.path_info\n raise HTTPNotFound(msg)\n\n return response\n\n def invoke_subrequest(self, request, use_tweens=False):\n \"\"\"Obtain a response object from the Pyramid application based on\n information in the ``request`` object provided. The ``request``\n object must be an object that implements the Pyramid request\n interface (such as a :class:`pyramid.request.Request` instance). If\n ``use_tweens`` is ``True``, the request will be sent to the\n :term:`tween` in the tween stack closest to the request ingress. If\n ``use_tweens`` is ``False``, the request will be sent to the main\n router handler, and no tweens will be invoked.\n\n See the API for pyramid.request for complete documentation.\n \"\"\"\n request.registry = self.registry\n request.invoke_subrequest = self.invoke_subrequest\n return self.invoke_request(\n request,\n _use_tweens=use_tweens,\n _apply_extensions=True,\n )\n\n def make_request(self, environ):\n request = self.request_factory(environ)\n request.registry = self.registry\n request.invoke_subrequest = self.invoke_subrequest\n extensions = self.request_extensions\n if extensions is not None:\n apply_request_extensions(request, extensions=extensions)\n return request\n\n def invoke_request(self, request,\n _use_tweens=True, _apply_extensions=False):\n registry = self.registry\n has_listeners = self.registry.has_listeners\n notify = self.registry.notify\n threadlocals = {'registry': registry, 'request': request}\n manager = self.threadlocal_manager\n manager.push(threadlocals)\n\n if _use_tweens:\n handle_request = self.handle_request\n else:\n handle_request = self.orig_handle_request\n\n try:\n\n try:\n extensions = self.request_extensions\n if _apply_extensions and extensions is not None:\n apply_request_extensions(request, extensions=extensions)\n response = handle_request(request)\n\n if request.response_callbacks:\n request._process_response_callbacks(response)\n\n has_listeners and notify(NewResponse(request, response))\n\n return response\n\n finally:\n if request.finished_callbacks:\n request._process_finished_callbacks()\n\n finally:\n manager.pop()\n\n def __call__(self, environ, start_response):\n \"\"\"\n Accept ``environ`` and ``start_response``; create a\n :term:`request` and route the request to a :app:`Pyramid`\n view based on introspection of :term:`view configuration`\n within the application registry; call ``start_response`` and\n return an iterable.\n \"\"\"\n response = self.execution_policy(environ, self)\n return response(environ, start_response)\n\ndef default_execution_policy(environ, router):\n request = router.make_request(environ)\n try:\n return router.invoke_request(request)\n except Exception:\n exc_info = sys.exc_info()\n try:\n return request.invoke_exception_view(exc_info)\n except HTTPNotFound:\n reraise(*exc_info)\n finally:\n del exc_info # avoid local ref cycle\n", "path": "pyramid/router.py"}]}
| 3,040 | 240 |
gh_patches_debug_37667
|
rasdani/github-patches
|
git_diff
|
translate__pootle-4576
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cannot run `revision --restore`
With the recent addition to check for the revision counter (#4545), this became a bit meta: running `revision --restore` fails because one has to run `revision --restore` before :grin:
```
$ ./manage.py revision --restore
SystemCheckError: System check identified some issues:
CRITICALS:
?: (pootle.C016) Revision is missing or has an incorrect value.
HINT: Run `revision --restore` to reset the revision counter.
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pootle/apps/pootle_app/management/commands/revision.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Copyright (C) Pootle contributors.
5 #
6 # This file is a part of the Pootle project. It is distributed under the GPL3
7 # or later license. See the LICENSE file for a copy of the license and the
8 # AUTHORS file for copyright and authorship information.
9
10
11 import os
12 os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'
13
14 from django.core.management.base import BaseCommand
15
16 from pootle.core.models import Revision
17
18
19 class Command(BaseCommand):
20 help = "Print Pootle's current revision."
21
22 def add_arguments(self, parser):
23 parser.add_argument(
24 '--restore',
25 action='store_true',
26 default=False,
27 dest='restore',
28 help='Restore the current revision number from the DB.',
29 )
30
31 def handle(self, **options):
32 if options['restore']:
33 from pootle_store.models import Unit
34 Revision.set(Unit.max_revision())
35
36 self.stdout.write('%s' % Revision.get())
37
```
Path: `pootle/apps/pootle_app/management/commands/__init__.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Copyright (C) Pootle contributors.
5 #
6 # This file is a part of the Pootle project. It is distributed under the GPL3
7 # or later license. See the LICENSE file for a copy of the license and the
8 # AUTHORS file for copyright and authorship information.
9
10 import datetime
11 import logging
12
13 from django.core.management.base import BaseCommand
14
15 from pootle.runner import set_sync_mode
16 from pootle_project.models import Project
17 from pootle_translationproject.models import TranslationProject
18
19
20 class PootleCommand(BaseCommand):
21 """Base class for handling recursive pootle store management commands."""
22
23 process_disabled_projects = False
24
25 def add_arguments(self, parser):
26 parser.add_argument(
27 '--project',
28 action='append',
29 dest='projects',
30 help='Project to refresh',
31 )
32 parser.add_argument(
33 '--language',
34 action='append',
35 dest='languages',
36 help='Language to refresh',
37 )
38 parser.add_argument(
39 "--noinput",
40 action="store_true",
41 default=False,
42 help=u"Never prompt for input",
43 )
44 parser.add_argument(
45 "--no-rq",
46 action="store_true",
47 default=False,
48 help=(u"Run all jobs in a single process, without "
49 "using rq workers"),
50 )
51
52 def __init__(self, *args, **kwargs):
53 self.languages = []
54 self.projects = []
55 super(PootleCommand, self).__init__(*args, **kwargs)
56
57 def do_translation_project(self, tp, **options):
58 process_stores = True
59
60 if hasattr(self, "handle_translation_project"):
61 logging.info(u"Running %s over %s", self.name, tp)
62 try:
63 process_stores = self.handle_translation_project(tp, **options)
64 except Exception:
65 logging.exception(u"Failed to run %s over %s", self.name, tp)
66 return
67
68 if not process_stores:
69 return
70
71 if hasattr(self, "handle_all_stores"):
72 logging.info(u"Running %s over %s's files", self.name, tp)
73 try:
74 self.handle_all_stores(tp, **options)
75 except Exception:
76 logging.exception(u"Failed to run %s over %s's files",
77 self.name, tp)
78 return
79 elif hasattr(self, "handle_store"):
80 store_query = tp.stores.live()
81 for store in store_query.iterator():
82 logging.info(u"Running %s over %s",
83 self.name, store.pootle_path)
84 try:
85 self.handle_store(store, **options)
86 except Exception:
87 logging.exception(u"Failed to run %s over %s",
88 self.name, store.pootle_path)
89
90 def handle(self, **options):
91 # adjust debug level to the verbosity option
92 debug_levels = {
93 0: logging.ERROR,
94 1: logging.WARNING,
95 2: logging.INFO,
96 3: logging.DEBUG
97 }
98 logging.getLogger().setLevel(
99 debug_levels.get(options['verbosity'], logging.DEBUG)
100 )
101
102 # reduce size of parse pool early on
103 self.name = self.__class__.__module__.split('.')[-1]
104 from pootle_store.fields import TranslationStoreFieldFile
105 TranslationStoreFieldFile._store_cache.maxsize = 2
106 TranslationStoreFieldFile._store_cache.cullsize = 2
107 TranslationProject._non_db_state_cache.maxsize = 2
108 TranslationProject._non_db_state_cache.cullsize = 2
109
110 self.projects = options.pop('projects', [])
111 self.languages = options.pop('languages', [])
112
113 # info start
114 start = datetime.datetime.now()
115 logging.info('Start running of %s', self.name)
116
117 self.handle_all(**options)
118
119 # info finish
120 end = datetime.datetime.now()
121 logging.info('All done for %s in %s', self.name, end - start)
122
123 def handle_all(self, **options):
124 if options["no_rq"]:
125 set_sync_mode(options['noinput'])
126
127 if self.process_disabled_projects:
128 project_query = Project.objects.all()
129 else:
130 project_query = Project.objects.enabled()
131
132 if self.projects:
133 project_query = project_query.filter(code__in=self.projects)
134
135 for project in project_query.iterator():
136 tp_query = project.translationproject_set.live() \
137 .order_by('language__code')
138
139 if self.languages:
140 tp_query = tp_query.filter(language__code__in=self.languages)
141
142 for tp in tp_query.iterator():
143 self.do_translation_project(tp, **options)
144
```
Path: `pootle/apps/pootle_app/management/commands/initdb.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Copyright (C) Pootle contributors.
5 #
6 # This file is a part of the Pootle project. It is distributed under the GPL3
7 # or later license. See the LICENSE file for a copy of the license and the
8 # AUTHORS file for copyright and authorship information.
9
10 import os
11
12 # This must be run before importing Django.
13 os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'
14
15 from django.core.management.base import BaseCommand
16
17 from pootle.core.initdb import InitDB
18
19
20 class Command(BaseCommand):
21 help = 'Populates the database with initial values: users, projects, ...'
22
23 def add_arguments(self, parser):
24 parser.add_argument(
25 '--no-projects',
26 action='store_false',
27 dest='create_projects',
28 default=True,
29 help="Do not create the default 'terminology' and 'tutorial' "
30 "projects.",
31 )
32
33 def check(self, app_configs=None, tags=None, display_num_errors=False,
34 include_deployment_checks=False):
35 from django.core.checks.registry import registry
36
37 tags = registry.tags_available()
38 tags.remove('data')
39 super(Command, self).check(
40 app_configs=app_configs,
41 tags=tags,
42 display_num_errors=display_num_errors,
43 include_deployment_checks=include_deployment_checks)
44
45 def handle(self, **options):
46 self.stdout.write('Populating the database.')
47 InitDB().init_db(options["create_projects"])
48 self.stdout.write('Successfully populated the database.')
49 self.stdout.write("To create an admin user, use the `pootle "
50 "createsuperuser` command.")
51
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/pootle/apps/pootle_app/management/commands/__init__.py b/pootle/apps/pootle_app/management/commands/__init__.py
--- a/pootle/apps/pootle_app/management/commands/__init__.py
+++ b/pootle/apps/pootle_app/management/commands/__init__.py
@@ -17,6 +17,21 @@
from pootle_translationproject.models import TranslationProject
+class SkipChecksMixin(object):
+ def check(self, app_configs=None, tags=None, display_num_errors=False,
+ include_deployment_checks=False):
+ skip_tags = getattr(self, 'skip_system_check_tags', None)
+ if skip_tags is not None:
+ from django.core.checks.registry import registry
+ tags = registry.tags_available() - set(skip_tags)
+
+ super(SkipChecksMixin, self).check(
+ app_configs=app_configs,
+ tags=tags,
+ display_num_errors=display_num_errors,
+ include_deployment_checks=include_deployment_checks)
+
+
class PootleCommand(BaseCommand):
"""Base class for handling recursive pootle store management commands."""
diff --git a/pootle/apps/pootle_app/management/commands/initdb.py b/pootle/apps/pootle_app/management/commands/initdb.py
--- a/pootle/apps/pootle_app/management/commands/initdb.py
+++ b/pootle/apps/pootle_app/management/commands/initdb.py
@@ -15,10 +15,12 @@
from django.core.management.base import BaseCommand
from pootle.core.initdb import InitDB
+from . import SkipChecksMixin
-class Command(BaseCommand):
+class Command(SkipChecksMixin, BaseCommand):
help = 'Populates the database with initial values: users, projects, ...'
+ skip_system_check_tags = ('data', )
def add_arguments(self, parser):
parser.add_argument(
@@ -30,18 +32,6 @@
"projects.",
)
- def check(self, app_configs=None, tags=None, display_num_errors=False,
- include_deployment_checks=False):
- from django.core.checks.registry import registry
-
- tags = registry.tags_available()
- tags.remove('data')
- super(Command, self).check(
- app_configs=app_configs,
- tags=tags,
- display_num_errors=display_num_errors,
- include_deployment_checks=include_deployment_checks)
-
def handle(self, **options):
self.stdout.write('Populating the database.')
InitDB().init_db(options["create_projects"])
diff --git a/pootle/apps/pootle_app/management/commands/revision.py b/pootle/apps/pootle_app/management/commands/revision.py
--- a/pootle/apps/pootle_app/management/commands/revision.py
+++ b/pootle/apps/pootle_app/management/commands/revision.py
@@ -14,10 +14,12 @@
from django.core.management.base import BaseCommand
from pootle.core.models import Revision
+from . import SkipChecksMixin
-class Command(BaseCommand):
+class Command(SkipChecksMixin, BaseCommand):
help = "Print Pootle's current revision."
+ skip_system_check_tags = ('data', )
def add_arguments(self, parser):
parser.add_argument(
|
{"golden_diff": "diff --git a/pootle/apps/pootle_app/management/commands/__init__.py b/pootle/apps/pootle_app/management/commands/__init__.py\n--- a/pootle/apps/pootle_app/management/commands/__init__.py\n+++ b/pootle/apps/pootle_app/management/commands/__init__.py\n@@ -17,6 +17,21 @@\n from pootle_translationproject.models import TranslationProject\n \n \n+class SkipChecksMixin(object):\n+ def check(self, app_configs=None, tags=None, display_num_errors=False,\n+ include_deployment_checks=False):\n+ skip_tags = getattr(self, 'skip_system_check_tags', None)\n+ if skip_tags is not None:\n+ from django.core.checks.registry import registry\n+ tags = registry.tags_available() - set(skip_tags)\n+\n+ super(SkipChecksMixin, self).check(\n+ app_configs=app_configs,\n+ tags=tags,\n+ display_num_errors=display_num_errors,\n+ include_deployment_checks=include_deployment_checks)\n+\n+\n class PootleCommand(BaseCommand):\n \"\"\"Base class for handling recursive pootle store management commands.\"\"\"\n \ndiff --git a/pootle/apps/pootle_app/management/commands/initdb.py b/pootle/apps/pootle_app/management/commands/initdb.py\n--- a/pootle/apps/pootle_app/management/commands/initdb.py\n+++ b/pootle/apps/pootle_app/management/commands/initdb.py\n@@ -15,10 +15,12 @@\n from django.core.management.base import BaseCommand\n \n from pootle.core.initdb import InitDB\n+from . import SkipChecksMixin\n \n \n-class Command(BaseCommand):\n+class Command(SkipChecksMixin, BaseCommand):\n help = 'Populates the database with initial values: users, projects, ...'\n+ skip_system_check_tags = ('data', )\n \n def add_arguments(self, parser):\n parser.add_argument(\n@@ -30,18 +32,6 @@\n \"projects.\",\n )\n \n- def check(self, app_configs=None, tags=None, display_num_errors=False,\n- include_deployment_checks=False):\n- from django.core.checks.registry import registry\n-\n- tags = registry.tags_available()\n- tags.remove('data')\n- super(Command, self).check(\n- app_configs=app_configs,\n- tags=tags,\n- display_num_errors=display_num_errors,\n- include_deployment_checks=include_deployment_checks)\n-\n def handle(self, **options):\n self.stdout.write('Populating the database.')\n InitDB().init_db(options[\"create_projects\"])\ndiff --git a/pootle/apps/pootle_app/management/commands/revision.py b/pootle/apps/pootle_app/management/commands/revision.py\n--- a/pootle/apps/pootle_app/management/commands/revision.py\n+++ b/pootle/apps/pootle_app/management/commands/revision.py\n@@ -14,10 +14,12 @@\n from django.core.management.base import BaseCommand\n \n from pootle.core.models import Revision\n+from . import SkipChecksMixin\n \n \n-class Command(BaseCommand):\n+class Command(SkipChecksMixin, BaseCommand):\n help = \"Print Pootle's current revision.\"\n+ skip_system_check_tags = ('data', )\n \n def add_arguments(self, parser):\n parser.add_argument(\n", "issue": "Cannot run `revision --restore`\nWith the recent addition to check for the revision counter (#4545), this became a bit meta: running `revision --restore` fails because one has to run `revision --restore` before :grin: \n\n```\n$ ./manage.py revision --restore\nSystemCheckError: System check identified some issues:\n\nCRITICALS:\n?: (pootle.C016) Revision is missing or has an incorrect value.\n HINT: Run `revision --restore` to reset the revision counter.\n```\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\n\nimport os\nos.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n\nfrom django.core.management.base import BaseCommand\n\nfrom pootle.core.models import Revision\n\n\nclass Command(BaseCommand):\n help = \"Print Pootle's current revision.\"\n\n def add_arguments(self, parser):\n parser.add_argument(\n '--restore',\n action='store_true',\n default=False,\n dest='restore',\n help='Restore the current revision number from the DB.',\n )\n\n def handle(self, **options):\n if options['restore']:\n from pootle_store.models import Unit\n Revision.set(Unit.max_revision())\n\n self.stdout.write('%s' % Revision.get())\n", "path": "pootle/apps/pootle_app/management/commands/revision.py"}, {"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport datetime\nimport logging\n\nfrom django.core.management.base import BaseCommand\n\nfrom pootle.runner import set_sync_mode\nfrom pootle_project.models import Project\nfrom pootle_translationproject.models import TranslationProject\n\n\nclass PootleCommand(BaseCommand):\n \"\"\"Base class for handling recursive pootle store management commands.\"\"\"\n\n process_disabled_projects = False\n\n def add_arguments(self, parser):\n parser.add_argument(\n '--project',\n action='append',\n dest='projects',\n help='Project to refresh',\n )\n parser.add_argument(\n '--language',\n action='append',\n dest='languages',\n help='Language to refresh',\n )\n parser.add_argument(\n \"--noinput\",\n action=\"store_true\",\n default=False,\n help=u\"Never prompt for input\",\n )\n parser.add_argument(\n \"--no-rq\",\n action=\"store_true\",\n default=False,\n help=(u\"Run all jobs in a single process, without \"\n \"using rq workers\"),\n )\n\n def __init__(self, *args, **kwargs):\n self.languages = []\n self.projects = []\n super(PootleCommand, self).__init__(*args, **kwargs)\n\n def do_translation_project(self, tp, **options):\n process_stores = True\n\n if hasattr(self, \"handle_translation_project\"):\n logging.info(u\"Running %s over %s\", self.name, tp)\n try:\n process_stores = self.handle_translation_project(tp, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s\", self.name, tp)\n return\n\n if not process_stores:\n return\n\n if hasattr(self, \"handle_all_stores\"):\n logging.info(u\"Running %s over %s's files\", self.name, tp)\n try:\n self.handle_all_stores(tp, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s's files\",\n self.name, tp)\n return\n elif hasattr(self, \"handle_store\"):\n store_query = tp.stores.live()\n for store in store_query.iterator():\n logging.info(u\"Running %s over %s\",\n self.name, store.pootle_path)\n try:\n self.handle_store(store, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s\",\n self.name, store.pootle_path)\n\n def handle(self, **options):\n # adjust debug level to the verbosity option\n debug_levels = {\n 0: logging.ERROR,\n 1: logging.WARNING,\n 2: logging.INFO,\n 3: logging.DEBUG\n }\n logging.getLogger().setLevel(\n debug_levels.get(options['verbosity'], logging.DEBUG)\n )\n\n # reduce size of parse pool early on\n self.name = self.__class__.__module__.split('.')[-1]\n from pootle_store.fields import TranslationStoreFieldFile\n TranslationStoreFieldFile._store_cache.maxsize = 2\n TranslationStoreFieldFile._store_cache.cullsize = 2\n TranslationProject._non_db_state_cache.maxsize = 2\n TranslationProject._non_db_state_cache.cullsize = 2\n\n self.projects = options.pop('projects', [])\n self.languages = options.pop('languages', [])\n\n # info start\n start = datetime.datetime.now()\n logging.info('Start running of %s', self.name)\n\n self.handle_all(**options)\n\n # info finish\n end = datetime.datetime.now()\n logging.info('All done for %s in %s', self.name, end - start)\n\n def handle_all(self, **options):\n if options[\"no_rq\"]:\n set_sync_mode(options['noinput'])\n\n if self.process_disabled_projects:\n project_query = Project.objects.all()\n else:\n project_query = Project.objects.enabled()\n\n if self.projects:\n project_query = project_query.filter(code__in=self.projects)\n\n for project in project_query.iterator():\n tp_query = project.translationproject_set.live() \\\n .order_by('language__code')\n\n if self.languages:\n tp_query = tp_query.filter(language__code__in=self.languages)\n\n for tp in tp_query.iterator():\n self.do_translation_project(tp, **options)\n", "path": "pootle/apps/pootle_app/management/commands/__init__.py"}, {"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport os\n\n# This must be run before importing Django.\nos.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n\nfrom django.core.management.base import BaseCommand\n\nfrom pootle.core.initdb import InitDB\n\n\nclass Command(BaseCommand):\n help = 'Populates the database with initial values: users, projects, ...'\n\n def add_arguments(self, parser):\n parser.add_argument(\n '--no-projects',\n action='store_false',\n dest='create_projects',\n default=True,\n help=\"Do not create the default 'terminology' and 'tutorial' \"\n \"projects.\",\n )\n\n def check(self, app_configs=None, tags=None, display_num_errors=False,\n include_deployment_checks=False):\n from django.core.checks.registry import registry\n\n tags = registry.tags_available()\n tags.remove('data')\n super(Command, self).check(\n app_configs=app_configs,\n tags=tags,\n display_num_errors=display_num_errors,\n include_deployment_checks=include_deployment_checks)\n\n def handle(self, **options):\n self.stdout.write('Populating the database.')\n InitDB().init_db(options[\"create_projects\"])\n self.stdout.write('Successfully populated the database.')\n self.stdout.write(\"To create an admin user, use the `pootle \"\n \"createsuperuser` command.\")\n", "path": "pootle/apps/pootle_app/management/commands/initdb.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\n\nimport os\nos.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n\nfrom django.core.management.base import BaseCommand\n\nfrom pootle.core.models import Revision\nfrom . import SkipChecksMixin\n\n\nclass Command(SkipChecksMixin, BaseCommand):\n help = \"Print Pootle's current revision.\"\n skip_system_check_tags = ('data', )\n\n def add_arguments(self, parser):\n parser.add_argument(\n '--restore',\n action='store_true',\n default=False,\n dest='restore',\n help='Restore the current revision number from the DB.',\n )\n\n def handle(self, **options):\n if options['restore']:\n from pootle_store.models import Unit\n Revision.set(Unit.max_revision())\n\n self.stdout.write('%s' % Revision.get())\n", "path": "pootle/apps/pootle_app/management/commands/revision.py"}, {"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport datetime\nimport logging\n\nfrom django.core.management.base import BaseCommand\n\nfrom pootle.runner import set_sync_mode\nfrom pootle_project.models import Project\nfrom pootle_translationproject.models import TranslationProject\n\n\nclass SkipChecksMixin(object):\n def check(self, app_configs=None, tags=None, display_num_errors=False,\n include_deployment_checks=False):\n skip_tags = getattr(self, 'skip_system_check_tags', None)\n if skip_tags is not None:\n from django.core.checks.registry import registry\n tags = registry.tags_available() - set(skip_tags)\n\n super(SkipChecksMixin, self).check(\n app_configs=app_configs,\n tags=tags,\n display_num_errors=display_num_errors,\n include_deployment_checks=include_deployment_checks)\n\n\nclass PootleCommand(BaseCommand):\n \"\"\"Base class for handling recursive pootle store management commands.\"\"\"\n\n process_disabled_projects = False\n\n def add_arguments(self, parser):\n parser.add_argument(\n '--project',\n action='append',\n dest='projects',\n help='Project to refresh',\n )\n parser.add_argument(\n '--language',\n action='append',\n dest='languages',\n help='Language to refresh',\n )\n parser.add_argument(\n \"--noinput\",\n action=\"store_true\",\n default=False,\n help=u\"Never prompt for input\",\n )\n parser.add_argument(\n \"--no-rq\",\n action=\"store_true\",\n default=False,\n help=(u\"Run all jobs in a single process, without \"\n \"using rq workers\"),\n )\n\n def __init__(self, *args, **kwargs):\n self.languages = []\n self.projects = []\n super(PootleCommand, self).__init__(*args, **kwargs)\n\n def do_translation_project(self, tp, **options):\n process_stores = True\n\n if hasattr(self, \"handle_translation_project\"):\n logging.info(u\"Running %s over %s\", self.name, tp)\n try:\n process_stores = self.handle_translation_project(tp, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s\", self.name, tp)\n return\n\n if not process_stores:\n return\n\n if hasattr(self, \"handle_all_stores\"):\n logging.info(u\"Running %s over %s's files\", self.name, tp)\n try:\n self.handle_all_stores(tp, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s's files\",\n self.name, tp)\n return\n elif hasattr(self, \"handle_store\"):\n store_query = tp.stores.live()\n for store in store_query.iterator():\n logging.info(u\"Running %s over %s\",\n self.name, store.pootle_path)\n try:\n self.handle_store(store, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s\",\n self.name, store.pootle_path)\n\n def handle(self, **options):\n # adjust debug level to the verbosity option\n debug_levels = {\n 0: logging.ERROR,\n 1: logging.WARNING,\n 2: logging.INFO,\n 3: logging.DEBUG\n }\n logging.getLogger().setLevel(\n debug_levels.get(options['verbosity'], logging.DEBUG)\n )\n\n # reduce size of parse pool early on\n self.name = self.__class__.__module__.split('.')[-1]\n from pootle_store.fields import TranslationStoreFieldFile\n TranslationStoreFieldFile._store_cache.maxsize = 2\n TranslationStoreFieldFile._store_cache.cullsize = 2\n TranslationProject._non_db_state_cache.maxsize = 2\n TranslationProject._non_db_state_cache.cullsize = 2\n\n self.projects = options.pop('projects', [])\n self.languages = options.pop('languages', [])\n\n # info start\n start = datetime.datetime.now()\n logging.info('Start running of %s', self.name)\n\n self.handle_all(**options)\n\n # info finish\n end = datetime.datetime.now()\n logging.info('All done for %s in %s', self.name, end - start)\n\n def handle_all(self, **options):\n if options[\"no_rq\"]:\n set_sync_mode(options['noinput'])\n\n if self.process_disabled_projects:\n project_query = Project.objects.all()\n else:\n project_query = Project.objects.enabled()\n\n if self.projects:\n project_query = project_query.filter(code__in=self.projects)\n\n for project in project_query.iterator():\n tp_query = project.translationproject_set.live() \\\n .order_by('language__code')\n\n if self.languages:\n tp_query = tp_query.filter(language__code__in=self.languages)\n\n for tp in tp_query.iterator():\n self.do_translation_project(tp, **options)\n", "path": "pootle/apps/pootle_app/management/commands/__init__.py"}, {"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport os\n\n# This must be run before importing Django.\nos.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'\n\nfrom django.core.management.base import BaseCommand\n\nfrom pootle.core.initdb import InitDB\nfrom . import SkipChecksMixin\n\n\nclass Command(SkipChecksMixin, BaseCommand):\n help = 'Populates the database with initial values: users, projects, ...'\n skip_system_check_tags = ('data', )\n\n def add_arguments(self, parser):\n parser.add_argument(\n '--no-projects',\n action='store_false',\n dest='create_projects',\n default=True,\n help=\"Do not create the default 'terminology' and 'tutorial' \"\n \"projects.\",\n )\n\n def handle(self, **options):\n self.stdout.write('Populating the database.')\n InitDB().init_db(options[\"create_projects\"])\n self.stdout.write('Successfully populated the database.')\n self.stdout.write(\"To create an admin user, use the `pootle \"\n \"createsuperuser` command.\")\n", "path": "pootle/apps/pootle_app/management/commands/initdb.py"}]}
| 2,509 | 745 |
gh_patches_debug_27814
|
rasdani/github-patches
|
git_diff
|
scikit-image__scikit-image-804
|
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Viewer: ColorHistogram plugin does not update with new image
If a new image is opened when using the ColorHistogram viewer plugin, the color histogram plugin is not updated and if a region of the histogram is selected, the displayed image in the main viewer reverts back to the original image.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `skimage/viewer/plugins/color_histogram.py`
Content:
```
1 import numpy as np
2 import matplotlib.pyplot as plt
3
4 from skimage import color
5 from skimage import exposure
6 from .plotplugin import PlotPlugin
7 from ..canvastools import RectangleTool
8
9
10 class ColorHistogram(PlotPlugin):
11 name = 'Color Histogram'
12
13 def __init__(self, max_pct=0.99, **kwargs):
14 super(ColorHistogram, self).__init__(height=400, **kwargs)
15 self.max_pct = max_pct
16
17 print(self.help())
18
19 def attach(self, image_viewer):
20 super(ColorHistogram, self).attach(image_viewer)
21
22 self.rect_tool = RectangleTool(self.ax, on_release=self.ab_selected)
23 self.lab_image = color.rgb2lab(image_viewer.image)
24
25 # Calculate color histogram in the Lab colorspace:
26 L, a, b = self.lab_image.T
27 left, right = -100, 100
28 ab_extents = [left, right, right, left]
29 bins = np.arange(left, right)
30 hist, x_edges, y_edges = np.histogram2d(a.flatten(), b.flatten(), bins,
31 normed=True)
32
33 # Clip bin heights that dominate a-b histogram
34 max_val = pct_total_area(hist, percentile=self.max_pct)
35 hist = exposure.rescale_intensity(hist, in_range=(0, max_val))
36 self.ax.imshow(hist, extent=ab_extents, cmap=plt.cm.gray)
37
38 self.ax.set_title('Color Histogram')
39 self.ax.set_xlabel('b')
40 self.ax.set_ylabel('a')
41
42 def help(self):
43 helpstr = ("Color Histogram tool:",
44 "Select region of a-b colorspace to highlight on image.")
45 return '\n'.join(helpstr)
46
47 def ab_selected(self, extents):
48 x0, x1, y0, y1 = extents
49
50 lab_masked = self.lab_image.copy()
51 L, a, b = lab_masked.T
52
53 mask = ((a > y0) & (a < y1)) & ((b > x0) & (b < x1))
54 lab_masked[..., 1:][~mask.T] = 0
55
56 self.image_viewer.image = color.lab2rgb(lab_masked)
57
58
59 def pct_total_area(image, percentile=0.80):
60 """Return threshold value based on percentage of total area.
61
62 The specified percent of pixels less than the given intensity threshold.
63 """
64 idx = int((image.size - 1) * percentile)
65 sorted_pixels = np.sort(image.flat)
66 return sorted_pixels[idx]
67
68
69
70
```
Path: `skimage/viewer/plugins/base.py`
Content:
```
1 """
2 Base class for Plugins that interact with ImageViewer.
3 """
4 from warnings import warn
5
6 import numpy as np
7
8 from ..qt import QtGui
9 from ..qt.QtCore import Qt, Signal
10 from ..utils import RequiredAttr, init_qtapp
11
12
13 class Plugin(QtGui.QDialog):
14 """Base class for plugins that interact with an ImageViewer.
15
16 A plugin connects an image filter (or another function) to an image viewer.
17 Note that a Plugin is initialized *without* an image viewer and attached in
18 a later step. See example below for details.
19
20 Parameters
21 ----------
22 image_viewer : ImageViewer
23 Window containing image used in measurement/manipulation.
24 image_filter : function
25 Function that gets called to update image in image viewer. This value
26 can be `None` if, for example, you have a plugin that extracts
27 information from an image and doesn't manipulate it. Alternatively,
28 this function can be defined as a method in a Plugin subclass.
29 height, width : int
30 Size of plugin window in pixels. Note that Qt will automatically resize
31 a window to fit components. So if you're adding rows of components, you
32 can leave `height = 0` and just let Qt determine the final height.
33 useblit : bool
34 If True, use blitting to speed up animation. Only available on some
35 Matplotlib backends. If None, set to True when using Agg backend.
36 This only has an effect if you draw on top of an image viewer.
37
38 Attributes
39 ----------
40 image_viewer : ImageViewer
41 Window containing image used in measurement.
42 name : str
43 Name of plugin. This is displayed as the window title.
44 artist : list
45 List of Matplotlib artists and canvastools. Any artists created by the
46 plugin should be added to this list so that it gets cleaned up on
47 close.
48
49 Examples
50 --------
51 >>> from skimage.viewer import ImageViewer
52 >>> from skimage.viewer.widgets import Slider
53 >>> from skimage import data
54 >>>
55 >>> plugin = Plugin(image_filter=lambda img, threshold: img > threshold)
56 >>> plugin += Slider('threshold', 0, 255)
57 >>>
58 >>> image = data.coins()
59 >>> viewer = ImageViewer(image)
60 >>> viewer += plugin
61 >>> # viewer.show()
62
63 The plugin will automatically delegate parameters to `image_filter` based
64 on its parameter type, i.e., `ptype` (widgets for required arguments must
65 be added in the order they appear in the function). The image attached
66 to the viewer is **automatically passed as the first argument** to the
67 filter function.
68
69 #TODO: Add flag so image is not passed to filter function by default.
70
71 `ptype = 'kwarg'` is the default for most widgets so it's unnecessary here.
72
73 """
74 name = 'Plugin'
75 image_viewer = RequiredAttr("%s is not attached to ImageViewer" % name)
76
77 # Signals used when viewers are linked to the Plugin output.
78 image_changed = Signal(np.ndarray)
79 _started = Signal(int)
80
81 def __init__(self, image_filter=None, height=0, width=400, useblit=True,
82 dock='bottom'):
83 init_qtapp()
84 super(Plugin, self).__init__()
85
86 self.dock = dock
87
88 self.image_viewer = None
89 # If subclass defines `image_filter` method ignore input.
90 if not hasattr(self, 'image_filter'):
91 self.image_filter = image_filter
92 elif image_filter is not None:
93 warn("If the Plugin class defines an `image_filter` method, "
94 "then the `image_filter` argument is ignored.")
95
96 self.setWindowTitle(self.name)
97 self.layout = QtGui.QGridLayout(self)
98 self.resize(width, height)
99 self.row = 0
100
101 self.arguments = []
102 self.keyword_arguments= {}
103
104 self.useblit = useblit
105 self.cids = []
106 self.artists = []
107
108 def attach(self, image_viewer):
109 """Attach the plugin to an ImageViewer.
110
111 Note that the ImageViewer will automatically call this method when the
112 plugin is added to the ImageViewer. For example::
113
114 viewer += Plugin(...)
115
116 Also note that `attach` automatically calls the filter function so that
117 the image matches the filtered value specified by attached widgets.
118 """
119 self.setParent(image_viewer)
120 self.setWindowFlags(Qt.Dialog)
121
122 self.image_viewer = image_viewer
123 self.image_viewer.plugins.append(self)
124 #TODO: Always passing image as first argument may be bad assumption.
125 self.arguments = [self.image_viewer.original_image]
126
127 # Call filter so that filtered image matches widget values
128 self.filter_image()
129
130 def add_widget(self, widget):
131 """Add widget to plugin.
132
133 Alternatively, Plugin's `__add__` method is overloaded to add widgets::
134
135 plugin += Widget(...)
136
137 Widgets can adjust required or optional arguments of filter function or
138 parameters for the plugin. This is specified by the Widget's `ptype'.
139 """
140 if widget.ptype == 'kwarg':
141 name = widget.name.replace(' ', '_')
142 self.keyword_arguments[name] = widget
143 widget.callback = self.filter_image
144 elif widget.ptype == 'arg':
145 self.arguments.append(widget)
146 widget.callback = self.filter_image
147 elif widget.ptype == 'plugin':
148 widget.callback = self.update_plugin
149 widget.plugin = self
150 self.layout.addWidget(widget, self.row, 0)
151 self.row += 1
152
153 def __add__(self, widget):
154 self.add_widget(widget)
155 return self
156
157 def filter_image(self, *widget_arg):
158 """Call `image_filter` with widget args and kwargs
159
160 Note: `display_filtered_image` is automatically called.
161 """
162 # `widget_arg` is passed by the active widget but is unused since all
163 # filter arguments are pulled directly from attached the widgets.
164
165 if self.image_filter is None:
166 return
167 arguments = [self._get_value(a) for a in self.arguments]
168 kwargs = dict([(name, self._get_value(a))
169 for name, a in self.keyword_arguments.items()])
170 filtered = self.image_filter(*arguments, **kwargs)
171
172 self.display_filtered_image(filtered)
173 self.image_changed.emit(filtered)
174
175 def _get_value(self, param):
176 # If param is a widget, return its `val` attribute.
177 return param if not hasattr(param, 'val') else param.val
178
179 def _update_original_image(self, image):
180 """Update the original image argument passed to the filter function.
181
182 This method is called by the viewer when the original image is updated.
183 """
184 self.arguments[0] = image
185 self.filter_image()
186
187 @property
188 def filtered_image(self):
189 """Return filtered image."""
190 return self.image_viewer.image
191
192 def display_filtered_image(self, image):
193 """Display the filtered image on image viewer.
194
195 If you don't want to simply replace the displayed image with the
196 filtered image (e.g., you want to display a transparent overlay),
197 you can override this method.
198 """
199 self.image_viewer.image = image
200
201 def update_plugin(self, name, value):
202 """Update keyword parameters of the plugin itself.
203
204 These parameters will typically be implemented as class properties so
205 that they update the image or some other component.
206 """
207 setattr(self, name, value)
208
209 def show(self, main_window=True):
210 """Show plugin."""
211 super(Plugin, self).show()
212 self.activateWindow()
213 self.raise_()
214
215 # Emit signal with x-hint so new windows can be displayed w/o overlap.
216 size = self.frameGeometry()
217 x_hint = size.x() + size.width()
218 self._started.emit(x_hint)
219
220 def closeEvent(self, event):
221 """On close disconnect all artists and events from ImageViewer.
222
223 Note that artists must be appended to `self.artists`.
224 """
225 self.clean_up()
226 self.close()
227
228 def clean_up(self):
229 self.remove_image_artists()
230 self.image_viewer.plugins.remove(self)
231 self.image_viewer.reset_image()
232 self.image_viewer.redraw()
233
234 def remove_image_artists(self):
235 """Remove artists that are connected to the image viewer."""
236 for a in self.artists:
237 a.remove()
238
```
Path: `skimage/viewer/plugins/plotplugin.py`
Content:
```
1 import numpy as np
2 from ..qt import QtGui
3
4 from ..utils import new_plot
5 from .base import Plugin
6
7
8 __all__ = ['PlotPlugin']
9
10
11 class PlotPlugin(Plugin):
12 """Plugin for ImageViewer that contains a plot canvas.
13
14 Base class for plugins that contain a Matplotlib plot canvas, which can,
15 for example, display an image histogram.
16
17 See base Plugin class for additional details.
18 """
19
20 def __init__(self, image_filter=None, height=150, width=400, **kwargs):
21 super(PlotPlugin, self).__init__(image_filter=image_filter,
22 height=height, width=width, **kwargs)
23
24 self._height = height
25 self._width = width
26
27 def attach(self, image_viewer):
28 super(PlotPlugin, self).attach(image_viewer)
29 # Add plot for displaying intensity profile.
30 self.add_plot()
31
32 def redraw(self):
33 """Redraw plot."""
34 self.canvas.draw_idle()
35
36 def add_plot(self):
37 self.fig, self.ax = new_plot()
38 self.fig.set_figwidth(self._width / float(self.fig.dpi))
39 self.fig.set_figheight(self._height / float(self.fig.dpi))
40
41 self.canvas = self.fig.canvas
42 #TODO: Converted color is slightly different than Qt background.
43 qpalette = QtGui.QPalette()
44 qcolor = qpalette.color(QtGui.QPalette.Window)
45 bgcolor = qcolor.toRgb().value()
46 if np.isscalar(bgcolor):
47 bgcolor = str(bgcolor / 255.)
48 self.fig.patch.set_facecolor(bgcolor)
49 self.layout.addWidget(self.canvas, self.row, 0)
50
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
|
diff --git a/skimage/viewer/plugins/base.py b/skimage/viewer/plugins/base.py
--- a/skimage/viewer/plugins/base.py
+++ b/skimage/viewer/plugins/base.py
@@ -182,8 +182,13 @@
This method is called by the viewer when the original image is updated.
"""
self.arguments[0] = image
+ self._on_new_image(image)
self.filter_image()
+ def _on_new_image(self, image):
+ """Override this method to update your plugin for new images."""
+ pass
+
@property
def filtered_image(self):
"""Return filtered image."""
diff --git a/skimage/viewer/plugins/color_histogram.py b/skimage/viewer/plugins/color_histogram.py
--- a/skimage/viewer/plugins/color_histogram.py
+++ b/skimage/viewer/plugins/color_histogram.py
@@ -20,7 +20,10 @@
super(ColorHistogram, self).attach(image_viewer)
self.rect_tool = RectangleTool(self.ax, on_release=self.ab_selected)
- self.lab_image = color.rgb2lab(image_viewer.image)
+ self._on_new_image(image_viewer.image)
+
+ def _on_new_image(self, image):
+ self.lab_image = color.rgb2lab(image)
# Calculate color histogram in the Lab colorspace:
L, a, b = self.lab_image.T
diff --git a/skimage/viewer/plugins/plotplugin.py b/skimage/viewer/plugins/plotplugin.py
--- a/skimage/viewer/plugins/plotplugin.py
+++ b/skimage/viewer/plugins/plotplugin.py
@@ -47,3 +47,7 @@
bgcolor = str(bgcolor / 255.)
self.fig.patch.set_facecolor(bgcolor)
self.layout.addWidget(self.canvas, self.row, 0)
+
+ def _update_original_image(self, image):
+ super(PlotPlugin, self)._update_original_image(image)
+ self.redraw()
|
{"golden_diff": "diff --git a/skimage/viewer/plugins/base.py b/skimage/viewer/plugins/base.py\n--- a/skimage/viewer/plugins/base.py\n+++ b/skimage/viewer/plugins/base.py\n@@ -182,8 +182,13 @@\n This method is called by the viewer when the original image is updated.\n \"\"\"\n self.arguments[0] = image\n+ self._on_new_image(image)\n self.filter_image()\n \n+ def _on_new_image(self, image):\n+ \"\"\"Override this method to update your plugin for new images.\"\"\"\n+ pass\n+\n @property\n def filtered_image(self):\n \"\"\"Return filtered image.\"\"\"\ndiff --git a/skimage/viewer/plugins/color_histogram.py b/skimage/viewer/plugins/color_histogram.py\n--- a/skimage/viewer/plugins/color_histogram.py\n+++ b/skimage/viewer/plugins/color_histogram.py\n@@ -20,7 +20,10 @@\n super(ColorHistogram, self).attach(image_viewer)\n \n self.rect_tool = RectangleTool(self.ax, on_release=self.ab_selected)\n- self.lab_image = color.rgb2lab(image_viewer.image)\n+ self._on_new_image(image_viewer.image)\n+\n+ def _on_new_image(self, image):\n+ self.lab_image = color.rgb2lab(image)\n \n # Calculate color histogram in the Lab colorspace:\n L, a, b = self.lab_image.T\ndiff --git a/skimage/viewer/plugins/plotplugin.py b/skimage/viewer/plugins/plotplugin.py\n--- a/skimage/viewer/plugins/plotplugin.py\n+++ b/skimage/viewer/plugins/plotplugin.py\n@@ -47,3 +47,7 @@\n bgcolor = str(bgcolor / 255.)\n self.fig.patch.set_facecolor(bgcolor)\n self.layout.addWidget(self.canvas, self.row, 0)\n+\n+ def _update_original_image(self, image):\n+ super(PlotPlugin, self)._update_original_image(image)\n+ self.redraw()\n", "issue": "Viewer: ColorHistogram plugin does not update with new image\nIf a new image is opened when using the ColorHistogram viewer plugin, the color histogram plugin is not updated and if a region of the histogram is selected, the displayed image in the main viewer reverts back to the original image.\n\n", "before_files": [{"content": "import numpy as np\nimport matplotlib.pyplot as plt\n\nfrom skimage import color\nfrom skimage import exposure\nfrom .plotplugin import PlotPlugin\nfrom ..canvastools import RectangleTool\n\n\nclass ColorHistogram(PlotPlugin):\n name = 'Color Histogram'\n\n def __init__(self, max_pct=0.99, **kwargs):\n super(ColorHistogram, self).__init__(height=400, **kwargs)\n self.max_pct = max_pct\n\n print(self.help())\n\n def attach(self, image_viewer):\n super(ColorHistogram, self).attach(image_viewer)\n\n self.rect_tool = RectangleTool(self.ax, on_release=self.ab_selected)\n self.lab_image = color.rgb2lab(image_viewer.image)\n\n # Calculate color histogram in the Lab colorspace:\n L, a, b = self.lab_image.T\n left, right = -100, 100\n ab_extents = [left, right, right, left]\n bins = np.arange(left, right)\n hist, x_edges, y_edges = np.histogram2d(a.flatten(), b.flatten(), bins,\n normed=True)\n\n # Clip bin heights that dominate a-b histogram\n max_val = pct_total_area(hist, percentile=self.max_pct)\n hist = exposure.rescale_intensity(hist, in_range=(0, max_val))\n self.ax.imshow(hist, extent=ab_extents, cmap=plt.cm.gray)\n\n self.ax.set_title('Color Histogram')\n self.ax.set_xlabel('b')\n self.ax.set_ylabel('a')\n\n def help(self):\n helpstr = (\"Color Histogram tool:\",\n \"Select region of a-b colorspace to highlight on image.\")\n return '\\n'.join(helpstr)\n\n def ab_selected(self, extents):\n x0, x1, y0, y1 = extents\n\n lab_masked = self.lab_image.copy()\n L, a, b = lab_masked.T\n\n mask = ((a > y0) & (a < y1)) & ((b > x0) & (b < x1))\n lab_masked[..., 1:][~mask.T] = 0\n\n self.image_viewer.image = color.lab2rgb(lab_masked)\n\n\ndef pct_total_area(image, percentile=0.80):\n \"\"\"Return threshold value based on percentage of total area.\n\n The specified percent of pixels less than the given intensity threshold.\n \"\"\"\n idx = int((image.size - 1) * percentile)\n sorted_pixels = np.sort(image.flat)\n return sorted_pixels[idx]\n\n\n\n", "path": "skimage/viewer/plugins/color_histogram.py"}, {"content": "\"\"\"\nBase class for Plugins that interact with ImageViewer.\n\"\"\"\nfrom warnings import warn\n\nimport numpy as np\n\nfrom ..qt import QtGui\nfrom ..qt.QtCore import Qt, Signal\nfrom ..utils import RequiredAttr, init_qtapp\n\n\nclass Plugin(QtGui.QDialog):\n \"\"\"Base class for plugins that interact with an ImageViewer.\n\n A plugin connects an image filter (or another function) to an image viewer.\n Note that a Plugin is initialized *without* an image viewer and attached in\n a later step. See example below for details.\n\n Parameters\n ----------\n image_viewer : ImageViewer\n Window containing image used in measurement/manipulation.\n image_filter : function\n Function that gets called to update image in image viewer. This value\n can be `None` if, for example, you have a plugin that extracts\n information from an image and doesn't manipulate it. Alternatively,\n this function can be defined as a method in a Plugin subclass.\n height, width : int\n Size of plugin window in pixels. Note that Qt will automatically resize\n a window to fit components. So if you're adding rows of components, you\n can leave `height = 0` and just let Qt determine the final height.\n useblit : bool\n If True, use blitting to speed up animation. Only available on some\n Matplotlib backends. If None, set to True when using Agg backend.\n This only has an effect if you draw on top of an image viewer.\n\n Attributes\n ----------\n image_viewer : ImageViewer\n Window containing image used in measurement.\n name : str\n Name of plugin. This is displayed as the window title.\n artist : list\n List of Matplotlib artists and canvastools. Any artists created by the\n plugin should be added to this list so that it gets cleaned up on\n close.\n\n Examples\n --------\n >>> from skimage.viewer import ImageViewer\n >>> from skimage.viewer.widgets import Slider\n >>> from skimage import data\n >>>\n >>> plugin = Plugin(image_filter=lambda img, threshold: img > threshold)\n >>> plugin += Slider('threshold', 0, 255)\n >>>\n >>> image = data.coins()\n >>> viewer = ImageViewer(image)\n >>> viewer += plugin\n >>> # viewer.show()\n\n The plugin will automatically delegate parameters to `image_filter` based\n on its parameter type, i.e., `ptype` (widgets for required arguments must\n be added in the order they appear in the function). The image attached\n to the viewer is **automatically passed as the first argument** to the\n filter function.\n\n #TODO: Add flag so image is not passed to filter function by default.\n\n `ptype = 'kwarg'` is the default for most widgets so it's unnecessary here.\n\n \"\"\"\n name = 'Plugin'\n image_viewer = RequiredAttr(\"%s is not attached to ImageViewer\" % name)\n\n # Signals used when viewers are linked to the Plugin output.\n image_changed = Signal(np.ndarray)\n _started = Signal(int)\n\n def __init__(self, image_filter=None, height=0, width=400, useblit=True,\n dock='bottom'):\n init_qtapp()\n super(Plugin, self).__init__()\n\n self.dock = dock\n\n self.image_viewer = None\n # If subclass defines `image_filter` method ignore input.\n if not hasattr(self, 'image_filter'):\n self.image_filter = image_filter\n elif image_filter is not None:\n warn(\"If the Plugin class defines an `image_filter` method, \"\n \"then the `image_filter` argument is ignored.\")\n\n self.setWindowTitle(self.name)\n self.layout = QtGui.QGridLayout(self)\n self.resize(width, height)\n self.row = 0\n\n self.arguments = []\n self.keyword_arguments= {}\n\n self.useblit = useblit\n self.cids = []\n self.artists = []\n\n def attach(self, image_viewer):\n \"\"\"Attach the plugin to an ImageViewer.\n\n Note that the ImageViewer will automatically call this method when the\n plugin is added to the ImageViewer. For example::\n\n viewer += Plugin(...)\n\n Also note that `attach` automatically calls the filter function so that\n the image matches the filtered value specified by attached widgets.\n \"\"\"\n self.setParent(image_viewer)\n self.setWindowFlags(Qt.Dialog)\n\n self.image_viewer = image_viewer\n self.image_viewer.plugins.append(self)\n #TODO: Always passing image as first argument may be bad assumption.\n self.arguments = [self.image_viewer.original_image]\n\n # Call filter so that filtered image matches widget values\n self.filter_image()\n\n def add_widget(self, widget):\n \"\"\"Add widget to plugin.\n\n Alternatively, Plugin's `__add__` method is overloaded to add widgets::\n\n plugin += Widget(...)\n\n Widgets can adjust required or optional arguments of filter function or\n parameters for the plugin. This is specified by the Widget's `ptype'.\n \"\"\"\n if widget.ptype == 'kwarg':\n name = widget.name.replace(' ', '_')\n self.keyword_arguments[name] = widget\n widget.callback = self.filter_image\n elif widget.ptype == 'arg':\n self.arguments.append(widget)\n widget.callback = self.filter_image\n elif widget.ptype == 'plugin':\n widget.callback = self.update_plugin\n widget.plugin = self\n self.layout.addWidget(widget, self.row, 0)\n self.row += 1\n\n def __add__(self, widget):\n self.add_widget(widget)\n return self\n\n def filter_image(self, *widget_arg):\n \"\"\"Call `image_filter` with widget args and kwargs\n\n Note: `display_filtered_image` is automatically called.\n \"\"\"\n # `widget_arg` is passed by the active widget but is unused since all\n # filter arguments are pulled directly from attached the widgets.\n\n if self.image_filter is None:\n return\n arguments = [self._get_value(a) for a in self.arguments]\n kwargs = dict([(name, self._get_value(a))\n for name, a in self.keyword_arguments.items()])\n filtered = self.image_filter(*arguments, **kwargs)\n\n self.display_filtered_image(filtered)\n self.image_changed.emit(filtered)\n\n def _get_value(self, param):\n # If param is a widget, return its `val` attribute.\n return param if not hasattr(param, 'val') else param.val\n\n def _update_original_image(self, image):\n \"\"\"Update the original image argument passed to the filter function.\n\n This method is called by the viewer when the original image is updated.\n \"\"\"\n self.arguments[0] = image\n self.filter_image()\n\n @property\n def filtered_image(self):\n \"\"\"Return filtered image.\"\"\"\n return self.image_viewer.image\n\n def display_filtered_image(self, image):\n \"\"\"Display the filtered image on image viewer.\n\n If you don't want to simply replace the displayed image with the\n filtered image (e.g., you want to display a transparent overlay),\n you can override this method.\n \"\"\"\n self.image_viewer.image = image\n\n def update_plugin(self, name, value):\n \"\"\"Update keyword parameters of the plugin itself.\n\n These parameters will typically be implemented as class properties so\n that they update the image or some other component.\n \"\"\"\n setattr(self, name, value)\n\n def show(self, main_window=True):\n \"\"\"Show plugin.\"\"\"\n super(Plugin, self).show()\n self.activateWindow()\n self.raise_()\n\n # Emit signal with x-hint so new windows can be displayed w/o overlap.\n size = self.frameGeometry()\n x_hint = size.x() + size.width()\n self._started.emit(x_hint)\n\n def closeEvent(self, event):\n \"\"\"On close disconnect all artists and events from ImageViewer.\n\n Note that artists must be appended to `self.artists`.\n \"\"\"\n self.clean_up()\n self.close()\n\n def clean_up(self):\n self.remove_image_artists()\n self.image_viewer.plugins.remove(self)\n self.image_viewer.reset_image()\n self.image_viewer.redraw()\n\n def remove_image_artists(self):\n \"\"\"Remove artists that are connected to the image viewer.\"\"\"\n for a in self.artists:\n a.remove()\n", "path": "skimage/viewer/plugins/base.py"}, {"content": "import numpy as np\nfrom ..qt import QtGui\n\nfrom ..utils import new_plot\nfrom .base import Plugin\n\n\n__all__ = ['PlotPlugin']\n\n\nclass PlotPlugin(Plugin):\n \"\"\"Plugin for ImageViewer that contains a plot canvas.\n\n Base class for plugins that contain a Matplotlib plot canvas, which can,\n for example, display an image histogram.\n\n See base Plugin class for additional details.\n \"\"\"\n\n def __init__(self, image_filter=None, height=150, width=400, **kwargs):\n super(PlotPlugin, self).__init__(image_filter=image_filter,\n height=height, width=width, **kwargs)\n\n self._height = height\n self._width = width\n\n def attach(self, image_viewer):\n super(PlotPlugin, self).attach(image_viewer)\n # Add plot for displaying intensity profile.\n self.add_plot()\n\n def redraw(self):\n \"\"\"Redraw plot.\"\"\"\n self.canvas.draw_idle()\n\n def add_plot(self):\n self.fig, self.ax = new_plot()\n self.fig.set_figwidth(self._width / float(self.fig.dpi))\n self.fig.set_figheight(self._height / float(self.fig.dpi))\n\n self.canvas = self.fig.canvas\n #TODO: Converted color is slightly different than Qt background.\n qpalette = QtGui.QPalette()\n qcolor = qpalette.color(QtGui.QPalette.Window)\n bgcolor = qcolor.toRgb().value()\n if np.isscalar(bgcolor):\n bgcolor = str(bgcolor / 255.)\n self.fig.patch.set_facecolor(bgcolor)\n self.layout.addWidget(self.canvas, self.row, 0)\n", "path": "skimage/viewer/plugins/plotplugin.py"}], "after_files": [{"content": "import numpy as np\nimport matplotlib.pyplot as plt\n\nfrom skimage import color\nfrom skimage import exposure\nfrom .plotplugin import PlotPlugin\nfrom ..canvastools import RectangleTool\n\n\nclass ColorHistogram(PlotPlugin):\n name = 'Color Histogram'\n\n def __init__(self, max_pct=0.99, **kwargs):\n super(ColorHistogram, self).__init__(height=400, **kwargs)\n self.max_pct = max_pct\n\n print(self.help())\n\n def attach(self, image_viewer):\n super(ColorHistogram, self).attach(image_viewer)\n\n self.rect_tool = RectangleTool(self.ax, on_release=self.ab_selected)\n self._on_new_image(image_viewer.image)\n\n def _on_new_image(self, image):\n self.lab_image = color.rgb2lab(image)\n\n # Calculate color histogram in the Lab colorspace:\n L, a, b = self.lab_image.T\n left, right = -100, 100\n ab_extents = [left, right, right, left]\n bins = np.arange(left, right)\n hist, x_edges, y_edges = np.histogram2d(a.flatten(), b.flatten(), bins,\n normed=True)\n\n # Clip bin heights that dominate a-b histogram\n max_val = pct_total_area(hist, percentile=self.max_pct)\n hist = exposure.rescale_intensity(hist, in_range=(0, max_val))\n self.ax.imshow(hist, extent=ab_extents, cmap=plt.cm.gray)\n\n self.ax.set_title('Color Histogram')\n self.ax.set_xlabel('b')\n self.ax.set_ylabel('a')\n\n def help(self):\n helpstr = (\"Color Histogram tool:\",\n \"Select region of a-b colorspace to highlight on image.\")\n return '\\n'.join(helpstr)\n\n def ab_selected(self, extents):\n x0, x1, y0, y1 = extents\n\n lab_masked = self.lab_image.copy()\n L, a, b = lab_masked.T\n\n mask = ((a > y0) & (a < y1)) & ((b > x0) & (b < x1))\n lab_masked[..., 1:][~mask.T] = 0\n\n self.image_viewer.image = color.lab2rgb(lab_masked)\n\n\ndef pct_total_area(image, percentile=0.80):\n \"\"\"Return threshold value based on percentage of total area.\n\n The specified percent of pixels less than the given intensity threshold.\n \"\"\"\n idx = int((image.size - 1) * percentile)\n sorted_pixels = np.sort(image.flat)\n return sorted_pixels[idx]\n\n\n\n", "path": "skimage/viewer/plugins/color_histogram.py"}, {"content": "\"\"\"\nBase class for Plugins that interact with ImageViewer.\n\"\"\"\nfrom warnings import warn\n\nimport numpy as np\n\nfrom ..qt import QtGui\nfrom ..qt.QtCore import Qt, Signal\nfrom ..utils import RequiredAttr, init_qtapp\n\n\nclass Plugin(QtGui.QDialog):\n \"\"\"Base class for plugins that interact with an ImageViewer.\n\n A plugin connects an image filter (or another function) to an image viewer.\n Note that a Plugin is initialized *without* an image viewer and attached in\n a later step. See example below for details.\n\n Parameters\n ----------\n image_viewer : ImageViewer\n Window containing image used in measurement/manipulation.\n image_filter : function\n Function that gets called to update image in image viewer. This value\n can be `None` if, for example, you have a plugin that extracts\n information from an image and doesn't manipulate it. Alternatively,\n this function can be defined as a method in a Plugin subclass.\n height, width : int\n Size of plugin window in pixels. Note that Qt will automatically resize\n a window to fit components. So if you're adding rows of components, you\n can leave `height = 0` and just let Qt determine the final height.\n useblit : bool\n If True, use blitting to speed up animation. Only available on some\n Matplotlib backends. If None, set to True when using Agg backend.\n This only has an effect if you draw on top of an image viewer.\n\n Attributes\n ----------\n image_viewer : ImageViewer\n Window containing image used in measurement.\n name : str\n Name of plugin. This is displayed as the window title.\n artist : list\n List of Matplotlib artists and canvastools. Any artists created by the\n plugin should be added to this list so that it gets cleaned up on\n close.\n\n Examples\n --------\n >>> from skimage.viewer import ImageViewer\n >>> from skimage.viewer.widgets import Slider\n >>> from skimage import data\n >>>\n >>> plugin = Plugin(image_filter=lambda img, threshold: img > threshold)\n >>> plugin += Slider('threshold', 0, 255)\n >>>\n >>> image = data.coins()\n >>> viewer = ImageViewer(image)\n >>> viewer += plugin\n >>> # viewer.show()\n\n The plugin will automatically delegate parameters to `image_filter` based\n on its parameter type, i.e., `ptype` (widgets for required arguments must\n be added in the order they appear in the function). The image attached\n to the viewer is **automatically passed as the first argument** to the\n filter function.\n\n #TODO: Add flag so image is not passed to filter function by default.\n\n `ptype = 'kwarg'` is the default for most widgets so it's unnecessary here.\n\n \"\"\"\n name = 'Plugin'\n image_viewer = RequiredAttr(\"%s is not attached to ImageViewer\" % name)\n\n # Signals used when viewers are linked to the Plugin output.\n image_changed = Signal(np.ndarray)\n _started = Signal(int)\n\n def __init__(self, image_filter=None, height=0, width=400, useblit=True,\n dock='bottom'):\n init_qtapp()\n super(Plugin, self).__init__()\n\n self.dock = dock\n\n self.image_viewer = None\n # If subclass defines `image_filter` method ignore input.\n if not hasattr(self, 'image_filter'):\n self.image_filter = image_filter\n elif image_filter is not None:\n warn(\"If the Plugin class defines an `image_filter` method, \"\n \"then the `image_filter` argument is ignored.\")\n\n self.setWindowTitle(self.name)\n self.layout = QtGui.QGridLayout(self)\n self.resize(width, height)\n self.row = 0\n\n self.arguments = []\n self.keyword_arguments= {}\n\n self.useblit = useblit\n self.cids = []\n self.artists = []\n\n def attach(self, image_viewer):\n \"\"\"Attach the plugin to an ImageViewer.\n\n Note that the ImageViewer will automatically call this method when the\n plugin is added to the ImageViewer. For example::\n\n viewer += Plugin(...)\n\n Also note that `attach` automatically calls the filter function so that\n the image matches the filtered value specified by attached widgets.\n \"\"\"\n self.setParent(image_viewer)\n self.setWindowFlags(Qt.Dialog)\n\n self.image_viewer = image_viewer\n self.image_viewer.plugins.append(self)\n #TODO: Always passing image as first argument may be bad assumption.\n self.arguments = [self.image_viewer.original_image]\n\n # Call filter so that filtered image matches widget values\n self.filter_image()\n\n def add_widget(self, widget):\n \"\"\"Add widget to plugin.\n\n Alternatively, Plugin's `__add__` method is overloaded to add widgets::\n\n plugin += Widget(...)\n\n Widgets can adjust required or optional arguments of filter function or\n parameters for the plugin. This is specified by the Widget's `ptype'.\n \"\"\"\n if widget.ptype == 'kwarg':\n name = widget.name.replace(' ', '_')\n self.keyword_arguments[name] = widget\n widget.callback = self.filter_image\n elif widget.ptype == 'arg':\n self.arguments.append(widget)\n widget.callback = self.filter_image\n elif widget.ptype == 'plugin':\n widget.callback = self.update_plugin\n widget.plugin = self\n self.layout.addWidget(widget, self.row, 0)\n self.row += 1\n\n def __add__(self, widget):\n self.add_widget(widget)\n return self\n\n def filter_image(self, *widget_arg):\n \"\"\"Call `image_filter` with widget args and kwargs\n\n Note: `display_filtered_image` is automatically called.\n \"\"\"\n # `widget_arg` is passed by the active widget but is unused since all\n # filter arguments are pulled directly from attached the widgets.\n\n if self.image_filter is None:\n return\n arguments = [self._get_value(a) for a in self.arguments]\n kwargs = dict([(name, self._get_value(a))\n for name, a in self.keyword_arguments.items()])\n filtered = self.image_filter(*arguments, **kwargs)\n\n self.display_filtered_image(filtered)\n self.image_changed.emit(filtered)\n\n def _get_value(self, param):\n # If param is a widget, return its `val` attribute.\n return param if not hasattr(param, 'val') else param.val\n\n def _update_original_image(self, image):\n \"\"\"Update the original image argument passed to the filter function.\n\n This method is called by the viewer when the original image is updated.\n \"\"\"\n self.arguments[0] = image\n self._on_new_image(image)\n self.filter_image()\n\n def _on_new_image(self, image):\n \"\"\"Override this method to update your plugin for new images.\"\"\"\n pass\n\n @property\n def filtered_image(self):\n \"\"\"Return filtered image.\"\"\"\n return self.image_viewer.image\n\n def display_filtered_image(self, image):\n \"\"\"Display the filtered image on image viewer.\n\n If you don't want to simply replace the displayed image with the\n filtered image (e.g., you want to display a transparent overlay),\n you can override this method.\n \"\"\"\n self.image_viewer.image = image\n\n def update_plugin(self, name, value):\n \"\"\"Update keyword parameters of the plugin itself.\n\n These parameters will typically be implemented as class properties so\n that they update the image or some other component.\n \"\"\"\n setattr(self, name, value)\n\n def show(self, main_window=True):\n \"\"\"Show plugin.\"\"\"\n super(Plugin, self).show()\n self.activateWindow()\n self.raise_()\n\n # Emit signal with x-hint so new windows can be displayed w/o overlap.\n size = self.frameGeometry()\n x_hint = size.x() + size.width()\n self._started.emit(x_hint)\n\n def closeEvent(self, event):\n \"\"\"On close disconnect all artists and events from ImageViewer.\n\n Note that artists must be appended to `self.artists`.\n \"\"\"\n self.clean_up()\n self.close()\n\n def clean_up(self):\n self.remove_image_artists()\n self.image_viewer.plugins.remove(self)\n self.image_viewer.reset_image()\n self.image_viewer.redraw()\n\n def remove_image_artists(self):\n \"\"\"Remove artists that are connected to the image viewer.\"\"\"\n for a in self.artists:\n a.remove()\n", "path": "skimage/viewer/plugins/base.py"}, {"content": "import numpy as np\nfrom ..qt import QtGui\n\nfrom ..utils import new_plot\nfrom .base import Plugin\n\n\n__all__ = ['PlotPlugin']\n\n\nclass PlotPlugin(Plugin):\n \"\"\"Plugin for ImageViewer that contains a plot canvas.\n\n Base class for plugins that contain a Matplotlib plot canvas, which can,\n for example, display an image histogram.\n\n See base Plugin class for additional details.\n \"\"\"\n\n def __init__(self, image_filter=None, height=150, width=400, **kwargs):\n super(PlotPlugin, self).__init__(image_filter=image_filter,\n height=height, width=width, **kwargs)\n\n self._height = height\n self._width = width\n\n def attach(self, image_viewer):\n super(PlotPlugin, self).attach(image_viewer)\n # Add plot for displaying intensity profile.\n self.add_plot()\n\n def redraw(self):\n \"\"\"Redraw plot.\"\"\"\n self.canvas.draw_idle()\n\n def add_plot(self):\n self.fig, self.ax = new_plot()\n self.fig.set_figwidth(self._width / float(self.fig.dpi))\n self.fig.set_figheight(self._height / float(self.fig.dpi))\n\n self.canvas = self.fig.canvas\n #TODO: Converted color is slightly different than Qt background.\n qpalette = QtGui.QPalette()\n qcolor = qpalette.color(QtGui.QPalette.Window)\n bgcolor = qcolor.toRgb().value()\n if np.isscalar(bgcolor):\n bgcolor = str(bgcolor / 255.)\n self.fig.patch.set_facecolor(bgcolor)\n self.layout.addWidget(self.canvas, self.row, 0)\n\n def _update_original_image(self, image):\n super(PlotPlugin, self)._update_original_image(image)\n self.redraw()\n", "path": "skimage/viewer/plugins/plotplugin.py"}]}
| 3,901 | 439 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.