problem_id (stringlengths 18-22) | source (stringclasses, 1 value) | task_type (stringclasses, 1 value) | in_source_id (stringlengths 13-58) | prompt (stringlengths 1.1k-10.2k) | golden_diff (stringlengths 151-4.94k) | verification_info (stringlengths 582-21k) | num_tokens (int64 271-2.05k) | num_tokens_diff (int64 47-1.02k) |
---|---|---|---|---|---|---|---|---|
gh_patches_debug_37418 | rasdani/github-patches | git_diff | python-telegram-bot__python-telegram-bot-2202 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] Passing non-bytes file input leads to error
https://t.me/pythontelegrambotgroup/396541
TL;DR:
`send_document(open('text_file', 'rb'))` works but `send_document(open('text_file', 'r'))` raises is error.
This is, because we try to guess if the file is an image using `imghdr.what(None, stream)` in `InputFile.is_image`, which only works if `stream` is a bytes stream.
If I comment the `is_image` out, the file is sent without issue, so I guess we should just check if the input is bytes before calling `is_image`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `telegram/files/inputfile.py`
Content:
```
1 #!/usr/bin/env python
2 # pylint: disable=W0622,E0611
3 #
4 # A library that provides a Python interface to the Telegram Bot API
5 # Copyright (C) 2015-2020
6 # Leandro Toledo de Souza <[email protected]>
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU Lesser Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU Lesser Public License for more details.
17 #
18 # You should have received a copy of the GNU Lesser Public License
19 # along with this program. If not, see [http://www.gnu.org/licenses/].
20 """This module contains an object that represents a Telegram InputFile."""
21
22 import imghdr
23 import mimetypes
24 import os
25 from typing import IO, Optional, Tuple
26 from uuid import uuid4
27
28 from telegram import TelegramError
29
30 DEFAULT_MIME_TYPE = 'application/octet-stream'
31
32
33 class InputFile:
34 """This object represents a Telegram InputFile.
35
36 Attributes:
37 input_file_content (:obj:`bytes`): The binary content of the file to send.
38 filename (:obj:`str`): Optional. Filename for the file to be sent.
39 attach (:obj:`str`): Optional. Attach id for sending multiple files.
40
41 Args:
42 obj (:obj:`File handler`): An open file descriptor.
43 filename (:obj:`str`, optional): Filename for this InputFile.
44 attach (:obj:`bool`, optional): Whether this should be send as one file or is part of a
45 collection of files.
46
47 Raises:
48 TelegramError
49
50 """
51
52 def __init__(self, obj: IO, filename: str = None, attach: bool = None):
53 self.filename = None
54 self.input_file_content = obj.read()
55 self.attach = 'attached' + uuid4().hex if attach else None
56
57 if filename:
58 self.filename = filename
59 elif hasattr(obj, 'name') and not isinstance(obj.name, int):
60 self.filename = os.path.basename(obj.name)
61
62 try:
63 self.mimetype = self.is_image(self.input_file_content)
64 except TelegramError:
65 if self.filename:
66 self.mimetype = mimetypes.guess_type(self.filename)[0] or DEFAULT_MIME_TYPE
67 else:
68 self.mimetype = DEFAULT_MIME_TYPE
69 if not self.filename:
70 self.filename = self.mimetype.replace('/', '.')
71
72 @property
73 def field_tuple(self) -> Tuple[str, bytes, str]:
74 return self.filename, self.input_file_content, self.mimetype
75
76 @staticmethod
77 def is_image(stream: bytes) -> str:
78 """Check if the content file is an image by analyzing its headers.
79
80 Args:
81 stream (:obj:`bytes`): A byte stream representing the content of a file.
82
83 Returns:
84 :obj:`str`: The str mime-type of an image.
85
86 """
87 image = imghdr.what(None, stream)
88 if image:
89 return 'image/%s' % image
90
91 raise TelegramError('Could not parse file content')
92
93 @staticmethod
94 def is_file(obj: object) -> bool:
95 return hasattr(obj, 'read')
96
97 def to_dict(self) -> Optional[str]:
98 if self.attach:
99 return 'attach://' + self.attach
100 return None
101
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/telegram/files/inputfile.py b/telegram/files/inputfile.py
--- a/telegram/files/inputfile.py
+++ b/telegram/files/inputfile.py
@@ -20,14 +20,14 @@
"""This module contains an object that represents a Telegram InputFile."""
import imghdr
+import logging
import mimetypes
import os
from typing import IO, Optional, Tuple
from uuid import uuid4
-from telegram import TelegramError
-
DEFAULT_MIME_TYPE = 'application/octet-stream'
+logger = logging.getLogger(__name__)
class InputFile:
@@ -59,13 +59,14 @@
elif hasattr(obj, 'name') and not isinstance(obj.name, int):
self.filename = os.path.basename(obj.name)
- try:
- self.mimetype = self.is_image(self.input_file_content)
- except TelegramError:
- if self.filename:
- self.mimetype = mimetypes.guess_type(self.filename)[0] or DEFAULT_MIME_TYPE
- else:
- self.mimetype = DEFAULT_MIME_TYPE
+ image_mime_type = self.is_image(self.input_file_content)
+ if image_mime_type:
+ self.mimetype = image_mime_type
+ elif self.filename:
+ self.mimetype = mimetypes.guess_type(self.filename)[0] or DEFAULT_MIME_TYPE
+ else:
+ self.mimetype = DEFAULT_MIME_TYPE
+
if not self.filename:
self.filename = self.mimetype.replace('/', '.')
@@ -74,21 +75,27 @@
return self.filename, self.input_file_content, self.mimetype
@staticmethod
- def is_image(stream: bytes) -> str:
+ def is_image(stream: bytes) -> Optional[str]:
"""Check if the content file is an image by analyzing its headers.
Args:
stream (:obj:`bytes`): A byte stream representing the content of a file.
Returns:
- :obj:`str`: The str mime-type of an image.
+ :obj:`str` | :obj:`None`: The mime-type of an image, if the input is an image, or
+ :obj:`None` else.
"""
- image = imghdr.what(None, stream)
- if image:
- return 'image/%s' % image
-
- raise TelegramError('Could not parse file content')
+ try:
+ image = imghdr.what(None, stream)
+ if image:
+ return f'image/{image}'
+ return None
+ except Exception:
+ logger.debug(
+ "Could not parse file content. Assuming that file is not an image.", exc_info=True
+ )
+ return None
@staticmethod
def is_file(obj: object) -> bool:
| {"golden_diff": "diff --git a/telegram/files/inputfile.py b/telegram/files/inputfile.py\n--- a/telegram/files/inputfile.py\n+++ b/telegram/files/inputfile.py\n@@ -20,14 +20,14 @@\n \"\"\"This module contains an object that represents a Telegram InputFile.\"\"\"\n \n import imghdr\n+import logging\n import mimetypes\n import os\n from typing import IO, Optional, Tuple\n from uuid import uuid4\n \n-from telegram import TelegramError\n-\n DEFAULT_MIME_TYPE = 'application/octet-stream'\n+logger = logging.getLogger(__name__)\n \n \n class InputFile:\n@@ -59,13 +59,14 @@\n elif hasattr(obj, 'name') and not isinstance(obj.name, int):\n self.filename = os.path.basename(obj.name)\n \n- try:\n- self.mimetype = self.is_image(self.input_file_content)\n- except TelegramError:\n- if self.filename:\n- self.mimetype = mimetypes.guess_type(self.filename)[0] or DEFAULT_MIME_TYPE\n- else:\n- self.mimetype = DEFAULT_MIME_TYPE\n+ image_mime_type = self.is_image(self.input_file_content)\n+ if image_mime_type:\n+ self.mimetype = image_mime_type\n+ elif self.filename:\n+ self.mimetype = mimetypes.guess_type(self.filename)[0] or DEFAULT_MIME_TYPE\n+ else:\n+ self.mimetype = DEFAULT_MIME_TYPE\n+\n if not self.filename:\n self.filename = self.mimetype.replace('/', '.')\n \n@@ -74,21 +75,27 @@\n return self.filename, self.input_file_content, self.mimetype\n \n @staticmethod\n- def is_image(stream: bytes) -> str:\n+ def is_image(stream: bytes) -> Optional[str]:\n \"\"\"Check if the content file is an image by analyzing its headers.\n \n Args:\n stream (:obj:`bytes`): A byte stream representing the content of a file.\n \n Returns:\n- :obj:`str`: The str mime-type of an image.\n+ :obj:`str` | :obj:`None`: The mime-type of an image, if the input is an image, or\n+ :obj:`None` else.\n \n \"\"\"\n- image = imghdr.what(None, stream)\n- if image:\n- return 'image/%s' % image\n-\n- raise TelegramError('Could not parse file content')\n+ try:\n+ image = imghdr.what(None, stream)\n+ if image:\n+ return f'image/{image}'\n+ return None\n+ except Exception:\n+ logger.debug(\n+ \"Could not parse file content. Assuming that file is not an image.\", exc_info=True\n+ )\n+ return None\n \n @staticmethod\n def is_file(obj: object) -> bool:\n", "issue": "[BUG] Passing non-bytes file input leads to error\nhttps://t.me/pythontelegrambotgroup/396541\r\n\r\nTL;DR:\r\n\r\n`send_document(open('text_file', 'rb'))` works but `send_document(open('text_file', 'r'))` raises is error.\r\nThis is, because we try to guess if the file is an image using `imghdr.what(None, stream)` in `InputFile.is_image`, which only works if `stream` is a bytes stream.\r\nIf I comment the `is_image` out, the file is sent without issue, so I guess we should just check if the input is bytes before calling `is_image`\n", "before_files": [{"content": "#!/usr/bin/env python\n# pylint: disable=W0622,E0611\n#\n# A library that provides a Python interface to the Telegram Bot API\n# Copyright (C) 2015-2020\n# Leandro Toledo de Souza <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n# GNU Lesser Public License for more details.\n#\n# You should have received a copy of the GNU Lesser Public License\n# along with this program. If not, see [http://www.gnu.org/licenses/].\n\"\"\"This module contains an object that represents a Telegram InputFile.\"\"\"\n\nimport imghdr\nimport mimetypes\nimport os\nfrom typing import IO, Optional, Tuple\nfrom uuid import uuid4\n\nfrom telegram import TelegramError\n\nDEFAULT_MIME_TYPE = 'application/octet-stream'\n\n\nclass InputFile:\n \"\"\"This object represents a Telegram InputFile.\n\n Attributes:\n input_file_content (:obj:`bytes`): The binary content of the file to send.\n filename (:obj:`str`): Optional. Filename for the file to be sent.\n attach (:obj:`str`): Optional. Attach id for sending multiple files.\n\n Args:\n obj (:obj:`File handler`): An open file descriptor.\n filename (:obj:`str`, optional): Filename for this InputFile.\n attach (:obj:`bool`, optional): Whether this should be send as one file or is part of a\n collection of files.\n\n Raises:\n TelegramError\n\n \"\"\"\n\n def __init__(self, obj: IO, filename: str = None, attach: bool = None):\n self.filename = None\n self.input_file_content = obj.read()\n self.attach = 'attached' + uuid4().hex if attach else None\n\n if filename:\n self.filename = filename\n elif hasattr(obj, 'name') and not isinstance(obj.name, int):\n self.filename = os.path.basename(obj.name)\n\n try:\n self.mimetype = self.is_image(self.input_file_content)\n except TelegramError:\n if self.filename:\n self.mimetype = mimetypes.guess_type(self.filename)[0] or DEFAULT_MIME_TYPE\n else:\n self.mimetype = DEFAULT_MIME_TYPE\n if not self.filename:\n self.filename = self.mimetype.replace('/', '.')\n\n @property\n def field_tuple(self) -> Tuple[str, bytes, str]:\n return self.filename, self.input_file_content, self.mimetype\n\n @staticmethod\n def is_image(stream: bytes) -> str:\n \"\"\"Check if the content file is an image by analyzing its headers.\n\n Args:\n stream (:obj:`bytes`): A byte stream representing the content of a file.\n\n Returns:\n :obj:`str`: The str mime-type of an image.\n\n \"\"\"\n image = imghdr.what(None, stream)\n if image:\n return 'image/%s' % image\n\n raise TelegramError('Could not parse file content')\n\n @staticmethod\n def is_file(obj: object) -> bool:\n return hasattr(obj, 'read')\n\n def to_dict(self) -> Optional[str]:\n if self.attach:\n return 'attach://' + self.attach\n return None\n", "path": "telegram/files/inputfile.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# pylint: disable=W0622,E0611\n#\n# A library that provides a Python interface to the Telegram Bot API\n# Copyright (C) 2015-2020\n# Leandro Toledo de Souza <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser Public License for more details.\n#\n# You should have received a copy of the GNU Lesser Public License\n# along with this program. 
If not, see [http://www.gnu.org/licenses/].\n\"\"\"This module contains an object that represents a Telegram InputFile.\"\"\"\n\nimport imghdr\nimport logging\nimport mimetypes\nimport os\nfrom typing import IO, Optional, Tuple\nfrom uuid import uuid4\n\nDEFAULT_MIME_TYPE = 'application/octet-stream'\nlogger = logging.getLogger(__name__)\n\n\nclass InputFile:\n \"\"\"This object represents a Telegram InputFile.\n\n Attributes:\n input_file_content (:obj:`bytes`): The binary content of the file to send.\n filename (:obj:`str`): Optional. Filename for the file to be sent.\n attach (:obj:`str`): Optional. Attach id for sending multiple files.\n\n Args:\n obj (:obj:`File handler`): An open file descriptor.\n filename (:obj:`str`, optional): Filename for this InputFile.\n attach (:obj:`bool`, optional): Whether this should be send as one file or is part of a\n collection of files.\n\n Raises:\n TelegramError\n\n \"\"\"\n\n def __init__(self, obj: IO, filename: str = None, attach: bool = None):\n self.filename = None\n self.input_file_content = obj.read()\n self.attach = 'attached' + uuid4().hex if attach else None\n\n if filename:\n self.filename = filename\n elif hasattr(obj, 'name') and not isinstance(obj.name, int):\n self.filename = os.path.basename(obj.name)\n\n image_mime_type = self.is_image(self.input_file_content)\n if image_mime_type:\n self.mimetype = image_mime_type\n elif self.filename:\n self.mimetype = mimetypes.guess_type(self.filename)[0] or DEFAULT_MIME_TYPE\n else:\n self.mimetype = DEFAULT_MIME_TYPE\n\n if not self.filename:\n self.filename = self.mimetype.replace('/', '.')\n\n @property\n def field_tuple(self) -> Tuple[str, bytes, str]:\n return self.filename, self.input_file_content, self.mimetype\n\n @staticmethod\n def is_image(stream: bytes) -> Optional[str]:\n \"\"\"Check if the content file is an image by analyzing its headers.\n\n Args:\n stream (:obj:`bytes`): A byte stream representing the content of a file.\n\n Returns:\n :obj:`str` | :obj:`None`: The mime-type of an image, if the input is an image, or\n :obj:`None` else.\n\n \"\"\"\n try:\n image = imghdr.what(None, stream)\n if image:\n return f'image/{image}'\n return None\n except Exception:\n logger.debug(\n \"Could not parse file content. Assuming that file is not an image.\", exc_info=True\n )\n return None\n\n @staticmethod\n def is_file(obj: object) -> bool:\n return hasattr(obj, 'read')\n\n def to_dict(self) -> Optional[str]:\n if self.attach:\n return 'attach://' + self.attach\n return None\n", "path": "telegram/files/inputfile.py"}]} | 1,383 | 609 |
gh_patches_debug_5607 | rasdani/github-patches | git_diff | pallets__werkzeug-2643 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Stream argument to ProfilerMiddleware is not typed hinted correctly
The profiler middleware `src/werkzeug/middleware/profiler.py:80` documentation and implementation states that the argument `stream` can be set to `None` to disable output.
However the type hints specifies `stream: t.IO[str] = sys.stdout,` which is inconsistent with the documentation and the implementation.
To replicate the issue run a type hint aware validator against something initiating a `ProfilerMiddleware` with the `stream` argument set to `None`:
```
ProfilerMiddleware(
app,
stream=None
)
```
It will cause an error stating that `None` is not an acceptable value for `stream`.
It's a minor and easy fix that I'm happy to provide a PR for if deemed acceptable with the proposed change:
```
stream: t.Union[t.IO[str], None] = sys.stdout,
```
Environment:
- Python version: 3.10
- Werkzeug version: 2.2.3
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/werkzeug/middleware/profiler.py`
Content:
```
1 """
2 Application Profiler
3 ====================
4
5 This module provides a middleware that profiles each request with the
6 :mod:`cProfile` module. This can help identify bottlenecks in your code
7 that may be slowing down your application.
8
9 .. autoclass:: ProfilerMiddleware
10
11 :copyright: 2007 Pallets
12 :license: BSD-3-Clause
13 """
14 import os.path
15 import sys
16 import time
17 import typing as t
18 from pstats import Stats
19
20 try:
21 from cProfile import Profile
22 except ImportError:
23 from profile import Profile # type: ignore
24
25 if t.TYPE_CHECKING:
26 from _typeshed.wsgi import StartResponse
27 from _typeshed.wsgi import WSGIApplication
28 from _typeshed.wsgi import WSGIEnvironment
29
30
31 class ProfilerMiddleware:
32 """Wrap a WSGI application and profile the execution of each
33 request. Responses are buffered so that timings are more exact.
34
35 If ``stream`` is given, :class:`pstats.Stats` are written to it
36 after each request. If ``profile_dir`` is given, :mod:`cProfile`
37 data files are saved to that directory, one file per request.
38
39 The filename can be customized by passing ``filename_format``. If
40 it is a string, it will be formatted using :meth:`str.format` with
41 the following fields available:
42
43 - ``{method}`` - The request method; GET, POST, etc.
44 - ``{path}`` - The request path or 'root' should one not exist.
45 - ``{elapsed}`` - The elapsed time of the request.
46 - ``{time}`` - The time of the request.
47
48 If it is a callable, it will be called with the WSGI ``environ``
49 dict and should return a filename.
50
51 :param app: The WSGI application to wrap.
52 :param stream: Write stats to this stream. Disable with ``None``.
53 :param sort_by: A tuple of columns to sort stats by. See
54 :meth:`pstats.Stats.sort_stats`.
55 :param restrictions: A tuple of restrictions to filter stats by. See
56 :meth:`pstats.Stats.print_stats`.
57 :param profile_dir: Save profile data files to this directory.
58 :param filename_format: Format string for profile data file names,
59 or a callable returning a name. See explanation above.
60
61 .. code-block:: python
62
63 from werkzeug.middleware.profiler import ProfilerMiddleware
64 app = ProfilerMiddleware(app)
65
66 .. versionchanged:: 0.15
67 Stats are written even if ``profile_dir`` is given, and can be
68 disable by passing ``stream=None``.
69
70 .. versionadded:: 0.15
71 Added ``filename_format``.
72
73 .. versionadded:: 0.9
74 Added ``restrictions`` and ``profile_dir``.
75 """
76
77 def __init__(
78 self,
79 app: "WSGIApplication",
80 stream: t.IO[str] = sys.stdout,
81 sort_by: t.Iterable[str] = ("time", "calls"),
82 restrictions: t.Iterable[t.Union[str, int, float]] = (),
83 profile_dir: t.Optional[str] = None,
84 filename_format: str = "{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof",
85 ) -> None:
86 self._app = app
87 self._stream = stream
88 self._sort_by = sort_by
89 self._restrictions = restrictions
90 self._profile_dir = profile_dir
91 self._filename_format = filename_format
92
93 def __call__(
94 self, environ: "WSGIEnvironment", start_response: "StartResponse"
95 ) -> t.Iterable[bytes]:
96 response_body: t.List[bytes] = []
97
98 def catching_start_response(status, headers, exc_info=None): # type: ignore
99 start_response(status, headers, exc_info)
100 return response_body.append
101
102 def runapp() -> None:
103 app_iter = self._app(
104 environ, t.cast("StartResponse", catching_start_response)
105 )
106 response_body.extend(app_iter)
107
108 if hasattr(app_iter, "close"):
109 app_iter.close()
110
111 profile = Profile()
112 start = time.time()
113 profile.runcall(runapp)
114 body = b"".join(response_body)
115 elapsed = time.time() - start
116
117 if self._profile_dir is not None:
118 if callable(self._filename_format):
119 filename = self._filename_format(environ)
120 else:
121 filename = self._filename_format.format(
122 method=environ["REQUEST_METHOD"],
123 path=environ["PATH_INFO"].strip("/").replace("/", ".") or "root",
124 elapsed=elapsed * 1000.0,
125 time=time.time(),
126 )
127 filename = os.path.join(self._profile_dir, filename)
128 profile.dump_stats(filename)
129
130 if self._stream is not None:
131 stats = Stats(profile, stream=self._stream)
132 stats.sort_stats(*self._sort_by)
133 print("-" * 80, file=self._stream)
134 path_info = environ.get("PATH_INFO", "")
135 print(f"PATH: {path_info!r}", file=self._stream)
136 stats.print_stats(*self._restrictions)
137 print(f"{'-' * 80}\n", file=self._stream)
138
139 return [body]
140
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/werkzeug/middleware/profiler.py b/src/werkzeug/middleware/profiler.py
--- a/src/werkzeug/middleware/profiler.py
+++ b/src/werkzeug/middleware/profiler.py
@@ -77,7 +77,7 @@
def __init__(
self,
app: "WSGIApplication",
- stream: t.IO[str] = sys.stdout,
+ stream: t.Union[t.IO[str], None] = sys.stdout,
sort_by: t.Iterable[str] = ("time", "calls"),
restrictions: t.Iterable[t.Union[str, int, float]] = (),
profile_dir: t.Optional[str] = None,
| {"golden_diff": "diff --git a/src/werkzeug/middleware/profiler.py b/src/werkzeug/middleware/profiler.py\n--- a/src/werkzeug/middleware/profiler.py\n+++ b/src/werkzeug/middleware/profiler.py\n@@ -77,7 +77,7 @@\n def __init__(\n self,\n app: \"WSGIApplication\",\n- stream: t.IO[str] = sys.stdout,\n+ stream: t.Union[t.IO[str], None] = sys.stdout,\n sort_by: t.Iterable[str] = (\"time\", \"calls\"),\n restrictions: t.Iterable[t.Union[str, int, float]] = (),\n profile_dir: t.Optional[str] = None,\n", "issue": "Stream argument to ProfilerMiddleware is not typed hinted correctly\nThe profiler middleware `src/werkzeug/middleware/profiler.py:80` documentation and implementation states that the argument `stream` can be set to `None` to disable output.\r\n \r\nHowever the type hints specifies `stream: t.IO[str] = sys.stdout,` which is inconsistent with the documentation and the implementation.\r\n\r\nTo replicate the issue run a type hint aware validator against something initiating a `ProfilerMiddleware` with the `stream` argument set to `None`:\r\n```\r\nProfilerMiddleware(\r\n app,\r\n stream=None\r\n)\r\n```\r\nIt will cause an error stating that `None` is not an acceptable value for `stream`.\r\n\r\nIt's a minor and easy fix that I'm happy to provide a PR for if deemed acceptable with the proposed change:\r\n```\r\nstream: t.Union[t.IO[str], None] = sys.stdout,\r\n```\r\n\r\nEnvironment:\r\n\r\n- Python version: 3.10\r\n- Werkzeug version: 2.2.3\r\n\n", "before_files": [{"content": "\"\"\"\nApplication Profiler\n====================\n\nThis module provides a middleware that profiles each request with the\n:mod:`cProfile` module. This can help identify bottlenecks in your code\nthat may be slowing down your application.\n\n.. autoclass:: ProfilerMiddleware\n\n:copyright: 2007 Pallets\n:license: BSD-3-Clause\n\"\"\"\nimport os.path\nimport sys\nimport time\nimport typing as t\nfrom pstats import Stats\n\ntry:\n from cProfile import Profile\nexcept ImportError:\n from profile import Profile # type: ignore\n\nif t.TYPE_CHECKING:\n from _typeshed.wsgi import StartResponse\n from _typeshed.wsgi import WSGIApplication\n from _typeshed.wsgi import WSGIEnvironment\n\n\nclass ProfilerMiddleware:\n \"\"\"Wrap a WSGI application and profile the execution of each\n request. Responses are buffered so that timings are more exact.\n\n If ``stream`` is given, :class:`pstats.Stats` are written to it\n after each request. If ``profile_dir`` is given, :mod:`cProfile`\n data files are saved to that directory, one file per request.\n\n The filename can be customized by passing ``filename_format``. If\n it is a string, it will be formatted using :meth:`str.format` with\n the following fields available:\n\n - ``{method}`` - The request method; GET, POST, etc.\n - ``{path}`` - The request path or 'root' should one not exist.\n - ``{elapsed}`` - The elapsed time of the request.\n - ``{time}`` - The time of the request.\n\n If it is a callable, it will be called with the WSGI ``environ``\n dict and should return a filename.\n\n :param app: The WSGI application to wrap.\n :param stream: Write stats to this stream. Disable with ``None``.\n :param sort_by: A tuple of columns to sort stats by. See\n :meth:`pstats.Stats.sort_stats`.\n :param restrictions: A tuple of restrictions to filter stats by. See\n :meth:`pstats.Stats.print_stats`.\n :param profile_dir: Save profile data files to this directory.\n :param filename_format: Format string for profile data file names,\n or a callable returning a name. 
See explanation above.\n\n .. code-block:: python\n\n from werkzeug.middleware.profiler import ProfilerMiddleware\n app = ProfilerMiddleware(app)\n\n .. versionchanged:: 0.15\n Stats are written even if ``profile_dir`` is given, and can be\n disable by passing ``stream=None``.\n\n .. versionadded:: 0.15\n Added ``filename_format``.\n\n .. versionadded:: 0.9\n Added ``restrictions`` and ``profile_dir``.\n \"\"\"\n\n def __init__(\n self,\n app: \"WSGIApplication\",\n stream: t.IO[str] = sys.stdout,\n sort_by: t.Iterable[str] = (\"time\", \"calls\"),\n restrictions: t.Iterable[t.Union[str, int, float]] = (),\n profile_dir: t.Optional[str] = None,\n filename_format: str = \"{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof\",\n ) -> None:\n self._app = app\n self._stream = stream\n self._sort_by = sort_by\n self._restrictions = restrictions\n self._profile_dir = profile_dir\n self._filename_format = filename_format\n\n def __call__(\n self, environ: \"WSGIEnvironment\", start_response: \"StartResponse\"\n ) -> t.Iterable[bytes]:\n response_body: t.List[bytes] = []\n\n def catching_start_response(status, headers, exc_info=None): # type: ignore\n start_response(status, headers, exc_info)\n return response_body.append\n\n def runapp() -> None:\n app_iter = self._app(\n environ, t.cast(\"StartResponse\", catching_start_response)\n )\n response_body.extend(app_iter)\n\n if hasattr(app_iter, \"close\"):\n app_iter.close()\n\n profile = Profile()\n start = time.time()\n profile.runcall(runapp)\n body = b\"\".join(response_body)\n elapsed = time.time() - start\n\n if self._profile_dir is not None:\n if callable(self._filename_format):\n filename = self._filename_format(environ)\n else:\n filename = self._filename_format.format(\n method=environ[\"REQUEST_METHOD\"],\n path=environ[\"PATH_INFO\"].strip(\"/\").replace(\"/\", \".\") or \"root\",\n elapsed=elapsed * 1000.0,\n time=time.time(),\n )\n filename = os.path.join(self._profile_dir, filename)\n profile.dump_stats(filename)\n\n if self._stream is not None:\n stats = Stats(profile, stream=self._stream)\n stats.sort_stats(*self._sort_by)\n print(\"-\" * 80, file=self._stream)\n path_info = environ.get(\"PATH_INFO\", \"\")\n print(f\"PATH: {path_info!r}\", file=self._stream)\n stats.print_stats(*self._restrictions)\n print(f\"{'-' * 80}\\n\", file=self._stream)\n\n return [body]\n", "path": "src/werkzeug/middleware/profiler.py"}], "after_files": [{"content": "\"\"\"\nApplication Profiler\n====================\n\nThis module provides a middleware that profiles each request with the\n:mod:`cProfile` module. This can help identify bottlenecks in your code\nthat may be slowing down your application.\n\n.. autoclass:: ProfilerMiddleware\n\n:copyright: 2007 Pallets\n:license: BSD-3-Clause\n\"\"\"\nimport os.path\nimport sys\nimport time\nimport typing as t\nfrom pstats import Stats\n\ntry:\n from cProfile import Profile\nexcept ImportError:\n from profile import Profile # type: ignore\n\nif t.TYPE_CHECKING:\n from _typeshed.wsgi import StartResponse\n from _typeshed.wsgi import WSGIApplication\n from _typeshed.wsgi import WSGIEnvironment\n\n\nclass ProfilerMiddleware:\n \"\"\"Wrap a WSGI application and profile the execution of each\n request. Responses are buffered so that timings are more exact.\n\n If ``stream`` is given, :class:`pstats.Stats` are written to it\n after each request. If ``profile_dir`` is given, :mod:`cProfile`\n data files are saved to that directory, one file per request.\n\n The filename can be customized by passing ``filename_format``. 
If\n it is a string, it will be formatted using :meth:`str.format` with\n the following fields available:\n\n - ``{method}`` - The request method; GET, POST, etc.\n - ``{path}`` - The request path or 'root' should one not exist.\n - ``{elapsed}`` - The elapsed time of the request.\n - ``{time}`` - The time of the request.\n\n If it is a callable, it will be called with the WSGI ``environ``\n dict and should return a filename.\n\n :param app: The WSGI application to wrap.\n :param stream: Write stats to this stream. Disable with ``None``.\n :param sort_by: A tuple of columns to sort stats by. See\n :meth:`pstats.Stats.sort_stats`.\n :param restrictions: A tuple of restrictions to filter stats by. See\n :meth:`pstats.Stats.print_stats`.\n :param profile_dir: Save profile data files to this directory.\n :param filename_format: Format string for profile data file names,\n or a callable returning a name. See explanation above.\n\n .. code-block:: python\n\n from werkzeug.middleware.profiler import ProfilerMiddleware\n app = ProfilerMiddleware(app)\n\n .. versionchanged:: 0.15\n Stats are written even if ``profile_dir`` is given, and can be\n disable by passing ``stream=None``.\n\n .. versionadded:: 0.15\n Added ``filename_format``.\n\n .. versionadded:: 0.9\n Added ``restrictions`` and ``profile_dir``.\n \"\"\"\n\n def __init__(\n self,\n app: \"WSGIApplication\",\n stream: t.Union[t.IO[str], None] = sys.stdout,\n sort_by: t.Iterable[str] = (\"time\", \"calls\"),\n restrictions: t.Iterable[t.Union[str, int, float]] = (),\n profile_dir: t.Optional[str] = None,\n filename_format: str = \"{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof\",\n ) -> None:\n self._app = app\n self._stream = stream\n self._sort_by = sort_by\n self._restrictions = restrictions\n self._profile_dir = profile_dir\n self._filename_format = filename_format\n\n def __call__(\n self, environ: \"WSGIEnvironment\", start_response: \"StartResponse\"\n ) -> t.Iterable[bytes]:\n response_body: t.List[bytes] = []\n\n def catching_start_response(status, headers, exc_info=None): # type: ignore\n start_response(status, headers, exc_info)\n return response_body.append\n\n def runapp() -> None:\n app_iter = self._app(\n environ, t.cast(\"StartResponse\", catching_start_response)\n )\n response_body.extend(app_iter)\n\n if hasattr(app_iter, \"close\"):\n app_iter.close()\n\n profile = Profile()\n start = time.time()\n profile.runcall(runapp)\n body = b\"\".join(response_body)\n elapsed = time.time() - start\n\n if self._profile_dir is not None:\n if callable(self._filename_format):\n filename = self._filename_format(environ)\n else:\n filename = self._filename_format.format(\n method=environ[\"REQUEST_METHOD\"],\n path=environ[\"PATH_INFO\"].strip(\"/\").replace(\"/\", \".\") or \"root\",\n elapsed=elapsed * 1000.0,\n time=time.time(),\n )\n filename = os.path.join(self._profile_dir, filename)\n profile.dump_stats(filename)\n\n if self._stream is not None:\n stats = Stats(profile, stream=self._stream)\n stats.sort_stats(*self._sort_by)\n print(\"-\" * 80, file=self._stream)\n path_info = environ.get(\"PATH_INFO\", \"\")\n print(f\"PATH: {path_info!r}\", file=self._stream)\n stats.print_stats(*self._restrictions)\n print(f\"{'-' * 80}\\n\", file=self._stream)\n\n return [body]\n", "path": "src/werkzeug/middleware/profiler.py"}]} | 1,958 | 149 |
gh_patches_debug_218 | rasdani/github-patches | git_diff | TheAlgorithms__Python-7054 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add typing to maths/segmented_sieve.py
### Describe your change:
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [ ] Documentation change?
### Checklist:
* [ ] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [ ] This pull request is all my own work -- I have not plagiarized.
* [ ] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation.
* [ ] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `maths/segmented_sieve.py`
Content:
```
1 """Segmented Sieve."""
2
3 import math
4
5
6 def sieve(n):
7 """Segmented Sieve."""
8 in_prime = []
9 start = 2
10 end = int(math.sqrt(n)) # Size of every segment
11 temp = [True] * (end + 1)
12 prime = []
13
14 while start <= end:
15 if temp[start] is True:
16 in_prime.append(start)
17 for i in range(start * start, end + 1, start):
18 temp[i] = False
19 start += 1
20 prime += in_prime
21
22 low = end + 1
23 high = min(2 * end, n)
24
25 while low <= n:
26 temp = [True] * (high - low + 1)
27 for each in in_prime:
28
29 t = math.floor(low / each) * each
30 if t < low:
31 t += each
32
33 for j in range(t, high + 1, each):
34 temp[j - low] = False
35
36 for j in range(len(temp)):
37 if temp[j] is True:
38 prime.append(j + low)
39
40 low = high + 1
41 high = min(high + end, n)
42
43 return prime
44
45
46 print(sieve(10**6))
47
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/maths/segmented_sieve.py b/maths/segmented_sieve.py
--- a/maths/segmented_sieve.py
+++ b/maths/segmented_sieve.py
@@ -3,7 +3,7 @@
import math
-def sieve(n):
+def sieve(n: int) -> list[int]:
"""Segmented Sieve."""
in_prime = []
start = 2
| {"golden_diff": "diff --git a/maths/segmented_sieve.py b/maths/segmented_sieve.py\n--- a/maths/segmented_sieve.py\n+++ b/maths/segmented_sieve.py\n@@ -3,7 +3,7 @@\n import math\r\n \r\n \r\n-def sieve(n):\r\n+def sieve(n: int) -> list[int]:\r\n \"\"\"Segmented Sieve.\"\"\"\r\n in_prime = []\r\n start = 2\n", "issue": "Add typing to maths/segmented_sieve.py\n### Describe your change:\r\n\r\n\r\n\r\n* [ ] Add an algorithm?\r\n* [ ] Fix a bug or typo in an existing algorithm?\r\n* [ ] Documentation change?\r\n\r\n### Checklist:\r\n* [ ] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).\r\n* [ ] This pull request is all my own work -- I have not plagiarized.\r\n* [ ] I know that pull requests will not be merged if they fail the automated tests.\r\n* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.\r\n* [ ] All new Python files are placed inside an existing directory.\r\n* [ ] All filenames are in all lowercase characters with no spaces or dashes.\r\n* [ ] All functions and variable names follow Python naming conventions.\r\n* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).\r\n* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.\r\n* [ ] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation.\r\n* [ ] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.\r\n\n", "before_files": [{"content": "\"\"\"Segmented Sieve.\"\"\"\r\n\r\nimport math\r\n\r\n\r\ndef sieve(n):\r\n \"\"\"Segmented Sieve.\"\"\"\r\n in_prime = []\r\n start = 2\r\n end = int(math.sqrt(n)) # Size of every segment\r\n temp = [True] * (end + 1)\r\n prime = []\r\n\r\n while start <= end:\r\n if temp[start] is True:\r\n in_prime.append(start)\r\n for i in range(start * start, end + 1, start):\r\n temp[i] = False\r\n start += 1\r\n prime += in_prime\r\n\r\n low = end + 1\r\n high = min(2 * end, n)\r\n\r\n while low <= n:\r\n temp = [True] * (high - low + 1)\r\n for each in in_prime:\r\n\r\n t = math.floor(low / each) * each\r\n if t < low:\r\n t += each\r\n\r\n for j in range(t, high + 1, each):\r\n temp[j - low] = False\r\n\r\n for j in range(len(temp)):\r\n if temp[j] is True:\r\n prime.append(j + low)\r\n\r\n low = high + 1\r\n high = min(high + end, n)\r\n\r\n return prime\r\n\r\n\r\nprint(sieve(10**6))\r\n", "path": "maths/segmented_sieve.py"}], "after_files": [{"content": "\"\"\"Segmented Sieve.\"\"\"\r\n\r\nimport math\r\n\r\n\r\ndef sieve(n: int) -> list[int]:\r\n \"\"\"Segmented Sieve.\"\"\"\r\n in_prime = []\r\n start = 2\r\n end = int(math.sqrt(n)) # Size of every segment\r\n temp = [True] * (end + 1)\r\n prime = []\r\n\r\n while start <= end:\r\n if temp[start] is True:\r\n in_prime.append(start)\r\n for i in range(start * start, end + 1, start):\r\n temp[i] = False\r\n start += 1\r\n prime += in_prime\r\n\r\n low = end + 1\r\n high = min(2 * end, n)\r\n\r\n while low <= n:\r\n temp = [True] * (high - low + 1)\r\n for each in in_prime:\r\n\r\n t = math.floor(low / each) * each\r\n if t < low:\r\n t += each\r\n\r\n for j in range(t, high + 1, each):\r\n temp[j - low] = False\r\n\r\n for j in range(len(temp)):\r\n if temp[j] is True:\r\n prime.append(j + low)\r\n\r\n low = high + 1\r\n high = min(high + end, n)\r\n\r\n return prime\r\n\r\n\r\nprint(sieve(10**6))\r\n", 
"path": "maths/segmented_sieve.py"}]} | 920 | 92 |
gh_patches_debug_16406 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-1583 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
sitecustomize is being run multiple times
When `opentelemetry-instrument` is run, the path of `opentelemetry-python`'s `sitecustomize.py` file is added to `PYTHONPATH`. This works fine unless the command executed by `opentelemetry-instrument` is also calling the `python` executable, which would make this `sitecustomize` be executed more than once. This is bad because this means multiple instrumentations may happen.
I'll be modifying `sitecustomize` to remove its path from `PYTHONPATH` after it has been executed.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import sys
16 from logging import getLogger
17 from os import environ, path
18
19 from pkg_resources import iter_entry_points
20
21 from opentelemetry.environment_variables import (
22 OTEL_PYTHON_DISABLED_INSTRUMENTATIONS,
23 )
24
25 logger = getLogger(__file__)
26
27
28 def _load_distros():
29 for entry_point in iter_entry_points("opentelemetry_distro"):
30 try:
31 entry_point.load()().configure() # type: ignore
32 logger.debug("Distribution %s configured", entry_point.name)
33 except Exception as exc: # pylint: disable=broad-except
34 logger.exception(
35 "Distribution %s configuration failed", entry_point.name
36 )
37 raise exc
38
39
40 def _load_instrumentors():
41 package_to_exclude = environ.get(OTEL_PYTHON_DISABLED_INSTRUMENTATIONS, [])
42 if isinstance(package_to_exclude, str):
43 package_to_exclude = package_to_exclude.split(",")
44 # to handle users entering "requests , flask" or "requests, flask" with spaces
45 package_to_exclude = [x.strip() for x in package_to_exclude]
46
47 for entry_point in iter_entry_points("opentelemetry_instrumentor"):
48 try:
49 if entry_point.name in package_to_exclude:
50 logger.debug(
51 "Instrumentation skipped for library %s", entry_point.name
52 )
53 continue
54 entry_point.load()().instrument() # type: ignore
55 logger.debug("Instrumented %s", entry_point.name)
56 except Exception as exc: # pylint: disable=broad-except
57 logger.exception("Instrumenting of %s failed", entry_point.name)
58 raise exc
59
60
61 def _load_configurators():
62 configured = None
63 for entry_point in iter_entry_points("opentelemetry_configurator"):
64 if configured is not None:
65 logger.warning(
66 "Configuration of %s not loaded, %s already loaded",
67 entry_point.name,
68 configured,
69 )
70 continue
71 try:
72 entry_point.load()().configure() # type: ignore
73 configured = entry_point.name
74 except Exception as exc: # pylint: disable=broad-except
75 logger.exception("Configuration of %s failed", entry_point.name)
76 raise exc
77
78
79 def initialize():
80 try:
81 _load_distros()
82 _load_configurators()
83 _load_instrumentors()
84 except Exception: # pylint: disable=broad-except
85 logger.exception("Failed to auto initialize opentelemetry")
86
87
88 if (
89 hasattr(sys, "argv")
90 and sys.argv[0].split(path.sep)[-1] == "celery"
91 and "worker" in sys.argv[1:]
92 ):
93 from celery.signals import worker_process_init # pylint:disable=E0401
94
95 @worker_process_init.connect(weak=False)
96 def init_celery(*args, **kwargs):
97 initialize()
98
99
100 else:
101 initialize()
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py
+++ b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py
@@ -15,6 +15,8 @@
import sys
from logging import getLogger
from os import environ, path
+from os.path import abspath, dirname, pathsep
+from re import sub
from pkg_resources import iter_entry_points
@@ -83,6 +85,12 @@
_load_instrumentors()
except Exception: # pylint: disable=broad-except
logger.exception("Failed to auto initialize opentelemetry")
+ finally:
+ environ["PYTHONPATH"] = sub(
+ r"{}{}?".format(dirname(abspath(__file__)), pathsep),
+ "",
+ environ["PYTHONPATH"],
+ )
if (
| {"golden_diff": "diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\n--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\n+++ b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py\n@@ -15,6 +15,8 @@\n import sys\n from logging import getLogger\n from os import environ, path\n+from os.path import abspath, dirname, pathsep\n+from re import sub\n \n from pkg_resources import iter_entry_points\n \n@@ -83,6 +85,12 @@\n _load_instrumentors()\n except Exception: # pylint: disable=broad-except\n logger.exception(\"Failed to auto initialize opentelemetry\")\n+ finally:\n+ environ[\"PYTHONPATH\"] = sub(\n+ r\"{}{}?\".format(dirname(abspath(__file__)), pathsep),\n+ \"\",\n+ environ[\"PYTHONPATH\"],\n+ )\n \n \n if (\n", "issue": "sitecustomize is being run multiple times\nWhen `opentelemetry-instrument` is run, the path of `opentelemetry-python`'s `sitecustomize.py` file is added to `PYTHONPATH`. This works fine unless the command executed by `opentelemetry-instrument` is also calling the `python` executable, which would make this `sitecustomize` be executed more than once. This is bad because this means multiple instrumentations may happen.\r\n\r\nI'll be modifying `sitecustomize` to remove its path from `PYTHONPATH` after it has been executed.\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport sys\nfrom logging import getLogger\nfrom os import environ, path\n\nfrom pkg_resources import iter_entry_points\n\nfrom opentelemetry.environment_variables import (\n OTEL_PYTHON_DISABLED_INSTRUMENTATIONS,\n)\n\nlogger = getLogger(__file__)\n\n\ndef _load_distros():\n for entry_point in iter_entry_points(\"opentelemetry_distro\"):\n try:\n entry_point.load()().configure() # type: ignore\n logger.debug(\"Distribution %s configured\", entry_point.name)\n except Exception as exc: # pylint: disable=broad-except\n logger.exception(\n \"Distribution %s configuration failed\", entry_point.name\n )\n raise exc\n\n\ndef _load_instrumentors():\n package_to_exclude = environ.get(OTEL_PYTHON_DISABLED_INSTRUMENTATIONS, [])\n if isinstance(package_to_exclude, str):\n package_to_exclude = package_to_exclude.split(\",\")\n # to handle users entering \"requests , flask\" or \"requests, flask\" with spaces\n package_to_exclude = [x.strip() for x in package_to_exclude]\n\n for entry_point in iter_entry_points(\"opentelemetry_instrumentor\"):\n try:\n if entry_point.name in package_to_exclude:\n logger.debug(\n \"Instrumentation skipped for library %s\", entry_point.name\n )\n continue\n entry_point.load()().instrument() # type: ignore\n logger.debug(\"Instrumented %s\", entry_point.name)\n except Exception as exc: # pylint: disable=broad-except\n logger.exception(\"Instrumenting 
of %s failed\", entry_point.name)\n raise exc\n\n\ndef _load_configurators():\n configured = None\n for entry_point in iter_entry_points(\"opentelemetry_configurator\"):\n if configured is not None:\n logger.warning(\n \"Configuration of %s not loaded, %s already loaded\",\n entry_point.name,\n configured,\n )\n continue\n try:\n entry_point.load()().configure() # type: ignore\n configured = entry_point.name\n except Exception as exc: # pylint: disable=broad-except\n logger.exception(\"Configuration of %s failed\", entry_point.name)\n raise exc\n\n\ndef initialize():\n try:\n _load_distros()\n _load_configurators()\n _load_instrumentors()\n except Exception: # pylint: disable=broad-except\n logger.exception(\"Failed to auto initialize opentelemetry\")\n\n\nif (\n hasattr(sys, \"argv\")\n and sys.argv[0].split(path.sep)[-1] == \"celery\"\n and \"worker\" in sys.argv[1:]\n):\n from celery.signals import worker_process_init # pylint:disable=E0401\n\n @worker_process_init.connect(weak=False)\n def init_celery(*args, **kwargs):\n initialize()\n\n\nelse:\n initialize()\n", "path": "opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport sys\nfrom logging import getLogger\nfrom os import environ, path\nfrom os.path import abspath, dirname, pathsep\nfrom re import sub\n\nfrom pkg_resources import iter_entry_points\n\nfrom opentelemetry.environment_variables import (\n OTEL_PYTHON_DISABLED_INSTRUMENTATIONS,\n)\n\nlogger = getLogger(__file__)\n\n\ndef _load_distros():\n for entry_point in iter_entry_points(\"opentelemetry_distro\"):\n try:\n entry_point.load()().configure() # type: ignore\n logger.debug(\"Distribution %s configured\", entry_point.name)\n except Exception as exc: # pylint: disable=broad-except\n logger.exception(\n \"Distribution %s configuration failed\", entry_point.name\n )\n raise exc\n\n\ndef _load_instrumentors():\n package_to_exclude = environ.get(OTEL_PYTHON_DISABLED_INSTRUMENTATIONS, [])\n if isinstance(package_to_exclude, str):\n package_to_exclude = package_to_exclude.split(\",\")\n # to handle users entering \"requests , flask\" or \"requests, flask\" with spaces\n package_to_exclude = [x.strip() for x in package_to_exclude]\n\n for entry_point in iter_entry_points(\"opentelemetry_instrumentor\"):\n try:\n if entry_point.name in package_to_exclude:\n logger.debug(\n \"Instrumentation skipped for library %s\", entry_point.name\n )\n continue\n entry_point.load()().instrument() # type: ignore\n logger.debug(\"Instrumented %s\", entry_point.name)\n except Exception as exc: # pylint: disable=broad-except\n logger.exception(\"Instrumenting of %s failed\", entry_point.name)\n raise exc\n\n\ndef _load_configurators():\n configured = None\n for entry_point in iter_entry_points(\"opentelemetry_configurator\"):\n if configured is not None:\n logger.warning(\n \"Configuration of %s not loaded, %s already 
loaded\",\n entry_point.name,\n configured,\n )\n continue\n try:\n entry_point.load()().configure() # type: ignore\n configured = entry_point.name\n except Exception as exc: # pylint: disable=broad-except\n logger.exception(\"Configuration of %s failed\", entry_point.name)\n raise exc\n\n\ndef initialize():\n try:\n _load_distros()\n _load_configurators()\n _load_instrumentors()\n except Exception: # pylint: disable=broad-except\n logger.exception(\"Failed to auto initialize opentelemetry\")\n finally:\n environ[\"PYTHONPATH\"] = sub(\n r\"{}{}?\".format(dirname(abspath(__file__)), pathsep),\n \"\",\n environ[\"PYTHONPATH\"],\n )\n\n\nif (\n hasattr(sys, \"argv\")\n and sys.argv[0].split(path.sep)[-1] == \"celery\"\n and \"worker\" in sys.argv[1:]\n):\n from celery.signals import worker_process_init # pylint:disable=E0401\n\n @worker_process_init.connect(weak=False)\n def init_celery(*args, **kwargs):\n initialize()\n\n\nelse:\n initialize()\n", "path": "opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py"}]} | 1,329 | 241 |
gh_patches_debug_10092 | rasdani/github-patches | git_diff | beeware__toga-645 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ImageView example not working on Linux
## Expected Behavior
<!--- If you're describing a bug, tell us what you expect to happen. -->
ImageView demos display both a local image file and one from a web url
<!--- If you're requesting a new feature, tell us why you'd like this feature. -->
## Current Behavior
<!--- If you're describing a bug, what currently happens? -->
Displays the wrong path variable when image not found
Concatenates local application path and url when attempting to display web url
problem in rehint() function, missing attribute get_height
I tried to address the first two issues with #532, still need to work on the 3rd.
## Steps to reproduce
<!--- Provide a set of steps describing how to reproduce this bug. If you have a live example, provide the link below -->
1. run the application in examples/imageview
2.
3.
## Your Environment
<!--- Provide details on your current environment you found the bug in -->
* Python Version (list the specific version number)
* Operating System and Version (select from the following and list the specific version number; if your OS is not listed, list that as well)
- [ ] macOS - version:
- [ x] Linux - distro: - version: Ubuntu 18.04
- [ ] Windows - version:
- [ ] Other - name: - version:
* Toga Target (the type of app you are trying to generate)
- [ ] android
- [ ] cocoa
- [ ] django
- [x ] gtk
- [ ] iOS
- [ ] tvOS
- [ ] watchOS
- [ ] winforms
- [ ] win32
- [ ] Other (please specify)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/imageview/imageview/app.py`
Content:
```
1 import os
2 import toga
3 from toga.style.pack import *
4
5 class ImageViewApp(toga.App):
6 def startup(self):
7 self.main_window = toga.MainWindow(title=self.name)
8
9 box = toga.Box()
10 box.style.padding = 40
11 box.style.update(alignment=CENTER)
12 box.style.update(direction=COLUMN)
13
14 # image from local path
15 # load brutus.png from the package
16 # We set the style width/height parameters for this one
17 image_from_path = toga.Image('resources/brutus.png')
18 imageview_from_path = toga.ImageView(image_from_path)
19 imageview_from_path.style.update(height=72)
20 imageview_from_path.style.update(width=72)
21 box.add(imageview_from_path)
22
23 # image from remote URL
24 # no style parameters - we let Pack determine how to allocate
25 # the space
26 image_from_url = toga.Image('https://pybee.org/project/projects/libraries/toga/toga.png')
27 imageview_from_url = toga.ImageView(image_from_url)
28 box.add(imageview_from_url)
29
30 self.main_window.content = box
31 self.main_window.show()
32
33 def main():
34 return ImageViewApp('ImageView', 'org.pybee.widgets.imageview')
35
36
37 if __name__ == '__main__':
38 app = main()
39 app.main_loop()
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/imageview/imageview/app.py b/examples/imageview/imageview/app.py
--- a/examples/imageview/imageview/app.py
+++ b/examples/imageview/imageview/app.py
@@ -14,7 +14,7 @@
# image from local path
# load brutus.png from the package
# We set the style width/height parameters for this one
- image_from_path = toga.Image('resources/brutus.png')
+ image_from_path = toga.Image('../resources/brutus.png')
imageview_from_path = toga.ImageView(image_from_path)
imageview_from_path.style.update(height=72)
imageview_from_path.style.update(width=72)
| {"golden_diff": "diff --git a/examples/imageview/imageview/app.py b/examples/imageview/imageview/app.py\n--- a/examples/imageview/imageview/app.py\n+++ b/examples/imageview/imageview/app.py\n@@ -14,7 +14,7 @@\n # image from local path\n # load brutus.png from the package\n # We set the style width/height parameters for this one\n- image_from_path = toga.Image('resources/brutus.png')\n+ image_from_path = toga.Image('../resources/brutus.png')\n imageview_from_path = toga.ImageView(image_from_path)\n imageview_from_path.style.update(height=72)\n imageview_from_path.style.update(width=72)\n", "issue": "ImageView example not working on Linux\n## Expected Behavior\r\n<!--- If you're describing a bug, tell us what you expect to happen. -->\r\nImageView demos display both a local image file and one from a web url\r\n<!--- If you're requesting a new feature, tell us why you'd like this feature. -->\r\n\r\n\r\n## Current Behavior\r\n<!--- If you're describing a bug, what currently happens? -->\r\nDisplays the wrong path variable when image not found\r\nConcatenates local application path and url when attempting to display web url\r\nproblem in rehint() function, missing attribute get_height\r\n\r\nI tried to address the first two issues with #532, still need to work on the 3rd.\r\n\r\n## Steps to reproduce\r\n<!--- Provide a set of steps describing how to reproduce this bug. If you have a live example, provide the link below -->\r\n1. run the application in examples/imageview\r\n\r\n2.\r\n\r\n3.\r\n\r\n\r\n## Your Environment\r\n<!--- Provide details on your current environment you found the bug in -->\r\n\r\n* Python Version (list the specific version number)\r\n\r\n* Operating System and Version (select from the following and list the specific version number; if your OS is not listed, list that as well)\r\n\r\n - [ ] macOS - version: \r\n - [ x] Linux - distro: - version: Ubuntu 18.04\r\n - [ ] Windows - version:\r\n - [ ] Other - name: - version:\r\n\r\n* Toga Target (the type of app you are trying to generate)\r\n \r\n - [ ] android\r\n - [ ] cocoa\r\n - [ ] django \r\n - [x ] gtk\r\n - [ ] iOS\r\n - [ ] tvOS\r\n - [ ] watchOS\r\n - [ ] winforms \r\n - [ ] win32\r\n - [ ] Other (please specify)\r\n\n", "before_files": [{"content": "import os\nimport toga\nfrom toga.style.pack import *\n\nclass ImageViewApp(toga.App):\n def startup(self):\n self.main_window = toga.MainWindow(title=self.name)\n \n box = toga.Box()\n box.style.padding = 40\n box.style.update(alignment=CENTER)\n box.style.update(direction=COLUMN)\n \n # image from local path\n # load brutus.png from the package\n # We set the style width/height parameters for this one\n image_from_path = toga.Image('resources/brutus.png')\n imageview_from_path = toga.ImageView(image_from_path)\n imageview_from_path.style.update(height=72)\n imageview_from_path.style.update(width=72)\n box.add(imageview_from_path)\n\n # image from remote URL\n # no style parameters - we let Pack determine how to allocate\n # the space\n image_from_url = toga.Image('https://pybee.org/project/projects/libraries/toga/toga.png')\n imageview_from_url = toga.ImageView(image_from_url)\n box.add(imageview_from_url)\n \n self.main_window.content = box\n self.main_window.show()\n\ndef main():\n return ImageViewApp('ImageView', 'org.pybee.widgets.imageview')\n\n\nif __name__ == '__main__':\n app = main()\n app.main_loop()\n", "path": "examples/imageview/imageview/app.py"}], "after_files": [{"content": "import os\nimport toga\nfrom toga.style.pack import *\n\nclass 
ImageViewApp(toga.App):\n def startup(self):\n self.main_window = toga.MainWindow(title=self.name)\n \n box = toga.Box()\n box.style.padding = 40\n box.style.update(alignment=CENTER)\n box.style.update(direction=COLUMN)\n \n # image from local path\n # load brutus.png from the package\n # We set the style width/height parameters for this one\n image_from_path = toga.Image('../resources/brutus.png')\n imageview_from_path = toga.ImageView(image_from_path)\n imageview_from_path.style.update(height=72)\n imageview_from_path.style.update(width=72)\n box.add(imageview_from_path)\n\n # image from remote URL\n # no style parameters - we let Pack determine how to allocate\n # the space\n image_from_url = toga.Image('https://pybee.org/project/projects/libraries/toga/toga.png')\n imageview_from_url = toga.ImageView(image_from_url)\n box.add(imageview_from_url)\n \n self.main_window.content = box\n self.main_window.show()\n\ndef main():\n return ImageViewApp('ImageView', 'org.pybee.widgets.imageview')\n\n\nif __name__ == '__main__':\n app = main()\n app.main_loop()\n", "path": "examples/imageview/imageview/app.py"}]} | 1,011 | 154 |
gh_patches_debug_39176 | rasdani/github-patches | git_diff | pulp__pulpcore-2768 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
As a plugin writer, I want to have a function for touching content units
Author: @lubosmj (lmjachky)
Redmine Issue: 9419, https://pulp.plan.io/issues/9419
---
In the PR https://github.com/pulp/pulpcore/pull/1624, we introduced a method that uses `bulk_touch` for updating timestamps of content units. We should expose this method to all plugin writers (e.g., pulp_container currently implements the same method - this creates unnecessary duplicates).
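A rough sketch of the kind of helper being requested (the name and final location are assumptions, not the actual pulpcore API):

```python
# Hypothetical plugin-facing helper; pulpcore may name or place this differently.
from pulpcore.app.models import Content


def touch_content_units(content_pks):
    """Bulk-update the timestamps of the given content units."""
    # The repository modify() action already calls QuerySet.touch() in the same way.
    Content.objects.filter(pk__in=content_pks).touch()
```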
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pulpcore/plugin/actions.py`
Content:
```
1 from gettext import gettext as _
2 from drf_spectacular.utils import extend_schema
3 from rest_framework.decorators import action
4 from rest_framework.serializers import ValidationError
5
6 from pulpcore.app import tasks
7 from pulpcore.app.models import Content, RepositoryVersion
8 from pulpcore.app.response import OperationPostponedResponse
9 from pulpcore.app.serializers import (
10 AsyncOperationResponseSerializer,
11 RepositoryAddRemoveContentSerializer,
12 )
13 from pulpcore.app.viewsets import NamedModelViewSet
14 from pulpcore.tasking.tasks import dispatch
15
16
17 __all__ = ["ModifyRepositoryActionMixin"]
18
19
20 class ModifyRepositoryActionMixin:
21 @extend_schema(
22 description="Trigger an asynchronous task to create a new repository version.",
23 summary="Modify Repository Content",
24 responses={202: AsyncOperationResponseSerializer},
25 )
26 @action(detail=True, methods=["post"], serializer_class=RepositoryAddRemoveContentSerializer)
27 def modify(self, request, pk):
28 """
29 Queues a task that creates a new RepositoryVersion by adding and removing content units
30 """
31 add_content_units = {}
32 remove_content_units = {}
33
34 repository = self.get_object()
35 serializer = self.get_serializer(data=request.data)
36 serializer.is_valid(raise_exception=True)
37
38 if "base_version" in request.data:
39 base_version_pk = self.get_resource(request.data["base_version"], RepositoryVersion).pk
40 else:
41 base_version_pk = None
42
43 if "add_content_units" in request.data:
44 for url in request.data["add_content_units"]:
45 add_content_units[NamedModelViewSet.extract_pk(url)] = url
46
47 content_units_pks = set(add_content_units.keys())
48 existing_content_units = Content.objects.filter(pk__in=content_units_pks)
49 existing_content_units.touch()
50
51 self.verify_content_units(existing_content_units, add_content_units)
52
53 add_content_units = list(add_content_units.keys())
54
55 if "remove_content_units" in request.data:
56 if "*" in request.data["remove_content_units"]:
57 remove_content_units = ["*"]
58 else:
59 for url in request.data["remove_content_units"]:
60 remove_content_units[NamedModelViewSet.extract_pk(url)] = url
61 content_units_pks = set(remove_content_units.keys())
62 existing_content_units = Content.objects.filter(pk__in=content_units_pks)
63 self.verify_content_units(existing_content_units, remove_content_units)
64 remove_content_units = list(remove_content_units.keys())
65
66 task = dispatch(
67 tasks.repository.add_and_remove,
68 exclusive_resources=[repository],
69 kwargs={
70 "repository_pk": pk,
71 "base_version_pk": base_version_pk,
72 "add_content_units": add_content_units,
73 "remove_content_units": remove_content_units,
74 },
75 )
76 return OperationPostponedResponse(task, request)
77
78 def verify_content_units(self, content_units, all_content_units):
79 """Verify referenced content units."""
80 existing_content_units_pks = content_units.values_list("pk", flat=True)
81 existing_content_units_pks = {str(pk) for pk in existing_content_units_pks}
82
83 missing_pks = set(all_content_units.keys()) - existing_content_units_pks
84 if missing_pks:
85 missing_hrefs = [all_content_units[pk] for pk in missing_pks]
86 raise ValidationError(
87 _("Could not find the following content units: {}").format(missing_hrefs)
88 )
89
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pulpcore/plugin/actions.py b/pulpcore/plugin/actions.py
--- a/pulpcore/plugin/actions.py
+++ b/pulpcore/plugin/actions.py
@@ -1,4 +1,5 @@
from gettext import gettext as _
+
from drf_spectacular.utils import extend_schema
from rest_framework.decorators import action
from rest_framework.serializers import ValidationError
@@ -48,7 +49,7 @@
existing_content_units = Content.objects.filter(pk__in=content_units_pks)
existing_content_units.touch()
- self.verify_content_units(existing_content_units, add_content_units)
+ raise_for_unknown_content_units(existing_content_units, add_content_units)
add_content_units = list(add_content_units.keys())
@@ -60,7 +61,7 @@
remove_content_units[NamedModelViewSet.extract_pk(url)] = url
content_units_pks = set(remove_content_units.keys())
existing_content_units = Content.objects.filter(pk__in=content_units_pks)
- self.verify_content_units(existing_content_units, remove_content_units)
+ raise_for_unknown_content_units(existing_content_units, remove_content_units)
remove_content_units = list(remove_content_units.keys())
task = dispatch(
@@ -75,14 +76,24 @@
)
return OperationPostponedResponse(task, request)
- def verify_content_units(self, content_units, all_content_units):
- """Verify referenced content units."""
- existing_content_units_pks = content_units.values_list("pk", flat=True)
- existing_content_units_pks = {str(pk) for pk in existing_content_units_pks}
-
- missing_pks = set(all_content_units.keys()) - existing_content_units_pks
- if missing_pks:
- missing_hrefs = [all_content_units[pk] for pk in missing_pks]
- raise ValidationError(
- _("Could not find the following content units: {}").format(missing_hrefs)
- )
+
+def raise_for_unknown_content_units(existing_content_units, content_units_pks_hrefs):
+ """Verify if all the specified content units were found in the database.
+
+ Args:
+ existing_content_units (pulpcore.plugin.models.Content): Content filtered by
+ specified_content_units.
+ content_units_pks_hrefs (dict): An original dictionary of pk-href pairs that
+ are used for the verification.
+ Raises:
+ ValidationError: If some of the referenced content units are not present in the database
+ """
+ existing_content_units_pks = existing_content_units.values_list("pk", flat=True)
+ existing_content_units_pks = set(map(str, existing_content_units_pks))
+
+ missing_pks = set(content_units_pks_hrefs.keys()) - existing_content_units_pks
+ if missing_pks:
+ missing_hrefs = [content_units_pks_hrefs[pk] for pk in missing_pks]
+ raise ValidationError(
+ _("Could not find the following content units: {}").format(missing_hrefs)
+ )
| {"golden_diff": "diff --git a/pulpcore/plugin/actions.py b/pulpcore/plugin/actions.py\n--- a/pulpcore/plugin/actions.py\n+++ b/pulpcore/plugin/actions.py\n@@ -1,4 +1,5 @@\n from gettext import gettext as _\n+\n from drf_spectacular.utils import extend_schema\n from rest_framework.decorators import action\n from rest_framework.serializers import ValidationError\n@@ -48,7 +49,7 @@\n existing_content_units = Content.objects.filter(pk__in=content_units_pks)\n existing_content_units.touch()\n \n- self.verify_content_units(existing_content_units, add_content_units)\n+ raise_for_unknown_content_units(existing_content_units, add_content_units)\n \n add_content_units = list(add_content_units.keys())\n \n@@ -60,7 +61,7 @@\n remove_content_units[NamedModelViewSet.extract_pk(url)] = url\n content_units_pks = set(remove_content_units.keys())\n existing_content_units = Content.objects.filter(pk__in=content_units_pks)\n- self.verify_content_units(existing_content_units, remove_content_units)\n+ raise_for_unknown_content_units(existing_content_units, remove_content_units)\n remove_content_units = list(remove_content_units.keys())\n \n task = dispatch(\n@@ -75,14 +76,24 @@\n )\n return OperationPostponedResponse(task, request)\n \n- def verify_content_units(self, content_units, all_content_units):\n- \"\"\"Verify referenced content units.\"\"\"\n- existing_content_units_pks = content_units.values_list(\"pk\", flat=True)\n- existing_content_units_pks = {str(pk) for pk in existing_content_units_pks}\n-\n- missing_pks = set(all_content_units.keys()) - existing_content_units_pks\n- if missing_pks:\n- missing_hrefs = [all_content_units[pk] for pk in missing_pks]\n- raise ValidationError(\n- _(\"Could not find the following content units: {}\").format(missing_hrefs)\n- )\n+\n+def raise_for_unknown_content_units(existing_content_units, content_units_pks_hrefs):\n+ \"\"\"Verify if all the specified content units were found in the database.\n+\n+ Args:\n+ existing_content_units (pulpcore.plugin.models.Content): Content filtered by\n+ specified_content_units.\n+ content_units_pks_hrefs (dict): An original dictionary of pk-href pairs that\n+ are used for the verification.\n+ Raises:\n+ ValidationError: If some of the referenced content units are not present in the database\n+ \"\"\"\n+ existing_content_units_pks = existing_content_units.values_list(\"pk\", flat=True)\n+ existing_content_units_pks = set(map(str, existing_content_units_pks))\n+\n+ missing_pks = set(content_units_pks_hrefs.keys()) - existing_content_units_pks\n+ if missing_pks:\n+ missing_hrefs = [content_units_pks_hrefs[pk] for pk in missing_pks]\n+ raise ValidationError(\n+ _(\"Could not find the following content units: {}\").format(missing_hrefs)\n+ )\n", "issue": "As a plugin writer, I want to have a function for touching content units\nAuthor: @lubosmj (lmjachky)\n\n\nRedmine Issue: 9419, https://pulp.plan.io/issues/9419\n\n---\n\nIn the PR https://github.com/pulp/pulpcore/pull/1624, we introduced a method that uses `bulk_touch` for updating timestamps of content units. 
We should expose this method to all plugin writers (e.g., pulp_container currently implements the same method - this creates unnecessary duplicates).\n\n\n\n", "before_files": [{"content": "from gettext import gettext as _\nfrom drf_spectacular.utils import extend_schema\nfrom rest_framework.decorators import action\nfrom rest_framework.serializers import ValidationError\n\nfrom pulpcore.app import tasks\nfrom pulpcore.app.models import Content, RepositoryVersion\nfrom pulpcore.app.response import OperationPostponedResponse\nfrom pulpcore.app.serializers import (\n AsyncOperationResponseSerializer,\n RepositoryAddRemoveContentSerializer,\n)\nfrom pulpcore.app.viewsets import NamedModelViewSet\nfrom pulpcore.tasking.tasks import dispatch\n\n\n__all__ = [\"ModifyRepositoryActionMixin\"]\n\n\nclass ModifyRepositoryActionMixin:\n @extend_schema(\n description=\"Trigger an asynchronous task to create a new repository version.\",\n summary=\"Modify Repository Content\",\n responses={202: AsyncOperationResponseSerializer},\n )\n @action(detail=True, methods=[\"post\"], serializer_class=RepositoryAddRemoveContentSerializer)\n def modify(self, request, pk):\n \"\"\"\n Queues a task that creates a new RepositoryVersion by adding and removing content units\n \"\"\"\n add_content_units = {}\n remove_content_units = {}\n\n repository = self.get_object()\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n\n if \"base_version\" in request.data:\n base_version_pk = self.get_resource(request.data[\"base_version\"], RepositoryVersion).pk\n else:\n base_version_pk = None\n\n if \"add_content_units\" in request.data:\n for url in request.data[\"add_content_units\"]:\n add_content_units[NamedModelViewSet.extract_pk(url)] = url\n\n content_units_pks = set(add_content_units.keys())\n existing_content_units = Content.objects.filter(pk__in=content_units_pks)\n existing_content_units.touch()\n\n self.verify_content_units(existing_content_units, add_content_units)\n\n add_content_units = list(add_content_units.keys())\n\n if \"remove_content_units\" in request.data:\n if \"*\" in request.data[\"remove_content_units\"]:\n remove_content_units = [\"*\"]\n else:\n for url in request.data[\"remove_content_units\"]:\n remove_content_units[NamedModelViewSet.extract_pk(url)] = url\n content_units_pks = set(remove_content_units.keys())\n existing_content_units = Content.objects.filter(pk__in=content_units_pks)\n self.verify_content_units(existing_content_units, remove_content_units)\n remove_content_units = list(remove_content_units.keys())\n\n task = dispatch(\n tasks.repository.add_and_remove,\n exclusive_resources=[repository],\n kwargs={\n \"repository_pk\": pk,\n \"base_version_pk\": base_version_pk,\n \"add_content_units\": add_content_units,\n \"remove_content_units\": remove_content_units,\n },\n )\n return OperationPostponedResponse(task, request)\n\n def verify_content_units(self, content_units, all_content_units):\n \"\"\"Verify referenced content units.\"\"\"\n existing_content_units_pks = content_units.values_list(\"pk\", flat=True)\n existing_content_units_pks = {str(pk) for pk in existing_content_units_pks}\n\n missing_pks = set(all_content_units.keys()) - existing_content_units_pks\n if missing_pks:\n missing_hrefs = [all_content_units[pk] for pk in missing_pks]\n raise ValidationError(\n _(\"Could not find the following content units: {}\").format(missing_hrefs)\n )\n", "path": "pulpcore/plugin/actions.py"}], "after_files": [{"content": "from gettext import 
gettext as _\n\nfrom drf_spectacular.utils import extend_schema\nfrom rest_framework.decorators import action\nfrom rest_framework.serializers import ValidationError\n\nfrom pulpcore.app import tasks\nfrom pulpcore.app.models import Content, RepositoryVersion\nfrom pulpcore.app.response import OperationPostponedResponse\nfrom pulpcore.app.serializers import (\n AsyncOperationResponseSerializer,\n RepositoryAddRemoveContentSerializer,\n)\nfrom pulpcore.app.viewsets import NamedModelViewSet\nfrom pulpcore.tasking.tasks import dispatch\n\n\n__all__ = [\"ModifyRepositoryActionMixin\"]\n\n\nclass ModifyRepositoryActionMixin:\n @extend_schema(\n description=\"Trigger an asynchronous task to create a new repository version.\",\n summary=\"Modify Repository Content\",\n responses={202: AsyncOperationResponseSerializer},\n )\n @action(detail=True, methods=[\"post\"], serializer_class=RepositoryAddRemoveContentSerializer)\n def modify(self, request, pk):\n \"\"\"\n Queues a task that creates a new RepositoryVersion by adding and removing content units\n \"\"\"\n add_content_units = {}\n remove_content_units = {}\n\n repository = self.get_object()\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n\n if \"base_version\" in request.data:\n base_version_pk = self.get_resource(request.data[\"base_version\"], RepositoryVersion).pk\n else:\n base_version_pk = None\n\n if \"add_content_units\" in request.data:\n for url in request.data[\"add_content_units\"]:\n add_content_units[NamedModelViewSet.extract_pk(url)] = url\n\n content_units_pks = set(add_content_units.keys())\n existing_content_units = Content.objects.filter(pk__in=content_units_pks)\n existing_content_units.touch()\n\n raise_for_unknown_content_units(existing_content_units, add_content_units)\n\n add_content_units = list(add_content_units.keys())\n\n if \"remove_content_units\" in request.data:\n if \"*\" in request.data[\"remove_content_units\"]:\n remove_content_units = [\"*\"]\n else:\n for url in request.data[\"remove_content_units\"]:\n remove_content_units[NamedModelViewSet.extract_pk(url)] = url\n content_units_pks = set(remove_content_units.keys())\n existing_content_units = Content.objects.filter(pk__in=content_units_pks)\n raise_for_unknown_content_units(existing_content_units, remove_content_units)\n remove_content_units = list(remove_content_units.keys())\n\n task = dispatch(\n tasks.repository.add_and_remove,\n exclusive_resources=[repository],\n kwargs={\n \"repository_pk\": pk,\n \"base_version_pk\": base_version_pk,\n \"add_content_units\": add_content_units,\n \"remove_content_units\": remove_content_units,\n },\n )\n return OperationPostponedResponse(task, request)\n\n\ndef raise_for_unknown_content_units(existing_content_units, content_units_pks_hrefs):\n \"\"\"Verify if all the specified content units were found in the database.\n\n Args:\n existing_content_units (pulpcore.plugin.models.Content): Content filtered by\n specified_content_units.\n content_units_pks_hrefs (dict): An original dictionary of pk-href pairs that\n are used for the verification.\n Raises:\n ValidationError: If some of the referenced content units are not present in the database\n \"\"\"\n existing_content_units_pks = existing_content_units.values_list(\"pk\", flat=True)\n existing_content_units_pks = set(map(str, existing_content_units_pks))\n\n missing_pks = set(content_units_pks_hrefs.keys()) - existing_content_units_pks\n if missing_pks:\n missing_hrefs = [content_units_pks_hrefs[pk] for pk in 
missing_pks]\n raise ValidationError(\n _(\"Could not find the following content units: {}\").format(missing_hrefs)\n )\n", "path": "pulpcore/plugin/actions.py"}]} | 1,254 | 657 |
gh_patches_debug_20885 | rasdani/github-patches | git_diff | nilearn__nilearn-394 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Ugly side plots in doc
They mask part of the code they are supposed to illustrate. See an example [here](http://nilearn.github.io/building_blocks/data_preparation.html#computing-the-mask).

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/connectivity/plot_adhd_covariance.py`
Content:
```
1 """
2 Computation of covariance matrix between brain regions
3 ======================================================
4
5 This example shows how to extract signals from regions defined by an atlas,
6 and to estimate a covariance matrix based on these signals.
7 """
8
9 plotted_subject = 0 # subject to plot
10
11
12 import matplotlib.pyplot as plt
13 import matplotlib
14 # Copied from matplotlib 1.2.0 for matplotlib 0.99 compatibility.
15 _bwr_data = ((0.0, 0.0, 1.0), (1.0, 1.0, 1.0), (1.0, 0.0, 0.0))
16 plt.cm.register_cmap(cmap=matplotlib.colors.LinearSegmentedColormap.from_list(
17 "bwr", _bwr_data))
18
19
20 def plot_matrices(cov, prec, title):
21 """Plot covariance and precision matrices, for a given processing. """
22
23 prec = prec.copy() # avoid side effects
24
25 # Display sparsity pattern
26 sparsity = prec == 0
27 plt.figure()
28 plt.imshow(sparsity, interpolation="nearest")
29 plt.title("%s / sparsity" % title)
30
31 # Put zeros on the diagonal, for graph clarity.
32 size = prec.shape[0]
33 prec[range(size), range(size)] = 0
34 span = max(abs(prec.min()), abs(prec.max()))
35
36 # Display covariance matrix
37 plt.figure()
38 plt.imshow(cov, interpolation="nearest",
39 vmin=-1, vmax=1, cmap=plt.cm.get_cmap("bwr"))
40 plt.colorbar()
41 plt.title("%s / covariance" % title)
42
43 # Display precision matrix
44 plt.figure()
45 plt.imshow(prec, interpolation="nearest",
46 vmin=-span, vmax=span,
47 cmap=plt.cm.get_cmap("bwr"))
48 plt.colorbar()
49 plt.title("%s / precision" % title)
50
51
52 # Fetching datasets ###########################################################
53 print("-- Fetching datasets ...")
54 from nilearn import datasets
55 msdl_atlas_dataset = datasets.fetch_msdl_atlas()
56 adhd_dataset = datasets.fetch_adhd()
57
58 # Extracting region signals ###################################################
59 import nilearn.image
60 import nilearn.input_data
61
62 from sklearn.externals.joblib import Memory
63 mem = Memory(".")
64
65 # Number of subjects to consider for group-sparse covariance
66 n_subjects = 10
67 subjects = []
68
69 func_filenames = adhd_dataset.func
70 confound_filenames = adhd_dataset.confounds
71 for func_filename, confound_filename in zip(func_filenames,
72 confound_filenames):
73 print("Processing file %s" % func_filename)
74
75 print("-- Computing confounds ...")
76 hv_confounds = mem.cache(nilearn.image.high_variance_confounds)(
77 func_filename)
78
79 print("-- Computing region signals ...")
80 masker = nilearn.input_data.NiftiMapsMasker(
81 msdl_atlas_dataset.maps, resampling_target="maps", detrend=True,
82 low_pass=None, high_pass=0.01, t_r=2.5, standardize=True,
83 memory=mem, memory_level=1, verbose=1)
84 region_ts = masker.fit_transform(func_filename,
85 confounds=[hv_confounds,
86 confound_filename])
87 subjects.append(region_ts)
88
89 # Computing group-sparse precision matrices ###################################
90 print("-- Computing group-sparse precision matrices ...")
91 from nilearn.group_sparse_covariance import GroupSparseCovarianceCV
92 gsc = GroupSparseCovarianceCV(verbose=2, n_jobs=3)
93 gsc.fit(subjects)
94
95 print("-- Computing graph-lasso precision matrices ...")
96 from sklearn import covariance
97 gl = covariance.GraphLassoCV(n_jobs=3)
98 gl.fit(subjects[plotted_subject])
99
100 # Displaying results ##########################################################
101 print("-- Displaying results")
102 title = "{0:d} GroupSparseCovariance $\\alpha={1:.2e}$".format(plotted_subject,
103 gsc.alpha_)
104 plot_matrices(gsc.covariances_[..., plotted_subject],
105 gsc.precisions_[..., plotted_subject], title)
106
107 title = "{0:d} GraphLasso $\\alpha={1:.2e}$".format(plotted_subject,
108 gl.alpha_)
109 plot_matrices(gl.covariance_, gl.precision_, title)
110
111 plt.show()
112
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/connectivity/plot_adhd_covariance.py b/examples/connectivity/plot_adhd_covariance.py
--- a/examples/connectivity/plot_adhd_covariance.py
+++ b/examples/connectivity/plot_adhd_covariance.py
@@ -20,13 +20,11 @@
def plot_matrices(cov, prec, title):
"""Plot covariance and precision matrices, for a given processing. """
+ # Compute sparsity pattern
+ sparsity = (prec == 0)
+
prec = prec.copy() # avoid side effects
- # Display sparsity pattern
- sparsity = prec == 0
- plt.figure()
- plt.imshow(sparsity, interpolation="nearest")
- plt.title("%s / sparsity" % title)
# Put zeros on the diagonal, for graph clarity.
size = prec.shape[0]
@@ -39,6 +37,11 @@
vmin=-1, vmax=1, cmap=plt.cm.get_cmap("bwr"))
plt.colorbar()
plt.title("%s / covariance" % title)
+
+ # Display sparsity pattern
+ plt.figure()
+ plt.imshow(sparsity, interpolation="nearest")
+ plt.title("%s / sparsity" % title)
# Display precision matrix
plt.figure()
| {"golden_diff": "diff --git a/examples/connectivity/plot_adhd_covariance.py b/examples/connectivity/plot_adhd_covariance.py\n--- a/examples/connectivity/plot_adhd_covariance.py\n+++ b/examples/connectivity/plot_adhd_covariance.py\n@@ -20,13 +20,11 @@\n def plot_matrices(cov, prec, title):\n \"\"\"Plot covariance and precision matrices, for a given processing. \"\"\"\n \n+ # Compute sparsity pattern\n+ sparsity = (prec == 0)\n+ \n prec = prec.copy() # avoid side effects\n \n- # Display sparsity pattern\n- sparsity = prec == 0\n- plt.figure()\n- plt.imshow(sparsity, interpolation=\"nearest\")\n- plt.title(\"%s / sparsity\" % title)\n \n # Put zeros on the diagonal, for graph clarity.\n size = prec.shape[0]\n@@ -39,6 +37,11 @@\n vmin=-1, vmax=1, cmap=plt.cm.get_cmap(\"bwr\"))\n plt.colorbar()\n plt.title(\"%s / covariance\" % title)\n+ \n+ # Display sparsity pattern\n+ plt.figure()\n+ plt.imshow(sparsity, interpolation=\"nearest\")\n+ plt.title(\"%s / sparsity\" % title)\n \n # Display precision matrix\n plt.figure()\n", "issue": "Ugly side plots in doc\nThey mask part of the code they are supposed to illustrate. See an example [here](http://nilearn.github.io/building_blocks/data_preparation.html#computing-the-mask).\n\n\n\n", "before_files": [{"content": "\"\"\"\nComputation of covariance matrix between brain regions\n======================================================\n\nThis example shows how to extract signals from regions defined by an atlas,\nand to estimate a covariance matrix based on these signals.\n\"\"\"\n \nplotted_subject = 0 # subject to plot\n\n\nimport matplotlib.pyplot as plt\nimport matplotlib\n# Copied from matplotlib 1.2.0 for matplotlib 0.99 compatibility.\n_bwr_data = ((0.0, 0.0, 1.0), (1.0, 1.0, 1.0), (1.0, 0.0, 0.0))\nplt.cm.register_cmap(cmap=matplotlib.colors.LinearSegmentedColormap.from_list(\n \"bwr\", _bwr_data))\n\n\ndef plot_matrices(cov, prec, title):\n \"\"\"Plot covariance and precision matrices, for a given processing. 
\"\"\"\n\n prec = prec.copy() # avoid side effects\n\n # Display sparsity pattern\n sparsity = prec == 0\n plt.figure()\n plt.imshow(sparsity, interpolation=\"nearest\")\n plt.title(\"%s / sparsity\" % title)\n\n # Put zeros on the diagonal, for graph clarity.\n size = prec.shape[0]\n prec[range(size), range(size)] = 0\n span = max(abs(prec.min()), abs(prec.max()))\n\n # Display covariance matrix\n plt.figure()\n plt.imshow(cov, interpolation=\"nearest\",\n vmin=-1, vmax=1, cmap=plt.cm.get_cmap(\"bwr\"))\n plt.colorbar()\n plt.title(\"%s / covariance\" % title)\n\n # Display precision matrix\n plt.figure()\n plt.imshow(prec, interpolation=\"nearest\",\n vmin=-span, vmax=span,\n cmap=plt.cm.get_cmap(\"bwr\"))\n plt.colorbar()\n plt.title(\"%s / precision\" % title)\n\n\n# Fetching datasets ###########################################################\nprint(\"-- Fetching datasets ...\")\nfrom nilearn import datasets\nmsdl_atlas_dataset = datasets.fetch_msdl_atlas()\nadhd_dataset = datasets.fetch_adhd()\n\n# Extracting region signals ###################################################\nimport nilearn.image\nimport nilearn.input_data\n\nfrom sklearn.externals.joblib import Memory\nmem = Memory(\".\")\n\n# Number of subjects to consider for group-sparse covariance\nn_subjects = 10\nsubjects = []\n\nfunc_filenames = adhd_dataset.func\nconfound_filenames = adhd_dataset.confounds\nfor func_filename, confound_filename in zip(func_filenames,\n confound_filenames):\n print(\"Processing file %s\" % func_filename)\n\n print(\"-- Computing confounds ...\")\n hv_confounds = mem.cache(nilearn.image.high_variance_confounds)(\n func_filename)\n\n print(\"-- Computing region signals ...\")\n masker = nilearn.input_data.NiftiMapsMasker(\n msdl_atlas_dataset.maps, resampling_target=\"maps\", detrend=True,\n low_pass=None, high_pass=0.01, t_r=2.5, standardize=True,\n memory=mem, memory_level=1, verbose=1)\n region_ts = masker.fit_transform(func_filename,\n confounds=[hv_confounds,\n confound_filename])\n subjects.append(region_ts)\n\n# Computing group-sparse precision matrices ###################################\nprint(\"-- Computing group-sparse precision matrices ...\")\nfrom nilearn.group_sparse_covariance import GroupSparseCovarianceCV\ngsc = GroupSparseCovarianceCV(verbose=2, n_jobs=3)\ngsc.fit(subjects)\n\nprint(\"-- Computing graph-lasso precision matrices ...\")\nfrom sklearn import covariance\ngl = covariance.GraphLassoCV(n_jobs=3)\ngl.fit(subjects[plotted_subject])\n\n# Displaying results ##########################################################\nprint(\"-- Displaying results\")\ntitle = \"{0:d} GroupSparseCovariance $\\\\alpha={1:.2e}$\".format(plotted_subject,\n gsc.alpha_)\nplot_matrices(gsc.covariances_[..., plotted_subject],\n gsc.precisions_[..., plotted_subject], title)\n\ntitle = \"{0:d} GraphLasso $\\\\alpha={1:.2e}$\".format(plotted_subject,\n gl.alpha_)\nplot_matrices(gl.covariance_, gl.precision_, title)\n\nplt.show()\n", "path": "examples/connectivity/plot_adhd_covariance.py"}], "after_files": [{"content": "\"\"\"\nComputation of covariance matrix between brain regions\n======================================================\n\nThis example shows how to extract signals from regions defined by an atlas,\nand to estimate a covariance matrix based on these signals.\n\"\"\"\n \nplotted_subject = 0 # subject to plot\n\n\nimport matplotlib.pyplot as plt\nimport matplotlib\n# Copied from matplotlib 1.2.0 for matplotlib 0.99 compatibility.\n_bwr_data = ((0.0, 0.0, 1.0), (1.0, 1.0, 1.0), (1.0, 0.0, 
0.0))\nplt.cm.register_cmap(cmap=matplotlib.colors.LinearSegmentedColormap.from_list(\n \"bwr\", _bwr_data))\n\n\ndef plot_matrices(cov, prec, title):\n \"\"\"Plot covariance and precision matrices, for a given processing. \"\"\"\n\n # Compute sparsity pattern\n sparsity = (prec == 0)\n \n prec = prec.copy() # avoid side effects\n\n\n # Put zeros on the diagonal, for graph clarity.\n size = prec.shape[0]\n prec[range(size), range(size)] = 0\n span = max(abs(prec.min()), abs(prec.max()))\n\n # Display covariance matrix\n plt.figure()\n plt.imshow(cov, interpolation=\"nearest\",\n vmin=-1, vmax=1, cmap=plt.cm.get_cmap(\"bwr\"))\n plt.colorbar()\n plt.title(\"%s / covariance\" % title)\n \n # Display sparsity pattern\n plt.figure()\n plt.imshow(sparsity, interpolation=\"nearest\")\n plt.title(\"%s / sparsity\" % title)\n\n # Display precision matrix\n plt.figure()\n plt.imshow(prec, interpolation=\"nearest\",\n vmin=-span, vmax=span,\n cmap=plt.cm.get_cmap(\"bwr\"))\n plt.colorbar()\n plt.title(\"%s / precision\" % title)\n\n\n# Fetching datasets ###########################################################\nprint(\"-- Fetching datasets ...\")\nfrom nilearn import datasets\nmsdl_atlas_dataset = datasets.fetch_msdl_atlas()\nadhd_dataset = datasets.fetch_adhd()\n\n# Extracting region signals ###################################################\nimport nilearn.image\nimport nilearn.input_data\n\nfrom sklearn.externals.joblib import Memory\nmem = Memory(\".\")\n\n# Number of subjects to consider for group-sparse covariance\nn_subjects = 10\nsubjects = []\n\nfunc_filenames = adhd_dataset.func\nconfound_filenames = adhd_dataset.confounds\nfor func_filename, confound_filename in zip(func_filenames,\n confound_filenames):\n print(\"Processing file %s\" % func_filename)\n\n print(\"-- Computing confounds ...\")\n hv_confounds = mem.cache(nilearn.image.high_variance_confounds)(\n func_filename)\n\n print(\"-- Computing region signals ...\")\n masker = nilearn.input_data.NiftiMapsMasker(\n msdl_atlas_dataset.maps, resampling_target=\"maps\", detrend=True,\n low_pass=None, high_pass=0.01, t_r=2.5, standardize=True,\n memory=mem, memory_level=1, verbose=1)\n region_ts = masker.fit_transform(func_filename,\n confounds=[hv_confounds,\n confound_filename])\n subjects.append(region_ts)\n\n# Computing group-sparse precision matrices ###################################\nprint(\"-- Computing group-sparse precision matrices ...\")\nfrom nilearn.group_sparse_covariance import GroupSparseCovarianceCV\ngsc = GroupSparseCovarianceCV(verbose=2, n_jobs=3)\ngsc.fit(subjects)\n\nprint(\"-- Computing graph-lasso precision matrices ...\")\nfrom sklearn import covariance\ngl = covariance.GraphLassoCV(n_jobs=3)\ngl.fit(subjects[plotted_subject])\n\n# Displaying results ##########################################################\nprint(\"-- Displaying results\")\ntitle = \"{0:d} GroupSparseCovariance $\\\\alpha={1:.2e}$\".format(plotted_subject,\n gsc.alpha_)\nplot_matrices(gsc.covariances_[..., plotted_subject],\n gsc.precisions_[..., plotted_subject], title)\n\ntitle = \"{0:d} GraphLasso $\\\\alpha={1:.2e}$\".format(plotted_subject,\n gl.alpha_)\nplot_matrices(gl.covariance_, gl.precision_, title)\n\nplt.show()\n", "path": "examples/connectivity/plot_adhd_covariance.py"}]} | 1,506 | 289 |
gh_patches_debug_33939 | rasdani/github-patches | git_diff | aws-cloudformation__cfn-lint-1978 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
E2504 incorrectly rejects "Iops" property for io2/gp3 volumes
*cfn-lint version: (`cfn-lint --version`)*
cfn-lint 0.44.6
*Description of issue.*
cfn-lint produces an error "E2504: Iops shouldn't be defined for type io2 for Resource ... LaunchConfiguration/Properties/BlockDeviceMappings/0/Ebs/Iops" when setting Iops on an io2 EBS volume.
The Iops property is required for io2 and optional for gp3. [1]
Cfn-lint treats the Iops property as required for io1 and forbidden for all other volume types, which is very much not correct.
[1] https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-template.html#cfn-ec2-blockdev-template-iops
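Concretely, the expected rule can be sketched like this (illustrative only, not the project's actual implementation):

```python
# Sketch of the Iops/VolumeType rule described above.
def iops_problem(volume_type, iops):
    """Return an error message when the Iops/VolumeType combination is invalid."""
    if volume_type in ("io1", "io2") and iops is None:
        return "VolumeType {0} requires Iops to be specified".format(volume_type)
    if volume_type in ("gp2", "st1", "sc1", "standard") and iops is not None:
        return "Iops shouldn't be defined for type {0}".format(volume_type)
    return None  # gp3 may optionally set Iops
```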
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cfnlint/rules/resources/ectwo/Ebs.py`
Content:
```
1 """
2 Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 SPDX-License-Identifier: MIT-0
4 """
5 import re
6 import six
7 from cfnlint.rules import CloudFormationLintRule
8 from cfnlint.rules import RuleMatch
9
10
11 class Ebs(CloudFormationLintRule):
12 """Check if Ec2 Ebs Resource Properties"""
13 id = 'E2504'
14 shortdesc = 'Check Ec2 Ebs Properties'
15 description = 'See if Ec2 Eb2 Properties are valid'
16 source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-template.html'
17 tags = ['properties', 'ec2', 'ebs']
18
19 def _checkEbs(self, cfn, ebs, path):
20 matches = []
21
22 if isinstance(ebs, dict):
23 volume_types_obj = cfn.get_values(ebs, 'VolumeType')
24 iops_obj = cfn.get_values(ebs, 'Iops')
25 if volume_types_obj is not None:
26 for volume_type_obj in volume_types_obj:
27 volume_type = volume_type_obj.get('Value')
28 if isinstance(volume_type, six.string_types):
29 if volume_type == 'io1':
30 if iops_obj is None:
31 pathmessage = path[:] + ['VolumeType']
32 message = 'VolumeType io1 requires Iops to be specified for {0}'
33 matches.append(
34 RuleMatch(pathmessage, message.format('/'.join(map(str, pathmessage)))))
35 elif volume_type:
36 if iops_obj is not None:
37 pathmessage = path[:] + ['Iops']
38 message = 'Iops shouldn\'t be defined for type {0} for {1}'
39 matches.append(
40 RuleMatch(
41 pathmessage,
42 message.format(volume_type, '/'.join(map(str, pathmessage)))))
43
44 return matches
45
46 def match(self, cfn):
47 """Check Ec2 Ebs Resource Parameters"""
48
49 matches = []
50
51 results = cfn.get_resource_properties(['AWS::EC2::Instance', 'BlockDeviceMappings'])
52 results.extend(cfn.get_resource_properties(
53 ['AWS::AutoScaling::LaunchConfiguration', 'BlockDeviceMappings']))
54 for result in results:
55 path = result['Path']
56 if isinstance(result['Value'], list):
57 for index, properties in enumerate(result['Value']):
58 virtual_name = properties.get('VirtualName')
59 ebs = properties.get('Ebs')
60 if virtual_name:
61 # switch to regex
62 if not re.match(r'^ephemeral([0-9]|[1][0-9]|[2][0-3])$', virtual_name):
63 pathmessage = path[:] + [index, 'VirtualName']
64 message = 'Property VirtualName should be of type ephemeral(n) for {0}'
65 matches.append(
66 RuleMatch(pathmessage, message.format('/'.join(map(str, pathmessage)))))
67 elif ebs:
68 matches.extend(self._checkEbs(cfn, ebs, path[:] + [index, 'Ebs']))
69 return matches
70
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/cfnlint/rules/resources/ectwo/Ebs.py b/src/cfnlint/rules/resources/ectwo/Ebs.py
--- a/src/cfnlint/rules/resources/ectwo/Ebs.py
+++ b/src/cfnlint/rules/resources/ectwo/Ebs.py
@@ -9,10 +9,10 @@
class Ebs(CloudFormationLintRule):
- """Check if Ec2 Ebs Resource Properties"""
+ """Check Ec2 Ebs Resource Properties"""
id = 'E2504'
shortdesc = 'Check Ec2 Ebs Properties'
- description = 'See if Ec2 Eb2 Properties are valid'
+ description = 'See if Ec2 Ebs Properties are valid'
source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-template.html'
tags = ['properties', 'ec2', 'ebs']
@@ -26,13 +26,15 @@
for volume_type_obj in volume_types_obj:
volume_type = volume_type_obj.get('Value')
if isinstance(volume_type, six.string_types):
- if volume_type == 'io1':
+ if volume_type in ('io1', 'io2'):
if iops_obj is None:
pathmessage = path[:] + ['VolumeType']
- message = 'VolumeType io1 requires Iops to be specified for {0}'
+ message = 'VolumeType {0} requires Iops to be specified for {1}'
matches.append(
- RuleMatch(pathmessage, message.format('/'.join(map(str, pathmessage)))))
- elif volume_type:
+ RuleMatch(
+ pathmessage,
+ message.format(volume_type, '/'.join(map(str, pathmessage)))))
+ elif volume_type in ('gp2', 'st1', 'sc1', 'standard'):
if iops_obj is not None:
pathmessage = path[:] + ['Iops']
message = 'Iops shouldn\'t be defined for type {0} for {1}'
| {"golden_diff": "diff --git a/src/cfnlint/rules/resources/ectwo/Ebs.py b/src/cfnlint/rules/resources/ectwo/Ebs.py\n--- a/src/cfnlint/rules/resources/ectwo/Ebs.py\n+++ b/src/cfnlint/rules/resources/ectwo/Ebs.py\n@@ -9,10 +9,10 @@\n \n \n class Ebs(CloudFormationLintRule):\n- \"\"\"Check if Ec2 Ebs Resource Properties\"\"\"\n+ \"\"\"Check Ec2 Ebs Resource Properties\"\"\"\n id = 'E2504'\n shortdesc = 'Check Ec2 Ebs Properties'\n- description = 'See if Ec2 Eb2 Properties are valid'\n+ description = 'See if Ec2 Ebs Properties are valid'\n source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-template.html'\n tags = ['properties', 'ec2', 'ebs']\n \n@@ -26,13 +26,15 @@\n for volume_type_obj in volume_types_obj:\n volume_type = volume_type_obj.get('Value')\n if isinstance(volume_type, six.string_types):\n- if volume_type == 'io1':\n+ if volume_type in ('io1', 'io2'):\n if iops_obj is None:\n pathmessage = path[:] + ['VolumeType']\n- message = 'VolumeType io1 requires Iops to be specified for {0}'\n+ message = 'VolumeType {0} requires Iops to be specified for {1}'\n matches.append(\n- RuleMatch(pathmessage, message.format('/'.join(map(str, pathmessage)))))\n- elif volume_type:\n+ RuleMatch(\n+ pathmessage,\n+ message.format(volume_type, '/'.join(map(str, pathmessage)))))\n+ elif volume_type in ('gp2', 'st1', 'sc1', 'standard'):\n if iops_obj is not None:\n pathmessage = path[:] + ['Iops']\n message = 'Iops shouldn\\'t be defined for type {0} for {1}'\n", "issue": "E2504 incorrectly rejects \"Iops\" property for io2/gp3 volumes\n*cfn-lint version: (`cfn-lint --version`)*\r\n\r\ncfn-lint 0.44.6\r\n\r\n*Description of issue.*\r\n\r\ncfn-lint produces an error \"E2504: Iops shouldn't be defined for type io2 for Resource ... LaunchConfiguration/Properties/BlockDeviceMappings/0/Ebs/Iops\" when setting Iops on a io2 EBS volume.\r\n\r\nThe Iops property is required for io2 and optional for gp3. [1]\r\n\r\nCfn-lint treats the Iops property as required for io1 and forbidden for all other volume types, which is very much not correct \r\n\r\n[1] https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-template.html#cfn-ec2-blockdev-template-iops\r\n\r\n\n", "before_files": [{"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\nimport re\nimport six\nfrom cfnlint.rules import CloudFormationLintRule\nfrom cfnlint.rules import RuleMatch\n\n\nclass Ebs(CloudFormationLintRule):\n \"\"\"Check if Ec2 Ebs Resource Properties\"\"\"\n id = 'E2504'\n shortdesc = 'Check Ec2 Ebs Properties'\n description = 'See if Ec2 Eb2 Properties are valid'\n source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-template.html'\n tags = ['properties', 'ec2', 'ebs']\n\n def _checkEbs(self, cfn, ebs, path):\n matches = []\n\n if isinstance(ebs, dict):\n volume_types_obj = cfn.get_values(ebs, 'VolumeType')\n iops_obj = cfn.get_values(ebs, 'Iops')\n if volume_types_obj is not None:\n for volume_type_obj in volume_types_obj:\n volume_type = volume_type_obj.get('Value')\n if isinstance(volume_type, six.string_types):\n if volume_type == 'io1':\n if iops_obj is None:\n pathmessage = path[:] + ['VolumeType']\n message = 'VolumeType io1 requires Iops to be specified for {0}'\n matches.append(\n RuleMatch(pathmessage, message.format('/'.join(map(str, pathmessage)))))\n elif volume_type:\n if iops_obj is not None:\n pathmessage = path[:] + ['Iops']\n message = 'Iops shouldn\\'t be defined for type {0} for {1}'\n matches.append(\n RuleMatch(\n pathmessage,\n message.format(volume_type, '/'.join(map(str, pathmessage)))))\n\n return matches\n\n def match(self, cfn):\n \"\"\"Check Ec2 Ebs Resource Parameters\"\"\"\n\n matches = []\n\n results = cfn.get_resource_properties(['AWS::EC2::Instance', 'BlockDeviceMappings'])\n results.extend(cfn.get_resource_properties(\n ['AWS::AutoScaling::LaunchConfiguration', 'BlockDeviceMappings']))\n for result in results:\n path = result['Path']\n if isinstance(result['Value'], list):\n for index, properties in enumerate(result['Value']):\n virtual_name = properties.get('VirtualName')\n ebs = properties.get('Ebs')\n if virtual_name:\n # switch to regex\n if not re.match(r'^ephemeral([0-9]|[1][0-9]|[2][0-3])$', virtual_name):\n pathmessage = path[:] + [index, 'VirtualName']\n message = 'Property VirtualName should be of type ephemeral(n) for {0}'\n matches.append(\n RuleMatch(pathmessage, message.format('/'.join(map(str, pathmessage)))))\n elif ebs:\n matches.extend(self._checkEbs(cfn, ebs, path[:] + [index, 'Ebs']))\n return matches\n", "path": "src/cfnlint/rules/resources/ectwo/Ebs.py"}], "after_files": [{"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\nimport re\nimport six\nfrom cfnlint.rules import CloudFormationLintRule\nfrom cfnlint.rules import RuleMatch\n\n\nclass Ebs(CloudFormationLintRule):\n \"\"\"Check Ec2 Ebs Resource Properties\"\"\"\n id = 'E2504'\n shortdesc = 'Check Ec2 Ebs Properties'\n description = 'See if Ec2 Ebs Properties are valid'\n source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-template.html'\n tags = ['properties', 'ec2', 'ebs']\n\n def _checkEbs(self, cfn, ebs, path):\n matches = []\n\n if isinstance(ebs, dict):\n volume_types_obj = cfn.get_values(ebs, 'VolumeType')\n iops_obj = cfn.get_values(ebs, 'Iops')\n if volume_types_obj is not None:\n for volume_type_obj in volume_types_obj:\n volume_type = volume_type_obj.get('Value')\n if isinstance(volume_type, six.string_types):\n if volume_type in ('io1', 'io2'):\n if iops_obj is None:\n pathmessage = path[:] + ['VolumeType']\n message = 'VolumeType {0} requires Iops to be specified for {1}'\n matches.append(\n RuleMatch(\n pathmessage,\n message.format(volume_type, '/'.join(map(str, pathmessage)))))\n elif volume_type in ('gp2', 'st1', 'sc1', 'standard'):\n if iops_obj is not None:\n pathmessage = path[:] + ['Iops']\n message = 'Iops shouldn\\'t be defined for type {0} for {1}'\n matches.append(\n RuleMatch(\n pathmessage,\n message.format(volume_type, '/'.join(map(str, pathmessage)))))\n\n return matches\n\n def match(self, cfn):\n \"\"\"Check Ec2 Ebs Resource Parameters\"\"\"\n\n matches = []\n\n results = cfn.get_resource_properties(['AWS::EC2::Instance', 'BlockDeviceMappings'])\n results.extend(cfn.get_resource_properties(\n ['AWS::AutoScaling::LaunchConfiguration', 'BlockDeviceMappings']))\n for result in results:\n path = result['Path']\n if isinstance(result['Value'], list):\n for index, properties in enumerate(result['Value']):\n virtual_name = properties.get('VirtualName')\n ebs = properties.get('Ebs')\n if virtual_name:\n # switch to regex\n if not re.match(r'^ephemeral([0-9]|[1][0-9]|[2][0-3])$', virtual_name):\n pathmessage = path[:] + [index, 'VirtualName']\n message = 'Property VirtualName should be of type ephemeral(n) for {0}'\n matches.append(\n RuleMatch(pathmessage, message.format('/'.join(map(str, pathmessage)))))\n elif ebs:\n matches.extend(self._checkEbs(cfn, ebs, path[:] + [index, 'Ebs']))\n return matches\n", "path": "src/cfnlint/rules/resources/ectwo/Ebs.py"}]} | 1,238 | 442 |
gh_patches_debug_24419 | rasdani/github-patches | git_diff | scrapy__scrapy-1101 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fetch errors on some HTTPS sites
$ scrapy fetch 'https://flixbus.de'
```
2014-12-12 10:01:50+0100 [scrapy] INFO: Scrapy 0.24.4 started (bot: scrapybot)
2014-12-12 10:01:50+0100 [scrapy] INFO: Optional features available: ssl, http11
2014-12-12 10:01:50+0100 [scrapy] INFO: Overridden settings: {}
2014-12-12 10:01:50+0100 [scrapy] INFO: Enabled extensions: LogStats, TelnetConsole, CloseSpider, WebService, CoreStats, SpiderState
2014-12-12 10:01:50+0100 [scrapy] INFO: Enabled downloader middlewares: HttpAuthMiddleware, DownloadTimeoutMiddleware, UserAgentMiddleware, RetryMiddleware, DefaultHeadersMiddleware, MetaRefreshMiddleware, HttpCompressionMiddleware, RedirectMiddleware, CookiesMiddleware, ChunkedTransferMiddleware, DownloaderStats
2014-12-12 10:01:50+0100 [scrapy] INFO: Enabled spider middlewares: HttpErrorMiddleware, OffsiteMiddleware, RefererMiddleware, UrlLengthMiddleware, DepthMiddleware
2014-12-12 10:01:50+0100 [scrapy] INFO: Enabled item pipelines:
2014-12-12 10:01:50+0100 [default] INFO: Spider opened
2014-12-12 10:01:50+0100 [default] INFO: Crawled 0 pages (at 0 pages/min), scraped 0 items (at 0 items/min)
2014-12-12 10:01:50+0100 [scrapy] DEBUG: Telnet console listening on 127.0.0.1:6023
2014-12-12 10:01:50+0100 [scrapy] DEBUG: Web service listening on 127.0.0.1:6080
2014-12-12 10:01:50+0100 [default] DEBUG: Retrying <GET https://flixbus.de> (failed 1 times): [<twisted.python.failure.Failure <class 'OpenSSL.SSL.Error'>>]
2014-12-12 10:01:50+0100 [default] DEBUG: Retrying <GET https://flixbus.de> (failed 2 times): [<twisted.python.failure.Failure <class 'OpenSSL.SSL.Error'>>]
2014-12-12 10:01:50+0100 [default] DEBUG: Gave up retrying <GET https://flixbus.de> (failed 3 times): [<twisted.python.failure.Failure <class 'OpenSSL.SSL.Error'>>]
2014-12-12 10:01:50+0100 [default] ERROR: Error downloading <GET https://flixbus.de>: [<twisted.python.failure.Failure <class 'OpenSSL.SSL.Error'>>]
2014-12-12 10:01:50+0100 [default] INFO: Closing spider (finished)
2014-12-12 10:01:50+0100 [default] INFO: Dumping Scrapy stats:
{'downloader/exception_count': 3,
'downloader/exception_type_count/twisted.web._newclient.ResponseNeverReceived': 3,
'downloader/request_bytes': 627,
'downloader/request_count': 3,
'downloader/request_method_count/GET': 3,
'finish_reason': 'finished',
'finish_time': datetime.datetime(2014, 12, 12, 9, 1, 50, 463722),
'log_count/DEBUG': 5,
'log_count/ERROR': 1,
'log_count/INFO': 7,
'scheduler/dequeued': 3,
'scheduler/dequeued/memory': 3,
'scheduler/enqueued': 3,
'scheduler/enqueued/memory': 3,
'start_time': datetime.datetime(2014, 12, 12, 9, 1, 50, 288873)}
2014-12-12 10:01:50+0100 [default] INFO: Spider closed (finished)
```
$ pip freeze
```
Scrapy==0.24.4
Twisted==14.0.2
argparse==1.2.1
cffi==0.8.6
characteristic==14.2.0
cryptography==0.6.1
cssselect==0.9.1
distribute==0.7.3
lxml==3.4.1
pyOpenSSL==0.14
pyasn1==0.1.7
pyasn1-modules==0.0.5
pycparser==2.10
queuelib==1.2.2
service-identity==14.0.0
six==1.8.0
w3lib==1.10.0
wsgiref==0.1.2
zope.interface==4.1.1
```
Same error on scrapinghub ;)
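The failure pattern looks like the TLS SNI extension is never sent by the default client context factory. A workaround sketch, assuming Twisted >= 14.0 where `ClientTLSOptions` is available (a local workaround, not necessarily the final upstream fix):

```python
# Sketch of an SNI-enabling context factory; names other than ClientTLSOptions are mine.
from OpenSSL import SSL
from twisted.internet.ssl import ClientContextFactory
from twisted.internet._sslverify import ClientTLSOptions  # Twisted >= 14.0


class SNIClientContextFactory(ClientContextFactory):
    "A permissive SSL context factory that also sends the SNI extension."

    def __init__(self):
        self.method = SSL.TLSv1_METHOD

    def getContext(self, hostname=None, port=None):
        ctx = ClientContextFactory.getContext(self)
        ctx.set_options(SSL.OP_ALL)  # keep the usual OpenSSL bug workarounds
        if hostname:
            ClientTLSOptions(hostname, ctx)  # set SNI for the TLS handshake
        return ctx
```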
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scrapy/core/downloader/contextfactory.py`
Content:
```
1 from OpenSSL import SSL
2 from twisted.internet.ssl import ClientContextFactory
3
4
5 class ScrapyClientContextFactory(ClientContextFactory):
6 "A SSL context factory which is more permissive against SSL bugs."
7 # see https://github.com/scrapy/scrapy/issues/82
8 # and https://github.com/scrapy/scrapy/issues/26
9
10 def __init__(self):
11 # see this issue on why we use TLSv1_METHOD by default
12 # https://github.com/scrapy/scrapy/issues/194
13 self.method = SSL.TLSv1_METHOD
14
15 def getContext(self, hostname=None, port=None):
16 ctx = ClientContextFactory.getContext(self)
17 # Enable all workarounds to SSL bugs as documented by
18 # http://www.openssl.org/docs/ssl/SSL_CTX_set_options.html
19 ctx.set_options(SSL.OP_ALL)
20 return ctx
21
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py
--- a/scrapy/core/downloader/contextfactory.py
+++ b/scrapy/core/downloader/contextfactory.py
@@ -1,11 +1,17 @@
from OpenSSL import SSL
from twisted.internet.ssl import ClientContextFactory
+try:
+ # available since twisted 14.0
+ from twisted.internet._sslverify import ClientTLSOptions
+except ImportError:
+ ClientTLSOptions = None
class ScrapyClientContextFactory(ClientContextFactory):
"A SSL context factory which is more permissive against SSL bugs."
# see https://github.com/scrapy/scrapy/issues/82
# and https://github.com/scrapy/scrapy/issues/26
+ # and https://github.com/scrapy/scrapy/issues/981
def __init__(self):
# see this issue on why we use TLSv1_METHOD by default
@@ -17,4 +23,6 @@
# Enable all workarounds to SSL bugs as documented by
# http://www.openssl.org/docs/ssl/SSL_CTX_set_options.html
ctx.set_options(SSL.OP_ALL)
+ if hostname and ClientTLSOptions is not None: # workaround for TLS SNI
+ ClientTLSOptions(hostname, ctx)
return ctx
| {"golden_diff": "diff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py\n--- a/scrapy/core/downloader/contextfactory.py\n+++ b/scrapy/core/downloader/contextfactory.py\n@@ -1,11 +1,17 @@\n from OpenSSL import SSL\n from twisted.internet.ssl import ClientContextFactory\n+try:\n+ # available since twisted 14.0\n+ from twisted.internet._sslverify import ClientTLSOptions\n+except ImportError:\n+ ClientTLSOptions = None\n \n \n class ScrapyClientContextFactory(ClientContextFactory):\n \"A SSL context factory which is more permissive against SSL bugs.\"\n # see https://github.com/scrapy/scrapy/issues/82\n # and https://github.com/scrapy/scrapy/issues/26\n+ # and https://github.com/scrapy/scrapy/issues/981\n \n def __init__(self):\n # see this issue on why we use TLSv1_METHOD by default\n@@ -17,4 +23,6 @@\n # Enable all workarounds to SSL bugs as documented by\n # http://www.openssl.org/docs/ssl/SSL_CTX_set_options.html\n ctx.set_options(SSL.OP_ALL)\n+ if hostname and ClientTLSOptions is not None: # workaround for TLS SNI\n+ ClientTLSOptions(hostname, ctx)\n return ctx\n", "issue": "fetch errors on some https sites\n$ scrapy fetch 'https://flixbus.de'\n\n```\n2014-12-12 10:01:50+0100 [scrapy] INFO: Scrapy 0.24.4 started (bot: scrapybot)\n2014-12-12 10:01:50+0100 [scrapy] INFO: Optional features available: ssl, http11\n2014-12-12 10:01:50+0100 [scrapy] INFO: Overridden settings: {}\n2014-12-12 10:01:50+0100 [scrapy] INFO: Enabled extensions: LogStats, TelnetConsole, CloseSpider, WebService, CoreStats, SpiderState\n2014-12-12 10:01:50+0100 [scrapy] INFO: Enabled downloader middlewares: HttpAuthMiddleware, DownloadTimeoutMiddleware, UserAgentMiddleware, RetryMiddleware, DefaultHeadersMiddleware, MetaRefreshMiddleware, HttpCompressionMiddleware, RedirectMiddleware, CookiesMiddleware, ChunkedTransferMiddleware, DownloaderStats\n2014-12-12 10:01:50+0100 [scrapy] INFO: Enabled spider middlewares: HttpErrorMiddleware, OffsiteMiddleware, RefererMiddleware, UrlLengthMiddleware, DepthMiddleware\n2014-12-12 10:01:50+0100 [scrapy] INFO: Enabled item pipelines: \n2014-12-12 10:01:50+0100 [default] INFO: Spider opened\n2014-12-12 10:01:50+0100 [default] INFO: Crawled 0 pages (at 0 pages/min), scraped 0 items (at 0 items/min)\n2014-12-12 10:01:50+0100 [scrapy] DEBUG: Telnet console listening on 127.0.0.1:6023\n2014-12-12 10:01:50+0100 [scrapy] DEBUG: Web service listening on 127.0.0.1:6080\n2014-12-12 10:01:50+0100 [default] DEBUG: Retrying <GET https://flixbus.de> (failed 1 times): [<twisted.python.failure.Failure <class 'OpenSSL.SSL.Error'>>]\n2014-12-12 10:01:50+0100 [default] DEBUG: Retrying <GET https://flixbus.de> (failed 2 times): [<twisted.python.failure.Failure <class 'OpenSSL.SSL.Error'>>]\n2014-12-12 10:01:50+0100 [default] DEBUG: Gave up retrying <GET https://flixbus.de> (failed 3 times): [<twisted.python.failure.Failure <class 'OpenSSL.SSL.Error'>>]\n2014-12-12 10:01:50+0100 [default] ERROR: Error downloading <GET https://flixbus.de>: [<twisted.python.failure.Failure <class 'OpenSSL.SSL.Error'>>]\n2014-12-12 10:01:50+0100 [default] INFO: Closing spider (finished)\n2014-12-12 10:01:50+0100 [default] INFO: Dumping Scrapy stats:\n {'downloader/exception_count': 3,\n 'downloader/exception_type_count/twisted.web._newclient.ResponseNeverReceived': 3,\n 'downloader/request_bytes': 627,\n 'downloader/request_count': 3,\n 'downloader/request_method_count/GET': 3,\n 'finish_reason': 'finished',\n 'finish_time': datetime.datetime(2014, 12, 12, 9, 1, 50, 463722),\n 
'log_count/DEBUG': 5,\n 'log_count/ERROR': 1,\n 'log_count/INFO': 7,\n 'scheduler/dequeued': 3,\n 'scheduler/dequeued/memory': 3,\n 'scheduler/enqueued': 3,\n 'scheduler/enqueued/memory': 3,\n 'start_time': datetime.datetime(2014, 12, 12, 9, 1, 50, 288873)}\n2014-12-12 10:01:50+0100 [default] INFO: Spider closed (finished)\n```\n\n$ pip freeze\n\n```\nScrapy==0.24.4\nTwisted==14.0.2\nargparse==1.2.1\ncffi==0.8.6\ncharacteristic==14.2.0\ncryptography==0.6.1\ncssselect==0.9.1\ndistribute==0.7.3\nlxml==3.4.1\npyOpenSSL==0.14\npyasn1==0.1.7\npyasn1-modules==0.0.5\npycparser==2.10\nqueuelib==1.2.2\nservice-identity==14.0.0\nsix==1.8.0\nw3lib==1.10.0\nwsgiref==0.1.2\nzope.interface==4.1.1\n```\n\nSame error on scrapinghub ;)\n\n", "before_files": [{"content": "from OpenSSL import SSL\nfrom twisted.internet.ssl import ClientContextFactory\n\n\nclass ScrapyClientContextFactory(ClientContextFactory):\n \"A SSL context factory which is more permissive against SSL bugs.\"\n # see https://github.com/scrapy/scrapy/issues/82\n # and https://github.com/scrapy/scrapy/issues/26\n\n def __init__(self):\n # see this issue on why we use TLSv1_METHOD by default\n # https://github.com/scrapy/scrapy/issues/194\n self.method = SSL.TLSv1_METHOD\n\n def getContext(self, hostname=None, port=None):\n ctx = ClientContextFactory.getContext(self)\n # Enable all workarounds to SSL bugs as documented by\n # http://www.openssl.org/docs/ssl/SSL_CTX_set_options.html\n ctx.set_options(SSL.OP_ALL)\n return ctx\n", "path": "scrapy/core/downloader/contextfactory.py"}], "after_files": [{"content": "from OpenSSL import SSL\nfrom twisted.internet.ssl import ClientContextFactory\ntry:\n # available since twisted 14.0\n from twisted.internet._sslverify import ClientTLSOptions\nexcept ImportError:\n ClientTLSOptions = None\n\n\nclass ScrapyClientContextFactory(ClientContextFactory):\n \"A SSL context factory which is more permissive against SSL bugs.\"\n # see https://github.com/scrapy/scrapy/issues/82\n # and https://github.com/scrapy/scrapy/issues/26\n # and https://github.com/scrapy/scrapy/issues/981\n\n def __init__(self):\n # see this issue on why we use TLSv1_METHOD by default\n # https://github.com/scrapy/scrapy/issues/194\n self.method = SSL.TLSv1_METHOD\n\n def getContext(self, hostname=None, port=None):\n ctx = ClientContextFactory.getContext(self)\n # Enable all workarounds to SSL bugs as documented by\n # http://www.openssl.org/docs/ssl/SSL_CTX_set_options.html\n ctx.set_options(SSL.OP_ALL)\n if hostname and ClientTLSOptions is not None: # workaround for TLS SNI\n ClientTLSOptions(hostname, ctx)\n return ctx\n", "path": "scrapy/core/downloader/contextfactory.py"}]} | 1,809 | 294 |
gh_patches_debug_11278 | rasdani/github-patches | git_diff | mindsdb__mindsdb-1893 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Tag Docker versions
Currently, the GitHub Action deploys every new version to Docker Hub only as `latest`. We also need to push an image tagged with the MindsDB version, e.g. 2.62.5 => mindsdb/mindsdb:2.62.5
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docker/build.py`
Content:
```
1 import os
2 import sys
3 import requests
4 import subprocess
5
6 installer_version_url = 'https://public.api.mindsdb.com/installer/@@beta_or_release/docker___success___None'
7
8 api_response = requests.get(
9     installer_version_url.replace('@@beta_or_release', sys.argv[1]))
10 
11 if api_response.status_code != 200:
12     exit(1)
13 
14 installer_version = api_response.text
15 
16 os.system('mkdir -p dist')
17 
18 if sys.argv[1] == 'release':
19     container_name = 'mindsdb'
20     dockerfile_template = 'dockerfile_release.template'
21 
22 elif sys.argv[1] == 'beta':
23     container_name = 'mindsdb_beta'
24     dockerfile_template = 'dockerfile_beta.template'
25 
26 with open(dockerfile_template, 'r') as fp:
27     content = fp.read()
28     content = content.replace('@@beta_or_release', sys.argv[1])
29     content = content.replace('@@installer_version', installer_version)
30 
31 with open('dist/Dockerfile', 'w') as fp:
32     fp.write(content)
33 
34 command = (f"""
35     cd dist &&
36     docker build -t {container_name} . &&
37     docker tag {container_name} mindsdb/{container_name}:latest &&
38     docker tag {container_name} mindsdb/{container_name}:{installer_version} &&
39     docker push mindsdb/{container_name};
40     cd ..
41 """)
42
43 subprocess.run(command, shell=True, check=True)
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docker/build.py b/docker/build.py
--- a/docker/build.py
+++ b/docker/build.py
@@ -31,13 +31,11 @@
with open('dist/Dockerfile', 'w') as fp:
fp.write(content)
+print(installer_version)
command = (f"""
cd dist &&
- docker build -t {container_name} . &&
- docker tag {container_name} mindsdb/{container_name}:latest &&
- docker tag {container_name} mindsdb/{container_name}:{installer_version} &&
- docker push mindsdb/{container_name};
- cd ..
+ docker build -t mindsdb/{container_name}:latest -t mindsdb/{container_name}:{installer_version} . &&
+ docker push mindsdb/{container_name} --all-tags
""")
subprocess.run(command, shell=True, check=True)
\ No newline at end of file
| {"golden_diff": "diff --git a/docker/build.py b/docker/build.py\n--- a/docker/build.py\n+++ b/docker/build.py\n@@ -31,13 +31,11 @@\n with open('dist/Dockerfile', 'w') as fp:\n fp.write(content)\n \n+print(installer_version)\n command = (f\"\"\"\n cd dist &&\n- docker build -t {container_name} . &&\n- docker tag {container_name} mindsdb/{container_name}:latest &&\n- docker tag {container_name} mindsdb/{container_name}:{installer_version} &&\n- docker push mindsdb/{container_name};\n- cd ..\n+ docker build -t mindsdb/{container_name}:latest -t mindsdb/{container_name}:{installer_version} . &&\n+ docker push mindsdb/{container_name} --all-tags\n \"\"\")\n \n subprocess.run(command, shell=True, check=True)\n\\ No newline at end of file\n", "issue": "Tag Docker versions\nCurrently, GitHub Action will deploy a new version to dockerhub as a `latest`. We need to push the tagged version per MindsDB version e.g 2.62.5 => mindsdb/mindsdb:2.62.5\n", "before_files": [{"content": "import os\nimport sys\nimport requests\nimport subprocess\n\ninstaller_version_url = 'https://public.api.mindsdb.com/installer/@@beta_or_release/docker___success___None'\n\napi_response = requests.get(\n installer_version_url.replace('@@beta_or_release', sys.argv[1]))\n\nif api_response.status_code != 200:\n exit(1)\n\ninstaller_version = api_response.text\n\nos.system('mkdir -p dist')\n\nif sys.argv[1] == 'release':\n container_name = 'mindsdb'\n dockerfile_template = 'dockerfile_release.template'\n\nelif sys.argv[1] == 'beta':\n container_name = 'mindsdb_beta'\n dockerfile_template = 'dockerfile_beta.template'\n\nwith open(dockerfile_template, 'r') as fp:\n content = fp.read()\n content = content.replace('@@beta_or_release', sys.argv[1])\n content = content.replace('@@installer_version', installer_version)\n\nwith open('dist/Dockerfile', 'w') as fp:\n fp.write(content)\n\ncommand = (f\"\"\"\n cd dist &&\n docker build -t {container_name} . &&\n docker tag {container_name} mindsdb/{container_name}:latest &&\n docker tag {container_name} mindsdb/{container_name}:{installer_version} &&\n docker push mindsdb/{container_name};\n cd ..\n \"\"\")\n\nsubprocess.run(command, shell=True, check=True)", "path": "docker/build.py"}], "after_files": [{"content": "import os\nimport sys\nimport requests\nimport subprocess\n\ninstaller_version_url = 'https://public.api.mindsdb.com/installer/@@beta_or_release/docker___success___None'\n\napi_response = requests.get(\n installer_version_url.replace('@@beta_or_release', sys.argv[1]))\n\nif api_response.status_code != 200:\n exit(1)\n\ninstaller_version = api_response.text\n\nos.system('mkdir -p dist')\n\nif sys.argv[1] == 'release':\n container_name = 'mindsdb'\n dockerfile_template = 'dockerfile_release.template'\n\nelif sys.argv[1] == 'beta':\n container_name = 'mindsdb_beta'\n dockerfile_template = 'dockerfile_beta.template'\n\nwith open(dockerfile_template, 'r') as fp:\n content = fp.read()\n content = content.replace('@@beta_or_release', sys.argv[1])\n content = content.replace('@@installer_version', installer_version)\n\nwith open('dist/Dockerfile', 'w') as fp:\n fp.write(content)\n\nprint(installer_version)\ncommand = (f\"\"\"\n cd dist &&\n docker build -t mindsdb/{container_name}:latest -t mindsdb/{container_name}:{installer_version} . &&\n docker push mindsdb/{container_name} --all-tags\n \"\"\")\n\nsubprocess.run(command, shell=True, check=True)", "path": "docker/build.py"}]} | 695 | 195 |
gh_patches_debug_23379 | rasdani/github-patches | git_diff | pre-commit__pre-commit-81 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Occasional flakiness of staged file stasher
It appears `git diff-files` returns incorrectly in some cases that I haven't been able to pinpoint.
It results in something like this (note, however, that all the files are staged):
```
$ pre-commit
[WARNING] Unstaged files detected.
Stashing unstaged files to /home/anthony/workspace/pre-commit/.pre-commit-files/patch1397370090.
Trim Trailing Whitespace............................................Passed
Fix End of Files....................................................Passed
Check Yaml..........................................................Passed
Debug Statements (Python)...........................................Passed
Tests should end in _test.py........................................Passed
Pyflakes............................................................Passed
Validate Pre-Commit Config..........................................Passed
Validate Pre-Commit Manifest........................................Passed
[WARNING] Stashed changes conflicted with hook auto-fixes... Rolling back fixes...
Traceback (most recent call last):
File "/home/anthony/workspace/pre-commit/venv-pre_commit/bin/pre-commit", line 9, in <module>
load_entry_point('pre-commit==0.0.0', 'console_scripts', 'pre-commit')()
File "/home/anthony/workspace/pre-commit/pre_commit/util.py", line 52, in wrapper
return func(argv)
File "/home/anthony/workspace/pre-commit/pre_commit/run.py", line 143, in run
return _run(runner, args)
File "/home/anthony/workspace/pre-commit/pre_commit/run.py", line 95, in _run
return run_hooks(runner, args)
File "/usr/lib/python2.7/contextlib.py", line 24, in __exit__
self.gen.next()
File "/home/anthony/workspace/pre-commit/pre_commit/staged_files_only.py", line 51, in staged_files_only
cmd_runner.run(['git', 'apply', patch_filename])
File "/home/anthony/workspace/pre-commit/pre_commit/prefixed_command_runner.py", line 67, in run
returncode, replaced_cmd, retcode, output=(stdout, stderr),
pre_commit.prefixed_command_runner.CalledProcessError: Command: ['git', 'apply', '/home/anthony/workspace/pre-commit/.pre-commit-files/patch1397370090']
Return code: 128
Expected return code: 0
Output: ('', 'fatal: unrecognized input\n')
$ git status
# On branch rebuild_venv
# Changes to be committed:
# (use "git reset HEAD <file>..." to unstage)
#
# modified: .gitignore
# modified: Makefile
#
```
The "stashed diff" is an empty file. I think the "fix" is to check if the diff contains anything before printing the warning message and entering the branch that isn't a noop context manager.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/staged_files_only.py`
Content:
```
1 import contextlib
2 import logging
3 import time
4
5 from pre_commit.prefixed_command_runner import CalledProcessError
6
7
8 logger = logging.getLogger('pre_commit')
9
10
11 @contextlib.contextmanager
12 def staged_files_only(cmd_runner):
13 """Clear any unstaged changes from the git working directory inside this
14 context.
15
16 Args:
17 cmd_runner - PrefixedCommandRunner
18 """
19 # Determine if there are unstaged files
20 retcode, _, _ = cmd_runner.run(
21 ['git', 'diff-files', '--quiet'],
22 retcode=None,
23 )
24 if retcode:
25 patch_filename = cmd_runner.path('patch{0}'.format(int(time.time())))
26 logger.warning('Unstaged files detected.')
27 logger.info(
28 'Stashing unstaged files to {0}.'.format(patch_filename),
29 )
30 # Save the current unstaged changes as a patch
31 with open(patch_filename, 'w') as patch_file:
32 cmd_runner.run(['git', 'diff', '--binary'], stdout=patch_file)
33
34 # Clear the working directory of unstaged changes
35 cmd_runner.run(['git', 'checkout', '--', '.'])
36 try:
37 yield
38 finally:
39 # Try to apply the patch we saved
40 try:
41 cmd_runner.run(['git', 'apply', patch_filename])
42 except CalledProcessError:
43 logger.warning(
44 'Stashed changes conflicted with hook auto-fixes... '
45 'Rolling back fixes...'
46 )
47 # We failed to apply the patch, presumably due to fixes made
48 # by hooks.
49 # Roll back the changes made by hooks.
50 cmd_runner.run(['git', 'checkout', '--', '.'])
51 cmd_runner.run(['git', 'apply', patch_filename])
52 logger.info('Restored changes from {0}.'.format(patch_filename))
53 else:
54 # There weren't any staged files so we don't need to do anything
55 # special
56 yield
57
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pre_commit/staged_files_only.py b/pre_commit/staged_files_only.py
--- a/pre_commit/staged_files_only.py
+++ b/pre_commit/staged_files_only.py
@@ -17,11 +17,11 @@
cmd_runner - PrefixedCommandRunner
"""
# Determine if there are unstaged files
- retcode, _, _ = cmd_runner.run(
- ['git', 'diff-files', '--quiet'],
+ retcode, diff_stdout, _ = cmd_runner.run(
+ ['git', 'diff', '--ignore-submodules', '--binary', '--exit-code'],
retcode=None,
)
- if retcode:
+ if retcode and diff_stdout.strip():
patch_filename = cmd_runner.path('patch{0}'.format(int(time.time())))
logger.warning('Unstaged files detected.')
logger.info(
@@ -29,7 +29,7 @@
)
# Save the current unstaged changes as a patch
with open(patch_filename, 'w') as patch_file:
- cmd_runner.run(['git', 'diff', '--binary'], stdout=patch_file)
+ patch_file.write(diff_stdout)
# Clear the working directory of unstaged changes
cmd_runner.run(['git', 'checkout', '--', '.'])
| {"golden_diff": "diff --git a/pre_commit/staged_files_only.py b/pre_commit/staged_files_only.py\n--- a/pre_commit/staged_files_only.py\n+++ b/pre_commit/staged_files_only.py\n@@ -17,11 +17,11 @@\n cmd_runner - PrefixedCommandRunner\n \"\"\"\n # Determine if there are unstaged files\n- retcode, _, _ = cmd_runner.run(\n- ['git', 'diff-files', '--quiet'],\n+ retcode, diff_stdout, _ = cmd_runner.run(\n+ ['git', 'diff', '--ignore-submodules', '--binary', '--exit-code'],\n retcode=None,\n )\n- if retcode:\n+ if retcode and diff_stdout.strip():\n patch_filename = cmd_runner.path('patch{0}'.format(int(time.time())))\n logger.warning('Unstaged files detected.')\n logger.info(\n@@ -29,7 +29,7 @@\n )\n # Save the current unstaged changes as a patch\n with open(patch_filename, 'w') as patch_file:\n- cmd_runner.run(['git', 'diff', '--binary'], stdout=patch_file)\n+ patch_file.write(diff_stdout)\n \n # Clear the working directory of unstaged changes\n cmd_runner.run(['git', 'checkout', '--', '.'])\n", "issue": "Occasional flakiness of staged file stasher\nIt appears `git diff-files` is returning incorrectly in some case that I haven't been able to pinpoint.\n\nIt results in something like this (you can see however that all the files are staged):\n\n```\n$ pre-commit \n[WARNING] Unstaged files detected.\nStashing unstaged files to /home/anthony/workspace/pre-commit/.pre-commit-files/patch1397370090.\nTrim Trailing Whitespace............................................Passed\nFix End of Files....................................................Passed\nCheck Yaml..........................................................Passed\nDebug Statements (Python)...........................................Passed\nTests should end in _test.py........................................Passed\nPyflakes............................................................Passed\nValidate Pre-Commit Config..........................................Passed\nValidate Pre-Commit Manifest........................................Passed\n[WARNING] Stashed changes conflicted with hook auto-fixes... Rolling back fixes...\nTraceback (most recent call last):\n File \"/home/anthony/workspace/pre-commit/venv-pre_commit/bin/pre-commit\", line 9, in <module>\n load_entry_point('pre-commit==0.0.0', 'console_scripts', 'pre-commit')()\n File \"/home/anthony/workspace/pre-commit/pre_commit/util.py\", line 52, in wrapper\n return func(argv)\n File \"/home/anthony/workspace/pre-commit/pre_commit/run.py\", line 143, in run\n return _run(runner, args)\n File \"/home/anthony/workspace/pre-commit/pre_commit/run.py\", line 95, in _run\n return run_hooks(runner, args)\n File \"/usr/lib/python2.7/contextlib.py\", line 24, in __exit__\n self.gen.next()\n File \"/home/anthony/workspace/pre-commit/pre_commit/staged_files_only.py\", line 51, in staged_files_only\n cmd_runner.run(['git', 'apply', patch_filename])\n File \"/home/anthony/workspace/pre-commit/pre_commit/prefixed_command_runner.py\", line 67, in run\n returncode, replaced_cmd, retcode, output=(stdout, stderr),\npre_commit.prefixed_command_runner.CalledProcessError: Command: ['git', 'apply', '/home/anthony/workspace/pre-commit/.pre-commit-files/patch1397370090']\nReturn code: 128\nExpected return code: 0\nOutput: ('', 'fatal: unrecognized input\\n')\n$ git status\n# On branch rebuild_venv\n# Changes to be committed:\n# (use \"git reset HEAD <file>...\" to unstage)\n#\n# modified: .gitignore\n# modified: Makefile\n#\n```\n\nThe \"stashed diff\" is an empty file. 
I think the \"fix\" is to check if the diff contains anything before printing the warning message and entering the branch that isn't a noop context manager.\n\n", "before_files": [{"content": "import contextlib\nimport logging\nimport time\n\nfrom pre_commit.prefixed_command_runner import CalledProcessError\n\n\nlogger = logging.getLogger('pre_commit')\n\n\[email protected]\ndef staged_files_only(cmd_runner):\n \"\"\"Clear any unstaged changes from the git working directory inside this\n context.\n\n Args:\n cmd_runner - PrefixedCommandRunner\n \"\"\"\n # Determine if there are unstaged files\n retcode, _, _ = cmd_runner.run(\n ['git', 'diff-files', '--quiet'],\n retcode=None,\n )\n if retcode:\n patch_filename = cmd_runner.path('patch{0}'.format(int(time.time())))\n logger.warning('Unstaged files detected.')\n logger.info(\n 'Stashing unstaged files to {0}.'.format(patch_filename),\n )\n # Save the current unstaged changes as a patch\n with open(patch_filename, 'w') as patch_file:\n cmd_runner.run(['git', 'diff', '--binary'], stdout=patch_file)\n\n # Clear the working directory of unstaged changes\n cmd_runner.run(['git', 'checkout', '--', '.'])\n try:\n yield\n finally:\n # Try to apply the patch we saved\n try:\n cmd_runner.run(['git', 'apply', patch_filename])\n except CalledProcessError:\n logger.warning(\n 'Stashed changes conflicted with hook auto-fixes... '\n 'Rolling back fixes...'\n )\n # We failed to apply the patch, presumably due to fixes made\n # by hooks.\n # Roll back the changes made by hooks.\n cmd_runner.run(['git', 'checkout', '--', '.'])\n cmd_runner.run(['git', 'apply', patch_filename])\n logger.info('Restored changes from {0}.'.format(patch_filename))\n else:\n # There weren't any staged files so we don't need to do anything\n # special\n yield\n", "path": "pre_commit/staged_files_only.py"}], "after_files": [{"content": "import contextlib\nimport logging\nimport time\n\nfrom pre_commit.prefixed_command_runner import CalledProcessError\n\n\nlogger = logging.getLogger('pre_commit')\n\n\[email protected]\ndef staged_files_only(cmd_runner):\n \"\"\"Clear any unstaged changes from the git working directory inside this\n context.\n\n Args:\n cmd_runner - PrefixedCommandRunner\n \"\"\"\n # Determine if there are unstaged files\n retcode, diff_stdout, _ = cmd_runner.run(\n ['git', 'diff', '--ignore-submodules', '--binary', '--exit-code'],\n retcode=None,\n )\n if retcode and diff_stdout.strip():\n patch_filename = cmd_runner.path('patch{0}'.format(int(time.time())))\n logger.warning('Unstaged files detected.')\n logger.info(\n 'Stashing unstaged files to {0}.'.format(patch_filename),\n )\n # Save the current unstaged changes as a patch\n with open(patch_filename, 'w') as patch_file:\n patch_file.write(diff_stdout)\n\n # Clear the working directory of unstaged changes\n cmd_runner.run(['git', 'checkout', '--', '.'])\n try:\n yield\n finally:\n # Try to apply the patch we saved\n try:\n cmd_runner.run(['git', 'apply', patch_filename])\n except CalledProcessError:\n logger.warning(\n 'Stashed changes conflicted with hook auto-fixes... 
'\n 'Rolling back fixes...'\n )\n # We failed to apply the patch, presumably due to fixes made\n # by hooks.\n # Roll back the changes made by hooks.\n cmd_runner.run(['git', 'checkout', '--', '.'])\n cmd_runner.run(['git', 'apply', patch_filename])\n logger.info('Restored changes from {0}.'.format(patch_filename))\n else:\n # There weren't any staged files so we don't need to do anything\n # special\n yield\n", "path": "pre_commit/staged_files_only.py"}]} | 1,398 | 279 |
gh_patches_debug_17535 | rasdani/github-patches | git_diff | doccano__doccano-607 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Enhancement request] Avoid duplicate key value error on launching
Enhancement description
---------
I get these errors in the log on each start:
```
postgres_1 | 2020-03-04 05:34:30.467 UTC [27] ERROR: duplicate key value violates unique constraint "api_role_name_key"
postgres_1 | 2020-03-04 05:34:30.467 UTC [27] DETAIL: Key (name)=(project_admin) already exists.
postgres_1 | 2020-03-04 05:34:30.467 UTC [27] STATEMENT: INSERT INTO "api_role" ("name", "description", "created_at", "updated_at") VALUES ('project_admin', '', '2020-03-04T05:34:30.460290+00:00'::timestamptz, '2020-03-04T05:34:30.460312+00:00'::timestamptz) RETURNING "api_role"."id"
backend_1 | Datbase Error: "duplicate key value violates unique constraint "api_role_name_key"
backend_1 | DETAIL: Key (name)=(project_admin) already exists.
backend_1 | "
postgres_1 | 2020-03-04 05:34:30.468 UTC [27] ERROR: duplicate key value violates unique constraint "api_role_name_key"
postgres_1 | 2020-03-04 05:34:30.468 UTC [27] DETAIL: Key (name)=(annotator) already exists.
postgres_1 | 2020-03-04 05:34:30.468 UTC [27] STATEMENT: INSERT INTO "api_role" ("name", "description", "created_at", "updated_at") VALUES ('annotator', '', '2020-03-04T05:34:30.467909+00:00'::timestamptz, '2020-03-04T05:34:30.467926+00:00'::timestamptz) RETURNING "api_role"."id"
backend_1 | Datbase Error: "duplicate key value violates unique constraint "api_role_name_key"
backend_1 | DETAIL: Key (name)=(annotator) already exists.
backend_1 | "
postgres_1 | 2020-03-04 05:34:30.468 UTC [27] ERROR: duplicate key value violates unique constraint "api_role_name_key"
postgres_1 | 2020-03-04 05:34:30.468 UTC [27] DETAIL: Key (name)=(annotation_approver) already exists.
postgres_1 | 2020-03-04 05:34:30.468 UTC [27] STATEMENT: INSERT INTO "api_role" ("name", "description", "created_at", "updated_at") VALUES ('annotation_approver', '', '2020-03-04T05:34:30.468689+00:00'::timestamptz, '2020-03-04T05:34:30.468706+00:00'::timestamptz) RETURNING "api_role"."id"
backend_1 | Datbase Error: "duplicate key value violates unique constraint "api_role_name_key"
backend_1 | DETAIL: Key (name)=(annotation_approver) already exists.
backend_1 | "
postgres_1 | 2020-03-04 05:34:32.026 UTC [28] ERROR: duplicate key value violates unique constraint "auth_user_username_key"
postgres_1 | 2020-03-04 05:34:32.026 UTC [28] DETAIL: Key (username)=(admin) already exists.
postgres_1 | 2020-03-04 05:34:32.026 UTC [28] STATEMENT: INSERT INTO "auth_user" ("password", "last_login", "is_superuser", "username", "first_name", "last_name", "email", "is_staff", "is_active", "date_joined") VALUES ('<...>', NULL, true, 'admin', '', '', '[email protected]', true, true, '2020-03-04T05:34:32.023520+00:00'::timestamptz) RETURNING "auth_user"."id"
backend_1 | User admin already exists.
backend_1 | CommandError: Error: That username is already taken.
```
I propose checking whether the specified rows already exist before creating them, to avoid these errors.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/server/management/commands/create_roles.py`
Content:
```
1 from api.models import Role
2 from django.core.management.base import BaseCommand
3 from django.db import DatabaseError
4 from django.conf import settings
5
6
7 class Command(BaseCommand):
8     help = 'Non-interactively create default roles'
9 
10     def handle(self, *args, **options):
11         try:
12             role_names = [settings.ROLE_PROJECT_ADMIN, settings.ROLE_ANNOTATOR, settings.ROLE_ANNOTATION_APPROVER]
13         except KeyError as key_error:
14             self.stderr.write(self.style.ERROR(f'Missing Key: "{key_error}"'))
15         for role_name in role_names:
16             role = Role()
17             role.name = role_name
18             try:
19                 role.save()
20             except DatabaseError as db_error:
21                 self.stderr.write(self.style.ERROR(f'Datbase Error: "{db_error}"'))
22             else:
23                 self.stdout.write(self.style.SUCCESS(f'Role created successfully "{role_name}"'))
24
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/app/server/management/commands/create_roles.py b/app/server/management/commands/create_roles.py
--- a/app/server/management/commands/create_roles.py
+++ b/app/server/management/commands/create_roles.py
@@ -13,11 +13,13 @@
except KeyError as key_error:
self.stderr.write(self.style.ERROR(f'Missing Key: "{key_error}"'))
for role_name in role_names:
+ if Role.objects.filter(name=role_name).exists():
+ continue
role = Role()
role.name = role_name
try:
role.save()
except DatabaseError as db_error:
- self.stderr.write(self.style.ERROR(f'Datbase Error: "{db_error}"'))
+ self.stderr.write(self.style.ERROR(f'Database Error: "{db_error}"'))
else:
self.stdout.write(self.style.SUCCESS(f'Role created successfully "{role_name}"'))
| {"golden_diff": "diff --git a/app/server/management/commands/create_roles.py b/app/server/management/commands/create_roles.py\n--- a/app/server/management/commands/create_roles.py\n+++ b/app/server/management/commands/create_roles.py\n@@ -13,11 +13,13 @@\n except KeyError as key_error:\n self.stderr.write(self.style.ERROR(f'Missing Key: \"{key_error}\"'))\n for role_name in role_names:\n+ if Role.objects.filter(name=role_name).exists():\n+ continue\n role = Role()\n role.name = role_name\n try:\n role.save()\n except DatabaseError as db_error:\n- self.stderr.write(self.style.ERROR(f'Datbase Error: \"{db_error}\"'))\n+ self.stderr.write(self.style.ERROR(f'Database Error: \"{db_error}\"'))\n else:\n self.stdout.write(self.style.SUCCESS(f'Role created successfully \"{role_name}\"'))\n", "issue": "[Enhancement request] Avoid duplicate key value error on launching\nEnhancement description\r\n---------\r\nI have these errors in log on each start:\r\n```\r\npostgres_1 | 2020-03-04 05:34:30.467 UTC [27] ERROR: duplicate key value violates unique constraint \"api_role_name_key\"\r\npostgres_1 | 2020-03-04 05:34:30.467 UTC [27] DETAIL: Key (name)=(project_admin) already exists.\r\npostgres_1 | 2020-03-04 05:34:30.467 UTC [27] STATEMENT: INSERT INTO \"api_role\" (\"name\", \"description\", \"created_at\", \"updated_at\") VALUES ('project_admin', '', '2020-03-04T05:34:30.460290+00:00'::timestamptz, '2020-03-04T05:34:30.460312+00:00'::timestamptz) RETURNING \"api_role\".\"id\"\r\nbackend_1 | Datbase Error: \"duplicate key value violates unique constraint \"api_role_name_key\"\r\nbackend_1 | DETAIL: Key (name)=(project_admin) already exists.\r\nbackend_1 | \"\r\npostgres_1 | 2020-03-04 05:34:30.468 UTC [27] ERROR: duplicate key value violates unique constraint \"api_role_name_key\"\r\npostgres_1 | 2020-03-04 05:34:30.468 UTC [27] DETAIL: Key (name)=(annotator) already exists.\r\npostgres_1 | 2020-03-04 05:34:30.468 UTC [27] STATEMENT: INSERT INTO \"api_role\" (\"name\", \"description\", \"created_at\", \"updated_at\") VALUES ('annotator', '', '2020-03-04T05:34:30.467909+00:00'::timestamptz, '2020-03-04T05:34:30.467926+00:00'::timestamptz) RETURNING \"api_role\".\"id\"\r\nbackend_1 | Datbase Error: \"duplicate key value violates unique constraint \"api_role_name_key\"\r\nbackend_1 | DETAIL: Key (name)=(annotator) already exists.\r\nbackend_1 | \"\r\npostgres_1 | 2020-03-04 05:34:30.468 UTC [27] ERROR: duplicate key value violates unique constraint \"api_role_name_key\"\r\npostgres_1 | 2020-03-04 05:34:30.468 UTC [27] DETAIL: Key (name)=(annotation_approver) already exists.\r\npostgres_1 | 2020-03-04 05:34:30.468 UTC [27] STATEMENT: INSERT INTO \"api_role\" (\"name\", \"description\", \"created_at\", \"updated_at\") VALUES ('annotation_approver', '', '2020-03-04T05:34:30.468689+00:00'::timestamptz, '2020-03-04T05:34:30.468706+00:00'::timestamptz) RETURNING \"api_role\".\"id\"\r\nbackend_1 | Datbase Error: \"duplicate key value violates unique constraint \"api_role_name_key\"\r\nbackend_1 | DETAIL: Key (name)=(annotation_approver) already exists.\r\nbackend_1 | \"\r\npostgres_1 | 2020-03-04 05:34:32.026 UTC [28] ERROR: duplicate key value violates unique constraint \"auth_user_username_key\"\r\npostgres_1 | 2020-03-04 05:34:32.026 UTC [28] DETAIL: Key (username)=(admin) already exists.\r\npostgres_1 | 2020-03-04 05:34:32.026 UTC [28] STATEMENT: INSERT INTO \"auth_user\" (\"password\", \"last_login\", \"is_superuser\", \"username\", \"first_name\", \"last_name\", \"email\", \"is_staff\", \"is_active\", 
\"date_joined\") VALUES ('<...>', NULL, true, 'admin', '', '', '[email protected]', true, true, '2020-03-04T05:34:32.023520+00:00'::timestamptz) RETURNING \"auth_user\".\"id\"\r\nbackend_1 | User admin already exists.\r\nbackend_1 | CommandError: Error: That username is already taken.\r\n```\r\n\r\nPropose to check existence of specified table's rows before creation to avoid these errors.\n", "before_files": [{"content": "from api.models import Role\nfrom django.core.management.base import BaseCommand\nfrom django.db import DatabaseError\nfrom django.conf import settings\n\n\nclass Command(BaseCommand):\n help = 'Non-interactively create default roles'\n\n def handle(self, *args, **options):\n try:\n role_names = [settings.ROLE_PROJECT_ADMIN, settings.ROLE_ANNOTATOR, settings.ROLE_ANNOTATION_APPROVER]\n except KeyError as key_error:\n self.stderr.write(self.style.ERROR(f'Missing Key: \"{key_error}\"'))\n for role_name in role_names:\n role = Role()\n role.name = role_name\n try:\n role.save()\n except DatabaseError as db_error:\n self.stderr.write(self.style.ERROR(f'Datbase Error: \"{db_error}\"'))\n else:\n self.stdout.write(self.style.SUCCESS(f'Role created successfully \"{role_name}\"'))\n", "path": "app/server/management/commands/create_roles.py"}], "after_files": [{"content": "from api.models import Role\nfrom django.core.management.base import BaseCommand\nfrom django.db import DatabaseError\nfrom django.conf import settings\n\n\nclass Command(BaseCommand):\n help = 'Non-interactively create default roles'\n\n def handle(self, *args, **options):\n try:\n role_names = [settings.ROLE_PROJECT_ADMIN, settings.ROLE_ANNOTATOR, settings.ROLE_ANNOTATION_APPROVER]\n except KeyError as key_error:\n self.stderr.write(self.style.ERROR(f'Missing Key: \"{key_error}\"'))\n for role_name in role_names:\n if Role.objects.filter(name=role_name).exists():\n continue\n role = Role()\n role.name = role_name\n try:\n role.save()\n except DatabaseError as db_error:\n self.stderr.write(self.style.ERROR(f'Database Error: \"{db_error}\"'))\n else:\n self.stdout.write(self.style.SUCCESS(f'Role created successfully \"{role_name}\"'))\n", "path": "app/server/management/commands/create_roles.py"}]} | 1,710 | 195 |
gh_patches_debug_17960 | rasdani/github-patches | git_diff | PaddlePaddle__PaddleSeg-2277 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Bug] Dice loss bug
Environment: AI Studio, Tesla V100
paddlepaddle=2.3.0
paddleseg=2.5.0
Bug: in the forward pass of dice loss, ignore_index is not handled before the labels are converted to one-hot encoding. When the ignore_index value is greater than num_classes (for example, ignore_index is 255 with 19 classes), CUDA error 719 is raised; the cause is an incorrect one-hot conversion.
Code link: https://github.com/PaddlePaddle/PaddleSeg/blob/35a4c4d229df2d4a5ca724ad442bf5e0f75b4823/paddleseg/models/losses/dice_loss.py#L46
The mask computation can be moved before the one-hot conversion, and ignore_index can then be reassigned to a value smaller than num_classes:
```python
def forward(self, logits, labels):
    num_class = logits.shape[1]
    if self.weight is not None:
        assert num_class == len(self.weight), \
            "The lenght of weight should be euqal to the num class"
    if logits.shape != labels.shape:
        labels = labels.unsqueeze(axis=1)
        labels = F.interpolate(labels, size=logits.shape[2:], mode='nearest')
        labels = labels.squeeze(axis=1)
    logits = F.softmax(logits, axis=1)
    mask = labels != self.ignore_index
    mask = paddle.cast(paddle.unsqueeze(mask, 1), 'float32')
    labels[labels == self.ignore_index] = 0
    labels_one_hot = F.one_hot(labels, num_class)
    labels_one_hot = paddle.transpose(labels_one_hot, [0, 3, 1, 2])
    dice_loss = 0.0
    for i in range(num_class):
        dice_loss_i = dice_loss_helper(logits[:, i], labels_one_hot[:, i],
                                       mask, self.smooth, self.eps)
        if self.weight is not None:
            dice_loss_i *= self.weight[i]
        dice_loss += dice_loss_i
    dice_loss = dice_loss / num_class
    return dice_loss
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `paddleseg/models/losses/dice_loss.py`
Content:
```
1 # you may not use this file except in compliance with the License.
2 # You may obtain a copy of the License at
3 #
4 # http://www.apache.org/licenses/LICENSE-2.0
5 #
6 # Unless required by applicable law or agreed to in writing, software
7 # distributed under the License is distributed on an "AS IS" BASIS,
8 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
9 # See the License for the specific language governing permissions and
10 # limitations under the License.
11
12 import paddle
13 from paddle import nn
14 import paddle.nn.functional as F
15
16 from paddleseg.cvlibs import manager
17
18
19 @manager.LOSSES.add_component
20 class DiceLoss(nn.Layer):
21 """
22 The implements of the dice loss.
23
24 Args:
25 weight (list[float], optional): The weight for each class. Default: None.
26 ignore_index (int64): ignore_index (int64, optional): Specifies a target value that
27 is ignored and does not contribute to the input gradient. Default ``255``.
28 smooth (float32): Laplace smoothing to smooth dice loss and accelerate convergence.
29 Default: 1.0
30 """
31
32 def __init__(self, weight=None, ignore_index=255, smooth=1.0):
33 super().__init__()
34 self.weight = weight
35 self.ignore_index = ignore_index
36 self.smooth = smooth
37 self.eps = 1e-8
38
39 def forward(self, logits, labels):
40 num_class = logits.shape[1]
41 if self.weight is not None:
42 assert num_class == len(self.weight), \
43 "The lenght of weight should be euqal to the num class"
44
45 logits = F.softmax(logits, axis=1)
46 labels_one_hot = F.one_hot(labels, num_class)
47 labels_one_hot = paddle.transpose(labels_one_hot, [0, 3, 1, 2])
48
49 mask = labels != self.ignore_index
50 mask = paddle.cast(paddle.unsqueeze(mask, 1), 'float32')
51
52 dice_loss = 0.0
53 for i in range(num_class):
54 dice_loss_i = dice_loss_helper(logits[:, i], labels_one_hot[:, i],
55 mask, self.smooth, self.eps)
56 if self.weight is not None:
57 dice_loss_i *= self.weight[i]
58 dice_loss += dice_loss_i
59 dice_loss = dice_loss / num_class
60
61 return dice_loss
62
63
64 def dice_loss_helper(logit, label, mask, smooth, eps):
65 assert logit.shape == label.shape, \
66 "The shape of logit and label should be the same"
67 logit = paddle.reshape(logit, [0, -1])
68 label = paddle.reshape(label, [0, -1])
69 mask = paddle.reshape(mask, [0, -1])
70 logit *= mask
71 label *= mask
72 intersection = paddle.sum(logit * label, axis=1)
73 cardinality = paddle.sum(logit + label, axis=1)
74 dice_loss = 1 - (2 * intersection + smooth) / (cardinality + smooth + eps)
75 dice_loss = dice_loss.mean()
76 return dice_loss
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/paddleseg/models/losses/dice_loss.py b/paddleseg/models/losses/dice_loss.py
--- a/paddleseg/models/losses/dice_loss.py
+++ b/paddleseg/models/losses/dice_loss.py
@@ -42,13 +42,14 @@
assert num_class == len(self.weight), \
"The lenght of weight should be euqal to the num class"
- logits = F.softmax(logits, axis=1)
- labels_one_hot = F.one_hot(labels, num_class)
- labels_one_hot = paddle.transpose(labels_one_hot, [0, 3, 1, 2])
-
mask = labels != self.ignore_index
mask = paddle.cast(paddle.unsqueeze(mask, 1), 'float32')
+ labels[labels == self.ignore_index] = 0
+ labels_one_hot = F.one_hot(labels, num_class)
+ labels_one_hot = paddle.transpose(labels_one_hot, [0, 3, 1, 2])
+ logits = F.softmax(logits, axis=1)
+
dice_loss = 0.0
for i in range(num_class):
dice_loss_i = dice_loss_helper(logits[:, i], labels_one_hot[:, i],
| {"golden_diff": "diff --git a/paddleseg/models/losses/dice_loss.py b/paddleseg/models/losses/dice_loss.py\n--- a/paddleseg/models/losses/dice_loss.py\n+++ b/paddleseg/models/losses/dice_loss.py\n@@ -42,13 +42,14 @@\n assert num_class == len(self.weight), \\\n \"The lenght of weight should be euqal to the num class\"\n \n- logits = F.softmax(logits, axis=1)\n- labels_one_hot = F.one_hot(labels, num_class)\n- labels_one_hot = paddle.transpose(labels_one_hot, [0, 3, 1, 2])\n-\n mask = labels != self.ignore_index\n mask = paddle.cast(paddle.unsqueeze(mask, 1), 'float32')\n \n+ labels[labels == self.ignore_index] = 0\n+ labels_one_hot = F.one_hot(labels, num_class)\n+ labels_one_hot = paddle.transpose(labels_one_hot, [0, 3, 1, 2])\n+ logits = F.softmax(logits, axis=1)\n+\n dice_loss = 0.0\n for i in range(num_class):\n dice_loss_i = dice_loss_helper(logits[:, i], labels_one_hot[:, i],\n", "issue": "[Bug] Dice loss bug\n\u73af\u5883\uff1aaistudio tesla v100\r\npaddlepaddle=2.3.0\r\npaddleseg=2.5.0\r\n\r\nbug\uff1adice loss \u524d\u5411\u4f20\u64ad\u65f6\uff0clabel\u8f6c\u6362\u4e3aone-hot\u7f16\u7801\u524d\uff0c\u5e76\u672a\u5bf9ignore_index\u8fdb\u884c\u5904\u7406\uff0c\u5f53ignore_index\u503c\u5927\u4e8enum_classes\uff08\u6bd4\u5982ignore index\u4e3a255\uff0c\u7c7b\u522b\u4e3a19\uff09\u65f6\uff0c\u62a5\u9519cuda 719\u3002\u539f\u56e0\u4e3aone-hot\u8f6c\u6362\u9519\u8bef\u3002 \r\n\u4ee3\u7801\u94fe\u63a5\uff1ahttps://github.com/PaddlePaddle/PaddleSeg/blob/35a4c4d229df2d4a5ca724ad442bf5e0f75b4823/paddleseg/models/losses/dice_loss.py#L46\r\n\r\n\u53ef\u5c06mask\u90e8\u5206\u653e\u5230one-hot\u4e4b\u524d\uff0c\u7136\u540e\u5c06ignore_index\u8d4b\u503c\u4e00\u4e2a\u5c0f\u4e8enum_classes\u7684\u503c\uff1a\r\n```python\r\n def forward(self, logits, labels):\r\n num_class = logits.shape[1]\r\n if self.weight is not None:\r\n assert num_class == len(self.weight), \\\r\n \"The lenght of weight should be euqal to the num class\"\r\n if logits.shape != labels.shape:\r\n labels = labels.unsqueeze(axis=1)\r\n labels = F.interpolate(labels, size=logits.shape[2:], mode='nearest')\r\n labels = labels.squeeze(axis=1)\r\n logits = F.softmax(logits, axis=1)\r\n mask = labels != self.ignore_index\r\n mask = paddle.cast(paddle.unsqueeze(mask, 1), 'float32')\r\n labels[labels == self.ignore_index] = 0\r\n labels_one_hot = F.one_hot(labels, num_class)\r\n labels_one_hot = paddle.transpose(labels_one_hot, [0, 3, 1, 2])\r\n \r\n dice_loss = 0.0\r\n for i in range(num_class):\r\n dice_loss_i = dice_loss_helper(logits[:, i], labels_one_hot[:, i],\r\n mask, self.smooth, self.eps)\r\n if self.weight is not None:\r\n dice_loss_i *= self.weight[i]\r\n dice_loss += dice_loss_i\r\n dice_loss = dice_loss / num_class\r\n\r\n return dice_loss\r\n```\r\n\n", "before_files": [{"content": "# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport paddle\nfrom paddle import nn\nimport paddle.nn.functional as F\n\nfrom paddleseg.cvlibs import manager\n\n\[email protected]_component\nclass DiceLoss(nn.Layer):\n \"\"\"\n The implements of the dice loss.\n\n Args:\n weight (list[float], optional): The 
weight for each class. Default: None.\n ignore_index (int64): ignore_index (int64, optional): Specifies a target value that\n is ignored and does not contribute to the input gradient. Default ``255``.\n smooth (float32): Laplace smoothing to smooth dice loss and accelerate convergence.\n Default: 1.0\n \"\"\"\n\n def __init__(self, weight=None, ignore_index=255, smooth=1.0):\n super().__init__()\n self.weight = weight\n self.ignore_index = ignore_index\n self.smooth = smooth\n self.eps = 1e-8\n\n def forward(self, logits, labels):\n num_class = logits.shape[1]\n if self.weight is not None:\n assert num_class == len(self.weight), \\\n \"The lenght of weight should be euqal to the num class\"\n\n logits = F.softmax(logits, axis=1)\n labels_one_hot = F.one_hot(labels, num_class)\n labels_one_hot = paddle.transpose(labels_one_hot, [0, 3, 1, 2])\n\n mask = labels != self.ignore_index\n mask = paddle.cast(paddle.unsqueeze(mask, 1), 'float32')\n\n dice_loss = 0.0\n for i in range(num_class):\n dice_loss_i = dice_loss_helper(logits[:, i], labels_one_hot[:, i],\n mask, self.smooth, self.eps)\n if self.weight is not None:\n dice_loss_i *= self.weight[i]\n dice_loss += dice_loss_i\n dice_loss = dice_loss / num_class\n\n return dice_loss\n\n\ndef dice_loss_helper(logit, label, mask, smooth, eps):\n assert logit.shape == label.shape, \\\n \"The shape of logit and label should be the same\"\n logit = paddle.reshape(logit, [0, -1])\n label = paddle.reshape(label, [0, -1])\n mask = paddle.reshape(mask, [0, -1])\n logit *= mask\n label *= mask\n intersection = paddle.sum(logit * label, axis=1)\n cardinality = paddle.sum(logit + label, axis=1)\n dice_loss = 1 - (2 * intersection + smooth) / (cardinality + smooth + eps)\n dice_loss = dice_loss.mean()\n return dice_loss\n", "path": "paddleseg/models/losses/dice_loss.py"}], "after_files": [{"content": "# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport paddle\nfrom paddle import nn\nimport paddle.nn.functional as F\n\nfrom paddleseg.cvlibs import manager\n\n\[email protected]_component\nclass DiceLoss(nn.Layer):\n \"\"\"\n The implements of the dice loss.\n\n Args:\n weight (list[float], optional): The weight for each class. Default: None.\n ignore_index (int64): ignore_index (int64, optional): Specifies a target value that\n is ignored and does not contribute to the input gradient. 
Default ``255``.\n smooth (float32): Laplace smoothing to smooth dice loss and accelerate convergence.\n Default: 1.0\n \"\"\"\n\n def __init__(self, weight=None, ignore_index=255, smooth=1.0):\n super().__init__()\n self.weight = weight\n self.ignore_index = ignore_index\n self.smooth = smooth\n self.eps = 1e-8\n\n def forward(self, logits, labels):\n num_class = logits.shape[1]\n if self.weight is not None:\n assert num_class == len(self.weight), \\\n \"The lenght of weight should be euqal to the num class\"\n\n mask = labels != self.ignore_index\n mask = paddle.cast(paddle.unsqueeze(mask, 1), 'float32')\n\n labels[labels == self.ignore_index] = 0\n labels_one_hot = F.one_hot(labels, num_class)\n labels_one_hot = paddle.transpose(labels_one_hot, [0, 3, 1, 2])\n logits = F.softmax(logits, axis=1)\n\n dice_loss = 0.0\n for i in range(num_class):\n dice_loss_i = dice_loss_helper(logits[:, i], labels_one_hot[:, i],\n mask, self.smooth, self.eps)\n if self.weight is not None:\n dice_loss_i *= self.weight[i]\n dice_loss += dice_loss_i\n dice_loss = dice_loss / num_class\n\n return dice_loss\n\n\ndef dice_loss_helper(logit, label, mask, smooth, eps):\n assert logit.shape == label.shape, \\\n \"The shape of logit and label should be the same\"\n logit = paddle.reshape(logit, [0, -1])\n label = paddle.reshape(label, [0, -1])\n mask = paddle.reshape(mask, [0, -1])\n logit *= mask\n label *= mask\n intersection = paddle.sum(logit * label, axis=1)\n cardinality = paddle.sum(logit + label, axis=1)\n dice_loss = 1 - (2 * intersection + smooth) / (cardinality + smooth + eps)\n dice_loss = dice_loss.mean()\n return dice_loss\n", "path": "paddleseg/models/losses/dice_loss.py"}]} | 1,559 | 276 |
gh_patches_debug_531 | rasdani/github-patches | git_diff | joke2k__faker-1569 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
too long iban generated for pl-PL locale
* Faker version: 9.8.2
* OS: MacOs 12.0.1
IBANs generated for the pl_PL locale are 30 characters long. This is too many: a valid PL IBAN has 28 characters (including the country code).
### Steps to reproduce
Generate a Polish IBAN with:
```
from faker import Faker
fake = Faker('pl-PL')
print(fake.iban())
```
Copy-paste the generated string into the IBAN validator at https://www.ibancalculator.com/
### Expected behavior
IBAN should have the correct length and checksum
### Actual behavior
There is an error message that IBAN have too many characters:
"This IBAN cannot be correct because of its length. A Polish IBAN always contains exactly 28 digits and letters ("PL", a 2-digit checksum, and the 24-digit national account number, whose first 8 digits determine the bank and branch). The IBAN you entered is 30 characters long."
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `faker/providers/bank/pl_PL/__init__.py`
Content:
```
1 from .. import Provider as BankProvider
2
3
4 class Provider(BankProvider):
5 """Implement bank provider for ``pl_PL`` locale."""
6
7 bban_format = "#" * 26
8 country_code = "PL"
9
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/faker/providers/bank/pl_PL/__init__.py b/faker/providers/bank/pl_PL/__init__.py
--- a/faker/providers/bank/pl_PL/__init__.py
+++ b/faker/providers/bank/pl_PL/__init__.py
@@ -4,5 +4,5 @@
class Provider(BankProvider):
"""Implement bank provider for ``pl_PL`` locale."""
- bban_format = "#" * 26
+ bban_format = "#" * 24
country_code = "PL"
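The patch keeps the BBAN at 24 digits, so together with the country code "PL" and the 2-digit checksum the generated IBAN reaches the 28 characters the validator expects. A minimal sanity check along these lines (assuming the patched `pl_PL` provider is installed) could be:

```python
from faker import Faker

fake = Faker("pl_PL")
iban = fake.iban()

# "PL" + 2-digit checksum + 24-digit BBAN = 28 characters in total
assert iban.startswith("PL")
assert len(iban) == 28
```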
| {"golden_diff": "diff --git a/faker/providers/bank/pl_PL/__init__.py b/faker/providers/bank/pl_PL/__init__.py\n--- a/faker/providers/bank/pl_PL/__init__.py\n+++ b/faker/providers/bank/pl_PL/__init__.py\n@@ -4,5 +4,5 @@\n class Provider(BankProvider):\n \"\"\"Implement bank provider for ``pl_PL`` locale.\"\"\"\n \n- bban_format = \"#\" * 26\n+ bban_format = \"#\" * 24\n country_code = \"PL\"\n", "issue": "too long iban generated for pl-PL locale\n* Faker version: 9.8.2\r\n* OS: MacOs 12.0.1\r\n\r\nIBANs generated for pl_PL locales are 30 characters long. This is too many. Valid PL IBAN should have 28 characters (including country code).\r\n\r\n### Steps to reproduce\r\nGenerate a Polish IBAN with:\r\n```\r\nfrom faker import Faker\r\n fake=Faker('pl-PL')\r\n print(fake.iban())\r\n```\r\nCopy paste generated string into IBAN Validator at https://www.ibancalculator.com/\r\n### Expected behavior\r\n\r\nIBAN should have the correct length and checksum\r\n\r\n### Actual behavior\r\n\r\nThere is an error message that IBAN have too many characters:\r\n\"This IBAN cannot be correct because of its length. A Polish IBAN always contains exactly 28 digits and letters (\"PL\", a 2-digit checksum, and the 24-digit national account number, whose first 8 digits determine the bank and branch). The IBAN you entered is 30 characters long.\"\r\n\ntoo long iban generated for pl-PL locale\n* Faker version: 9.8.2\r\n* OS: MacOs 12.0.1\r\n\r\nIBANs generated for pl_PL locales are 30 characters long. This is too many. Valid PL IBAN should have 28 characters (including country code).\r\n\r\n### Steps to reproduce\r\nGenerate a Polish IBAN with:\r\n```\r\nfrom faker import Faker\r\n fake=Faker('pl-PL')\r\n print(fake.iban())\r\n```\r\nCopy paste generated string into IBAN Validator at https://www.ibancalculator.com/\r\n### Expected behavior\r\n\r\nIBAN should have the correct length and checksum\r\n\r\n### Actual behavior\r\n\r\nThere is an error message that IBAN have too many characters:\r\n\"This IBAN cannot be correct because of its length. A Polish IBAN always contains exactly 28 digits and letters (\"PL\", a 2-digit checksum, and the 24-digit national account number, whose first 8 digits determine the bank and branch). The IBAN you entered is 30 characters long.\"\r\n\n", "before_files": [{"content": "from .. import Provider as BankProvider\n\n\nclass Provider(BankProvider):\n \"\"\"Implement bank provider for ``pl_PL`` locale.\"\"\"\n\n bban_format = \"#\" * 26\n country_code = \"PL\"\n", "path": "faker/providers/bank/pl_PL/__init__.py"}], "after_files": [{"content": "from .. import Provider as BankProvider\n\n\nclass Provider(BankProvider):\n \"\"\"Implement bank provider for ``pl_PL`` locale.\"\"\"\n\n bban_format = \"#\" * 24\n country_code = \"PL\"\n", "path": "faker/providers/bank/pl_PL/__init__.py"}]} | 767 | 116 |
gh_patches_debug_20741 | rasdani/github-patches | git_diff | astronomer__astro-sdk-165 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add SQLite example
* Create an example in `example_dags` illustrating the usage of SQLite
* This example could use one of our checks
* Update `tests/test_example_dags.py` to run it
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `noxfile.py`
Content:
```
1 """Nox automation definitions."""
2
3 import pathlib
4
5 import nox
6
7 nox.options.sessions = ["dev"]
8
9
10 @nox.session(python="3.9")
11 def dev(session: nox.Session) -> None:
12 """Create a dev environment with everything installed.
13
14 This is useful for setting up IDE for autocompletion etc. Point the
15 development environment to ``.nox/dev``.
16 """
17 session.install("nox")
18 session.install("-e", ".[all]")
19 session.install("-e", ".[tests]")
20
21
22 @nox.session(python=["3.7", "3.8", "3.9"])
23 def test(session: nox.Session) -> None:
24 """Run unit tests."""
25 session.install("-e", ".[all]")
26 session.install("-e", ".[tests]")
27 session.run("airflow", "db", "init")
28 session.run("pytest", *session.posargs)
29
30
31 @nox.session()
32 @nox.parametrize(
33 "extras",
34 [
35 ("postgres-only", {"include": ["postgres"], "exclude": ["amazon"]}),
36 ("postgres-amazon", {"include": ["postgres", "amazon"]}),
37 ("snowflake-amazon", {"include": ["snowflake", "amazon"]})
38 # ("sqlite", {"include": ["sqlite"]}),
39 ],
40 )
41 def test_examples_by_dependency(session: nox.Session, extras):
42 _, extras = extras
43 pypi_deps = ",".join(extras["include"])
44 pytest_options = " and ".join(extras["include"])
45 pytest_options = " and not ".join([pytest_options, *extras.get("exclude", [])])
46 pytest_args = ["-k", pytest_options]
47
48 session.install("-e", f".[{pypi_deps}]")
49 session.install("-e", f".[tests]")
50 session.run("airflow", "db", "init")
51
52 session.run("pytest", "tests/test_example_dags.py", *pytest_args, *session.posargs)
53
54
55 @nox.session()
56 def lint(session: nox.Session) -> None:
57 """Run linters."""
58 session.install("pre-commit")
59 if session.posargs:
60 args = [*session.posargs, "--all-files"]
61 else:
62 args = ["--all-files", "--show-diff-on-failure"]
63 session.run("pre-commit", "run", *args)
64
65
66 @nox.session()
67 def build(session: nox.Session) -> None:
68 """Build release artifacts."""
69 session.install("build")
70
71 # TODO: Automate version bumping, Git tagging, and more?
72
73 dist = pathlib.Path("dist")
74 if dist.exists() and next(dist.iterdir(), None) is not None:
75 session.error(
76 "There are files in dist/. Remove them and try again. "
77 "You can use `git clean -fxdi -- dist` command to do this."
78 )
79 dist.mkdir(exist_ok=True)
80
81 session.run("python", "-m", "build", *session.posargs)
82
83
84 @nox.session()
85 def release(session: nox.Session) -> None:
86 """Publish a release."""
87 session.install("twine")
88 # TODO: Better artifact checking.
89 session.run("twine", "check", *session.posargs)
90 session.run("twine", "upload", *session.posargs)
91
```
Path: `example_dags/example_sqlite_load_transform.py`
Content:
```
1 from datetime import datetime
2
3 from airflow import DAG
4
5 from astro import sql as aql
6 from astro.sql.table import Table
7
8 START_DATE = datetime(2000, 1, 1)
9
10
11 @aql.transform()
12 def top_five_animations(input_table: Table):
13 return """
14 SELECT Title, Rating
15 FROM {{input_table}}
16 WHERE Genre1=='Animation'
17 ORDER BY Rating desc
18 LIMIT 5;
19 """
20
21
22 with DAG(
23 "example_sqlite_load_transform",
24 schedule_interval=None,
25 start_date=START_DATE,
26 catchup=False,
27 ) as dag:
28
29 imdb_movies = aql.load_file(
30 path="https://raw.githubusercontent.com/astro-projects/astro/readme/tests/data/imdb.csv",
31 task_id="load_csv",
32 output_table=Table(
33 table_name="imdb_movies", database="sqlite", conn_id="sqlite_default"
34 ),
35 )
36
37 top_five_animations(
38 input_table=imdb_movies,
39 output_table=Table(
40 table_name="top_animation", database="sqlite", conn_id="sqlite_default"
41 ),
42 )
43
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/example_dags/example_sqlite_load_transform.py b/example_dags/example_sqlite_load_transform.py
--- a/example_dags/example_sqlite_load_transform.py
+++ b/example_dags/example_sqlite_load_transform.py
@@ -27,7 +27,7 @@
) as dag:
imdb_movies = aql.load_file(
- path="https://raw.githubusercontent.com/astro-projects/astro/readme/tests/data/imdb.csv",
+ path="https://raw.githubusercontent.com/astro-projects/astro/main/tests/data/imdb.csv",
task_id="load_csv",
output_table=Table(
table_name="imdb_movies", database="sqlite", conn_id="sqlite_default"
diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -34,8 +34,8 @@
[
("postgres-only", {"include": ["postgres"], "exclude": ["amazon"]}),
("postgres-amazon", {"include": ["postgres", "amazon"]}),
- ("snowflake-amazon", {"include": ["snowflake", "amazon"]})
- # ("sqlite", {"include": ["sqlite"]}),
+ ("snowflake-amazon", {"include": ["snowflake", "amazon"]}),
+ ("sqlite", {"include": ["sqlite"]}),
],
)
def test_examples_by_dependency(session: nox.Session, extras):
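For reference, the `include`/`exclude` lists in the parametrization are folded into a single `pytest -k` expression by `test_examples_by_dependency`. A rough sketch of that computation, using only the string handling already shown in `noxfile.py`:

```python
def k_expression(extras):
    # Mirrors the logic inside test_examples_by_dependency()
    options = " and ".join(extras["include"])
    options = " and not ".join([options, *extras.get("exclude", [])])
    return options

assert k_expression({"include": ["sqlite"]}) == "sqlite"
assert k_expression({"include": ["postgres"], "exclude": ["amazon"]}) == "postgres and not amazon"
# The session then runs: pytest tests/test_example_dags.py -k "<expression>"
```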
| {"golden_diff": "diff --git a/example_dags/example_sqlite_load_transform.py b/example_dags/example_sqlite_load_transform.py\n--- a/example_dags/example_sqlite_load_transform.py\n+++ b/example_dags/example_sqlite_load_transform.py\n@@ -27,7 +27,7 @@\n ) as dag:\n \n imdb_movies = aql.load_file(\n- path=\"https://raw.githubusercontent.com/astro-projects/astro/readme/tests/data/imdb.csv\",\n+ path=\"https://raw.githubusercontent.com/astro-projects/astro/main/tests/data/imdb.csv\",\n task_id=\"load_csv\",\n output_table=Table(\n table_name=\"imdb_movies\", database=\"sqlite\", conn_id=\"sqlite_default\"\ndiff --git a/noxfile.py b/noxfile.py\n--- a/noxfile.py\n+++ b/noxfile.py\n@@ -34,8 +34,8 @@\n [\n (\"postgres-only\", {\"include\": [\"postgres\"], \"exclude\": [\"amazon\"]}),\n (\"postgres-amazon\", {\"include\": [\"postgres\", \"amazon\"]}),\n- (\"snowflake-amazon\", {\"include\": [\"snowflake\", \"amazon\"]})\n- # (\"sqlite\", {\"include\": [\"sqlite\"]}),\n+ (\"snowflake-amazon\", {\"include\": [\"snowflake\", \"amazon\"]}),\n+ (\"sqlite\", {\"include\": [\"sqlite\"]}),\n ],\n )\n def test_examples_by_dependency(session: nox.Session, extras):\n", "issue": "Add SQLite example\n* Create an example in `example_dags` illustrating the usage of SQLite\r\n* This example could use one of our checks\r\n* Update `tests/test_example_dags.py` to run it\n", "before_files": [{"content": "\"\"\"Nox automation definitions.\"\"\"\n\nimport pathlib\n\nimport nox\n\nnox.options.sessions = [\"dev\"]\n\n\[email protected](python=\"3.9\")\ndef dev(session: nox.Session) -> None:\n \"\"\"Create a dev environment with everything installed.\n\n This is useful for setting up IDE for autocompletion etc. Point the\n development environment to ``.nox/dev``.\n \"\"\"\n session.install(\"nox\")\n session.install(\"-e\", \".[all]\")\n session.install(\"-e\", \".[tests]\")\n\n\[email protected](python=[\"3.7\", \"3.8\", \"3.9\"])\ndef test(session: nox.Session) -> None:\n \"\"\"Run unit tests.\"\"\"\n session.install(\"-e\", \".[all]\")\n session.install(\"-e\", \".[tests]\")\n session.run(\"airflow\", \"db\", \"init\")\n session.run(\"pytest\", *session.posargs)\n\n\[email protected]()\[email protected](\n \"extras\",\n [\n (\"postgres-only\", {\"include\": [\"postgres\"], \"exclude\": [\"amazon\"]}),\n (\"postgres-amazon\", {\"include\": [\"postgres\", \"amazon\"]}),\n (\"snowflake-amazon\", {\"include\": [\"snowflake\", \"amazon\"]})\n # (\"sqlite\", {\"include\": [\"sqlite\"]}),\n ],\n)\ndef test_examples_by_dependency(session: nox.Session, extras):\n _, extras = extras\n pypi_deps = \",\".join(extras[\"include\"])\n pytest_options = \" and \".join(extras[\"include\"])\n pytest_options = \" and not \".join([pytest_options, *extras.get(\"exclude\", [])])\n pytest_args = [\"-k\", pytest_options]\n\n session.install(\"-e\", f\".[{pypi_deps}]\")\n session.install(\"-e\", f\".[tests]\")\n session.run(\"airflow\", \"db\", \"init\")\n\n session.run(\"pytest\", \"tests/test_example_dags.py\", *pytest_args, *session.posargs)\n\n\[email protected]()\ndef lint(session: nox.Session) -> None:\n \"\"\"Run linters.\"\"\"\n session.install(\"pre-commit\")\n if session.posargs:\n args = [*session.posargs, \"--all-files\"]\n else:\n args = [\"--all-files\", \"--show-diff-on-failure\"]\n session.run(\"pre-commit\", \"run\", *args)\n\n\[email protected]()\ndef build(session: nox.Session) -> None:\n \"\"\"Build release artifacts.\"\"\"\n session.install(\"build\")\n\n # TODO: Automate version bumping, Git tagging, and more?\n\n dist = 
pathlib.Path(\"dist\")\n if dist.exists() and next(dist.iterdir(), None) is not None:\n session.error(\n \"There are files in dist/. Remove them and try again. \"\n \"You can use `git clean -fxdi -- dist` command to do this.\"\n )\n dist.mkdir(exist_ok=True)\n\n session.run(\"python\", \"-m\", \"build\", *session.posargs)\n\n\[email protected]()\ndef release(session: nox.Session) -> None:\n \"\"\"Publish a release.\"\"\"\n session.install(\"twine\")\n # TODO: Better artifact checking.\n session.run(\"twine\", \"check\", *session.posargs)\n session.run(\"twine\", \"upload\", *session.posargs)\n", "path": "noxfile.py"}, {"content": "from datetime import datetime\n\nfrom airflow import DAG\n\nfrom astro import sql as aql\nfrom astro.sql.table import Table\n\nSTART_DATE = datetime(2000, 1, 1)\n\n\[email protected]()\ndef top_five_animations(input_table: Table):\n return \"\"\"\n SELECT Title, Rating\n FROM {{input_table}}\n WHERE Genre1=='Animation'\n ORDER BY Rating desc\n LIMIT 5;\n \"\"\"\n\n\nwith DAG(\n \"example_sqlite_load_transform\",\n schedule_interval=None,\n start_date=START_DATE,\n catchup=False,\n) as dag:\n\n imdb_movies = aql.load_file(\n path=\"https://raw.githubusercontent.com/astro-projects/astro/readme/tests/data/imdb.csv\",\n task_id=\"load_csv\",\n output_table=Table(\n table_name=\"imdb_movies\", database=\"sqlite\", conn_id=\"sqlite_default\"\n ),\n )\n\n top_five_animations(\n input_table=imdb_movies,\n output_table=Table(\n table_name=\"top_animation\", database=\"sqlite\", conn_id=\"sqlite_default\"\n ),\n )\n", "path": "example_dags/example_sqlite_load_transform.py"}], "after_files": [{"content": "\"\"\"Nox automation definitions.\"\"\"\n\nimport pathlib\n\nimport nox\n\nnox.options.sessions = [\"dev\"]\n\n\[email protected](python=\"3.9\")\ndef dev(session: nox.Session) -> None:\n \"\"\"Create a dev environment with everything installed.\n\n This is useful for setting up IDE for autocompletion etc. 
Point the\n development environment to ``.nox/dev``.\n \"\"\"\n session.install(\"nox\")\n session.install(\"-e\", \".[all]\")\n session.install(\"-e\", \".[tests]\")\n\n\[email protected](python=[\"3.7\", \"3.8\", \"3.9\"])\ndef test(session: nox.Session) -> None:\n \"\"\"Run unit tests.\"\"\"\n session.install(\"-e\", \".[all]\")\n session.install(\"-e\", \".[tests]\")\n session.run(\"airflow\", \"db\", \"init\")\n session.run(\"pytest\", *session.posargs)\n\n\[email protected]()\[email protected](\n \"extras\",\n [\n (\"postgres-only\", {\"include\": [\"postgres\"], \"exclude\": [\"amazon\"]}),\n (\"postgres-amazon\", {\"include\": [\"postgres\", \"amazon\"]}),\n (\"snowflake-amazon\", {\"include\": [\"snowflake\", \"amazon\"]}),\n (\"sqlite\", {\"include\": [\"sqlite\"]}),\n ],\n)\ndef test_examples_by_dependency(session: nox.Session, extras):\n _, extras = extras\n pypi_deps = \",\".join(extras[\"include\"])\n pytest_options = \" and \".join(extras[\"include\"])\n pytest_options = \" and not \".join([pytest_options, *extras.get(\"exclude\", [])])\n pytest_args = [\"-k\", pytest_options]\n\n session.install(\"-e\", f\".[{pypi_deps}]\")\n session.install(\"-e\", f\".[tests]\")\n session.run(\"airflow\", \"db\", \"init\")\n\n session.run(\"pytest\", \"tests/test_example_dags.py\", *pytest_args, *session.posargs)\n\n\[email protected]()\ndef lint(session: nox.Session) -> None:\n \"\"\"Run linters.\"\"\"\n session.install(\"pre-commit\")\n if session.posargs:\n args = [*session.posargs, \"--all-files\"]\n else:\n args = [\"--all-files\", \"--show-diff-on-failure\"]\n session.run(\"pre-commit\", \"run\", *args)\n\n\[email protected]()\ndef build(session: nox.Session) -> None:\n \"\"\"Build release artifacts.\"\"\"\n session.install(\"build\")\n\n # TODO: Automate version bumping, Git tagging, and more?\n\n dist = pathlib.Path(\"dist\")\n if dist.exists() and next(dist.iterdir(), None) is not None:\n session.error(\n \"There are files in dist/. Remove them and try again. \"\n \"You can use `git clean -fxdi -- dist` command to do this.\"\n )\n dist.mkdir(exist_ok=True)\n\n session.run(\"python\", \"-m\", \"build\", *session.posargs)\n\n\[email protected]()\ndef release(session: nox.Session) -> None:\n \"\"\"Publish a release.\"\"\"\n session.install(\"twine\")\n # TODO: Better artifact checking.\n session.run(\"twine\", \"check\", *session.posargs)\n session.run(\"twine\", \"upload\", *session.posargs)\n", "path": "noxfile.py"}, {"content": "from datetime import datetime\n\nfrom airflow import DAG\n\nfrom astro import sql as aql\nfrom astro.sql.table import Table\n\nSTART_DATE = datetime(2000, 1, 1)\n\n\[email protected]()\ndef top_five_animations(input_table: Table):\n return \"\"\"\n SELECT Title, Rating\n FROM {{input_table}}\n WHERE Genre1=='Animation'\n ORDER BY Rating desc\n LIMIT 5;\n \"\"\"\n\n\nwith DAG(\n \"example_sqlite_load_transform\",\n schedule_interval=None,\n start_date=START_DATE,\n catchup=False,\n) as dag:\n\n imdb_movies = aql.load_file(\n path=\"https://raw.githubusercontent.com/astro-projects/astro/main/tests/data/imdb.csv\",\n task_id=\"load_csv\",\n output_table=Table(\n table_name=\"imdb_movies\", database=\"sqlite\", conn_id=\"sqlite_default\"\n ),\n )\n\n top_five_animations(\n input_table=imdb_movies,\n output_table=Table(\n table_name=\"top_animation\", database=\"sqlite\", conn_id=\"sqlite_default\"\n ),\n )\n", "path": "example_dags/example_sqlite_load_transform.py"}]} | 1,515 | 305 |
gh_patches_debug_23891 | rasdani/github-patches | git_diff | DDMAL__CantusDB-945 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Users should only be shown the View-Edit toggle if they have edit access for the source/chant in question
@annamorphism made a comment on #441 that really deserves its own issue
> also is there a way to not have the "Edit" tab show up for unauthorized people? it's annoying to try to edit something and then be sent to a 403.
Currently, the view-edit toggle is being displayed whenever the user is logged in. Instead, we need to properly check that the user is actually allowed to edit the chant before displaying the Edit link.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django/cantusdb_project/main_app/views/sequence.py`
Content:
```
1 from django.views.generic import DetailView, ListView, UpdateView
2 from main_app.models import Sequence
3 from django.db.models import Q
4 from main_app.forms import SequenceEditForm
5 from django.contrib.auth.mixins import LoginRequiredMixin
6 from django.contrib import messages
7 from django.contrib.auth.mixins import UserPassesTestMixin
8 from django.core.exceptions import PermissionDenied
9
10
11 class SequenceDetailView(DetailView):
12 """
13 Displays a single Sequence object. Accessed with ``sequences/<int:pk>``
14 """
15
16 model = Sequence
17 context_object_name = "sequence"
18 template_name = "sequence_detail.html"
19
20 def get_context_data(self, **kwargs):
21 sequence = self.get_object()
22 source = sequence.source
23 # if the sequence's source isn't published,
24 # only logged-in users should be able to view the sequence's detail page
25 if (
26 (source is not None)
27 and (source.published is False)
28 and (not self.request.user.is_authenticated)
29 ):
30 raise PermissionDenied()
31
32 context = super().get_context_data(**kwargs)
33 context["concordances"] = (
34 Sequence.objects.filter(cantus_id=sequence.cantus_id)
35 .select_related("source")
36 .order_by("siglum")
37 )
38 return context
39
40
41 class SequenceListView(ListView):
42 """
43 Displays a list of Sequence objects. Accessed with ``sequences/``
44 """
45
46 paginate_by = 100
47 context_object_name = "sequences"
48 template_name = "sequence_list.html"
49
50 def get_queryset(self):
51 queryset = Sequence.objects.select_related("source")
52 display_unpublished = self.request.user.is_authenticated
53 if display_unpublished:
54 q_obj_filter = Q()
55 else:
56 q_obj_filter = Q(source__published=True)
57
58 if self.request.GET.get("incipit"):
59 incipit = self.request.GET.get("incipit")
60 q_obj_filter &= Q(incipit__icontains=incipit)
61 if self.request.GET.get("siglum"):
62 siglum = self.request.GET.get("siglum")
63 q_obj_filter &= Q(siglum__icontains=siglum)
64 if self.request.GET.get("cantus_id"):
65 cantus_id = self.request.GET.get("cantus_id")
66 q_obj_filter &= Q(cantus_id__icontains=cantus_id)
67
68 return queryset.filter(q_obj_filter).order_by("siglum", "s_sequence")
69
70
71 class SequenceEditView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):
72 template_name = "sequence_edit.html"
73 model = Sequence
74 form_class = SequenceEditForm
75 pk_url_kwarg = "sequence_id"
76
77 def form_valid(self, form):
78 form.instance.last_updated_by = self.request.user
79 messages.success(
80 self.request,
81 "Sequence updated successfully!",
82 )
83 return super().form_valid(form)
84
85 def test_func(self):
86 user = self.request.user
87 # checks if the user is a project manager (they should have the privilege to edit any sequence)
88 is_project_manager = user.groups.filter(name="project manager").exists()
89
90 if is_project_manager:
91 return True
92 else:
93 return False
94
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/django/cantusdb_project/main_app/views/sequence.py b/django/cantusdb_project/main_app/views/sequence.py
--- a/django/cantusdb_project/main_app/views/sequence.py
+++ b/django/cantusdb_project/main_app/views/sequence.py
@@ -6,6 +6,7 @@
from django.contrib import messages
from django.contrib.auth.mixins import UserPassesTestMixin
from django.core.exceptions import PermissionDenied
+from main_app.views.chant import user_can_edit_chants_in_source
class SequenceDetailView(DetailView):
@@ -20,6 +21,8 @@
def get_context_data(self, **kwargs):
sequence = self.get_object()
source = sequence.source
+ user = self.request.user
+
# if the sequence's source isn't published,
# only logged-in users should be able to view the sequence's detail page
if (
@@ -35,6 +38,7 @@
.select_related("source")
.order_by("siglum")
)
+ context["user_can_edit_sequence"] = user_can_edit_chants_in_source(user, source)
return context
| {"golden_diff": "diff --git a/django/cantusdb_project/main_app/views/sequence.py b/django/cantusdb_project/main_app/views/sequence.py\n--- a/django/cantusdb_project/main_app/views/sequence.py\n+++ b/django/cantusdb_project/main_app/views/sequence.py\n@@ -6,6 +6,7 @@\n from django.contrib import messages\n from django.contrib.auth.mixins import UserPassesTestMixin\n from django.core.exceptions import PermissionDenied\n+from main_app.views.chant import user_can_edit_chants_in_source\n \n \n class SequenceDetailView(DetailView):\n@@ -20,6 +21,8 @@\n def get_context_data(self, **kwargs):\n sequence = self.get_object()\n source = sequence.source\n+ user = self.request.user\n+\n # if the sequence's source isn't published,\n # only logged-in users should be able to view the sequence's detail page\n if (\n@@ -35,6 +38,7 @@\n .select_related(\"source\")\n .order_by(\"siglum\")\n )\n+ context[\"user_can_edit_sequence\"] = user_can_edit_chants_in_source(user, source)\n return context\n", "issue": "Users should only be shown the View-Edit toggle if they have edit access for the source/chant in question\n@annamorphism made a comment on #441 that really deserves its own issue\r\n\r\n> also is there a way to not have the \"Edit\" tab show up for unauthorized people? it's annoying to try to edit something and then be sent to a 403.\r\n\r\nCurrently, the view-edit toggle is being displayed whenever the user is logged in. Instead, we need to properly check that the user is actually allowed to edit the chant before displaying the Edit link.\n", "before_files": [{"content": "from django.views.generic import DetailView, ListView, UpdateView\nfrom main_app.models import Sequence\nfrom django.db.models import Q\nfrom main_app.forms import SequenceEditForm\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.contrib import messages\nfrom django.contrib.auth.mixins import UserPassesTestMixin\nfrom django.core.exceptions import PermissionDenied\n\n\nclass SequenceDetailView(DetailView):\n \"\"\"\n Displays a single Sequence object. Accessed with ``sequences/<int:pk>``\n \"\"\"\n\n model = Sequence\n context_object_name = \"sequence\"\n template_name = \"sequence_detail.html\"\n\n def get_context_data(self, **kwargs):\n sequence = self.get_object()\n source = sequence.source\n # if the sequence's source isn't published,\n # only logged-in users should be able to view the sequence's detail page\n if (\n (source is not None)\n and (source.published is False)\n and (not self.request.user.is_authenticated)\n ):\n raise PermissionDenied()\n\n context = super().get_context_data(**kwargs)\n context[\"concordances\"] = (\n Sequence.objects.filter(cantus_id=sequence.cantus_id)\n .select_related(\"source\")\n .order_by(\"siglum\")\n )\n return context\n\n\nclass SequenceListView(ListView):\n \"\"\"\n Displays a list of Sequence objects. 
Accessed with ``sequences/``\n \"\"\"\n\n paginate_by = 100\n context_object_name = \"sequences\"\n template_name = \"sequence_list.html\"\n\n def get_queryset(self):\n queryset = Sequence.objects.select_related(\"source\")\n display_unpublished = self.request.user.is_authenticated\n if display_unpublished:\n q_obj_filter = Q()\n else:\n q_obj_filter = Q(source__published=True)\n\n if self.request.GET.get(\"incipit\"):\n incipit = self.request.GET.get(\"incipit\")\n q_obj_filter &= Q(incipit__icontains=incipit)\n if self.request.GET.get(\"siglum\"):\n siglum = self.request.GET.get(\"siglum\")\n q_obj_filter &= Q(siglum__icontains=siglum)\n if self.request.GET.get(\"cantus_id\"):\n cantus_id = self.request.GET.get(\"cantus_id\")\n q_obj_filter &= Q(cantus_id__icontains=cantus_id)\n\n return queryset.filter(q_obj_filter).order_by(\"siglum\", \"s_sequence\")\n\n\nclass SequenceEditView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):\n template_name = \"sequence_edit.html\"\n model = Sequence\n form_class = SequenceEditForm\n pk_url_kwarg = \"sequence_id\"\n\n def form_valid(self, form):\n form.instance.last_updated_by = self.request.user\n messages.success(\n self.request,\n \"Sequence updated successfully!\",\n )\n return super().form_valid(form)\n\n def test_func(self):\n user = self.request.user\n # checks if the user is a project manager (they should have the privilege to edit any sequence)\n is_project_manager = user.groups.filter(name=\"project manager\").exists()\n\n if is_project_manager:\n return True\n else:\n return False\n", "path": "django/cantusdb_project/main_app/views/sequence.py"}], "after_files": [{"content": "from django.views.generic import DetailView, ListView, UpdateView\nfrom main_app.models import Sequence\nfrom django.db.models import Q\nfrom main_app.forms import SequenceEditForm\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.contrib import messages\nfrom django.contrib.auth.mixins import UserPassesTestMixin\nfrom django.core.exceptions import PermissionDenied\nfrom main_app.views.chant import user_can_edit_chants_in_source\n\n\nclass SequenceDetailView(DetailView):\n \"\"\"\n Displays a single Sequence object. Accessed with ``sequences/<int:pk>``\n \"\"\"\n\n model = Sequence\n context_object_name = \"sequence\"\n template_name = \"sequence_detail.html\"\n\n def get_context_data(self, **kwargs):\n sequence = self.get_object()\n source = sequence.source\n user = self.request.user\n\n # if the sequence's source isn't published,\n # only logged-in users should be able to view the sequence's detail page\n if (\n (source is not None)\n and (source.published is False)\n and (not self.request.user.is_authenticated)\n ):\n raise PermissionDenied()\n\n context = super().get_context_data(**kwargs)\n context[\"concordances\"] = (\n Sequence.objects.filter(cantus_id=sequence.cantus_id)\n .select_related(\"source\")\n .order_by(\"siglum\")\n )\n context[\"user_can_edit_sequence\"] = user_can_edit_chants_in_source(user, source)\n return context\n\n\nclass SequenceListView(ListView):\n \"\"\"\n Displays a list of Sequence objects. 
Accessed with ``sequences/``\n \"\"\"\n\n paginate_by = 100\n context_object_name = \"sequences\"\n template_name = \"sequence_list.html\"\n\n def get_queryset(self):\n queryset = Sequence.objects.select_related(\"source\")\n display_unpublished = self.request.user.is_authenticated\n if display_unpublished:\n q_obj_filter = Q()\n else:\n q_obj_filter = Q(source__published=True)\n\n if self.request.GET.get(\"incipit\"):\n incipit = self.request.GET.get(\"incipit\")\n q_obj_filter &= Q(incipit__icontains=incipit)\n if self.request.GET.get(\"siglum\"):\n siglum = self.request.GET.get(\"siglum\")\n q_obj_filter &= Q(siglum__icontains=siglum)\n if self.request.GET.get(\"cantus_id\"):\n cantus_id = self.request.GET.get(\"cantus_id\")\n q_obj_filter &= Q(cantus_id__icontains=cantus_id)\n\n return queryset.filter(q_obj_filter).order_by(\"siglum\", \"s_sequence\")\n\n\nclass SequenceEditView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):\n template_name = \"sequence_edit.html\"\n model = Sequence\n form_class = SequenceEditForm\n pk_url_kwarg = \"sequence_id\"\n\n def form_valid(self, form):\n form.instance.last_updated_by = self.request.user\n messages.success(\n self.request,\n \"Sequence updated successfully!\",\n )\n return super().form_valid(form)\n\n def test_func(self):\n user = self.request.user\n # checks if the user is a project manager (they should have the privilege to edit any sequence)\n is_project_manager = user.groups.filter(name=\"project manager\").exists()\n\n if is_project_manager:\n return True\n else:\n return False\n", "path": "django/cantusdb_project/main_app/views/sequence.py"}]} | 1,247 | 255 |
gh_patches_debug_38416 | rasdani/github-patches | git_diff | vyperlang__vyper-1042 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Convert function takes string instead of type for conversion to
### What's your issue about?
If you forget to add quotes to the type you want to convert to, Vyper will give an unhelpful error:
```python
convert(some_int128, uint256)
# raises:
# AttributeError: 'Name' object has no attribute 's'
```
### How can it be fixed?
So, catching this error might work out to fix this issue, but I think the underlying issue is that the second argument (`convertTo`) is a string instead of a typename. This actually makes it a little more unintuitive to write conversions as you could misspell the name and not get visual feedback from your IDE (assuming you have syntax highlighting up)
I would suggest turning this into a VIP to modify the syntax of convert such that a valid typename is supplied as the second argument
#### Cute Animal Picture

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `vyper/types/convert.py`
Content:
```
1 from vyper.functions.signature import (
2 signature
3 )
4 from vyper.parser.parser_utils import (
5 LLLnode,
6 getpos,
7 byte_array_to_num
8 )
9 from vyper.exceptions import (
10 InvalidLiteralException,
11 TypeMismatchException,
12 )
13 from vyper.types import (
14 BaseType,
15 )
16 from vyper.types import (
17 get_type,
18 )
19 from vyper.utils import (
20 DECIMAL_DIVISOR,
21 MemoryPositions,
22 SizeLimits
23 )
24
25
26 @signature(('uint256', 'bytes32', 'bytes'), 'str_literal')
27 def to_int128(expr, args, kwargs, context):
28 in_node = args[0]
29 typ, len = get_type(in_node)
30 if typ in ('uint256', 'bytes32'):
31 if in_node.typ.is_literal and not SizeLimits.in_bounds('int128', in_node.value):
32 raise InvalidLiteralException("Number out of range: {}".format(in_node.value), expr)
33 return LLLnode.from_list(
34 ['clamp', ['mload', MemoryPositions.MINNUM], in_node,
35 ['mload', MemoryPositions.MAXNUM]], typ=BaseType('int128', in_node.typ.unit), pos=getpos(expr)
36 )
37 else:
38 return byte_array_to_num(in_node, expr, 'int128')
39
40
41 @signature(('num_literal', 'int128', 'bytes32', 'address'), 'str_literal')
42 def to_uint256(expr, args, kwargs, context):
43 in_node = args[0]
44 input_type, len = get_type(in_node)
45
46 if isinstance(in_node, int):
47 if not SizeLimits.in_bounds('uint256', in_node):
48 raise InvalidLiteralException("Number out of range: {}".format(in_node))
49 _unit = in_node.typ.unit if input_type == 'int128' else None
50 return LLLnode.from_list(in_node, typ=BaseType('uint256', _unit), pos=getpos(expr))
51
52 elif isinstance(in_node, LLLnode) and input_type in ('int128', 'num_literal'):
53 _unit = in_node.typ.unit if input_type == 'int128' else None
54 return LLLnode.from_list(['clampge', in_node, 0], typ=BaseType('uint256', _unit), pos=getpos(expr))
55
56 elif isinstance(in_node, LLLnode) and input_type in ('bytes32', 'address'):
57 return LLLnode(value=in_node.value, args=in_node.args, typ=BaseType('uint256'), pos=getpos(expr))
58
59 else:
60 raise InvalidLiteralException("Invalid input for uint256: %r" % in_node, expr)
61
62
63 @signature(('int128', 'uint256'), 'str_literal')
64 def to_decimal(expr, args, kwargs, context):
65 input = args[0]
66 if input.typ.typ == 'uint256':
67 return LLLnode.from_list(
68 ['uclample', ['mul', input, DECIMAL_DIVISOR], ['mload', MemoryPositions.MAXDECIMAL]],
69 typ=BaseType('decimal', input.typ.unit, input.typ.positional), pos=getpos(expr)
70 )
71 else:
72 return LLLnode.from_list(
73 ['mul', input, DECIMAL_DIVISOR],
74 typ=BaseType('decimal', input.typ.unit, input.typ.positional),
75 pos=getpos(expr)
76 )
77
78
79 @signature(('int128', 'uint256', 'address', 'bytes'), 'str_literal')
80 def to_bytes32(expr, args, kwargs, context):
81 input = args[0]
82 typ, len = get_type(input)
83 if typ == 'bytes':
84 if len != 32:
85 raise TypeMismatchException("Unable to convert bytes[{}] to bytes32".format(len))
86 if input.location == "memory":
87 return LLLnode.from_list(
88 ['mload', ['add', input, 32]], typ=BaseType('bytes32')
89 )
90 elif input.location == "storage":
91 return LLLnode.from_list(
92 ['sload', ['add', ['sha3_32', input], 1]], typ=BaseType('bytes32')
93 )
94 else:
95 return LLLnode(value=input.value, args=input.args, typ=BaseType('bytes32'), pos=getpos(expr))
96
97
98 def convert(expr, context):
99 output_type = expr.args[1].s
100 if output_type in conversion_table:
101 return conversion_table[output_type](expr, context)
102 else:
103 raise Exception("Conversion to {} is invalid.".format(output_type))
104
105
106 conversion_table = {
107 'int128': to_int128,
108 'uint256': to_uint256,
109 'decimal': to_decimal,
110 'bytes32': to_bytes32,
111 }
112
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/vyper/types/convert.py b/vyper/types/convert.py
--- a/vyper/types/convert.py
+++ b/vyper/types/convert.py
@@ -1,3 +1,6 @@
+import ast
+import warnings
+
from vyper.functions.signature import (
signature
)
@@ -9,6 +12,7 @@
from vyper.exceptions import (
InvalidLiteralException,
TypeMismatchException,
+ ParserException,
)
from vyper.types import (
BaseType,
@@ -23,7 +27,7 @@
)
-@signature(('uint256', 'bytes32', 'bytes'), 'str_literal')
+@signature(('uint256', 'bytes32', 'bytes'), '*')
def to_int128(expr, args, kwargs, context):
in_node = args[0]
typ, len = get_type(in_node)
@@ -38,7 +42,7 @@
return byte_array_to_num(in_node, expr, 'int128')
-@signature(('num_literal', 'int128', 'bytes32', 'address'), 'str_literal')
+@signature(('num_literal', 'int128', 'bytes32', 'address'), '*')
def to_uint256(expr, args, kwargs, context):
in_node = args[0]
input_type, len = get_type(in_node)
@@ -60,7 +64,7 @@
raise InvalidLiteralException("Invalid input for uint256: %r" % in_node, expr)
-@signature(('int128', 'uint256'), 'str_literal')
+@signature(('int128', 'uint256'), '*')
def to_decimal(expr, args, kwargs, context):
input = args[0]
if input.typ.typ == 'uint256':
@@ -76,7 +80,7 @@
)
-@signature(('int128', 'uint256', 'address', 'bytes'), 'str_literal')
+@signature(('int128', 'uint256', 'address', 'bytes'), '*')
def to_bytes32(expr, args, kwargs, context):
input = args[0]
typ, len = get_type(input)
@@ -96,11 +100,23 @@
def convert(expr, context):
- output_type = expr.args[1].s
+
+ if isinstance(expr.args[1], ast.Str):
+ warnings.warn(
+ "String parameter has been removed, see VIP1026). "
+ "Use a vyper type instead.",
+ DeprecationWarning
+ )
+
+ if isinstance(expr.args[1], ast.Name):
+ output_type = expr.args[1].id
+ else:
+ raise ParserException("Invalid conversion type, use valid vyper type.", expr)
+
if output_type in conversion_table:
return conversion_table[output_type](expr, context)
else:
- raise Exception("Conversion to {} is invalid.".format(output_type))
+ raise ParserException("Conversion to {} is invalid.".format(output_type), expr)
conversion_table = {
| {"golden_diff": "diff --git a/vyper/types/convert.py b/vyper/types/convert.py\n--- a/vyper/types/convert.py\n+++ b/vyper/types/convert.py\n@@ -1,3 +1,6 @@\n+import ast\n+import warnings\n+\n from vyper.functions.signature import (\n signature\n )\n@@ -9,6 +12,7 @@\n from vyper.exceptions import (\n InvalidLiteralException,\n TypeMismatchException,\n+ ParserException,\n )\n from vyper.types import (\n BaseType,\n@@ -23,7 +27,7 @@\n )\n \n \n-@signature(('uint256', 'bytes32', 'bytes'), 'str_literal')\n+@signature(('uint256', 'bytes32', 'bytes'), '*')\n def to_int128(expr, args, kwargs, context):\n in_node = args[0]\n typ, len = get_type(in_node)\n@@ -38,7 +42,7 @@\n return byte_array_to_num(in_node, expr, 'int128')\n \n \n-@signature(('num_literal', 'int128', 'bytes32', 'address'), 'str_literal')\n+@signature(('num_literal', 'int128', 'bytes32', 'address'), '*')\n def to_uint256(expr, args, kwargs, context):\n in_node = args[0]\n input_type, len = get_type(in_node)\n@@ -60,7 +64,7 @@\n raise InvalidLiteralException(\"Invalid input for uint256: %r\" % in_node, expr)\n \n \n-@signature(('int128', 'uint256'), 'str_literal')\n+@signature(('int128', 'uint256'), '*')\n def to_decimal(expr, args, kwargs, context):\n input = args[0]\n if input.typ.typ == 'uint256':\n@@ -76,7 +80,7 @@\n )\n \n \n-@signature(('int128', 'uint256', 'address', 'bytes'), 'str_literal')\n+@signature(('int128', 'uint256', 'address', 'bytes'), '*')\n def to_bytes32(expr, args, kwargs, context):\n input = args[0]\n typ, len = get_type(input)\n@@ -96,11 +100,23 @@\n \n \n def convert(expr, context):\n- output_type = expr.args[1].s\n+\n+ if isinstance(expr.args[1], ast.Str):\n+ warnings.warn(\n+ \"String parameter has been removed, see VIP1026). \"\n+ \"Use a vyper type instead.\",\n+ DeprecationWarning\n+ )\n+\n+ if isinstance(expr.args[1], ast.Name):\n+ output_type = expr.args[1].id\n+ else:\n+ raise ParserException(\"Invalid conversion type, use valid vyper type.\", expr)\n+\n if output_type in conversion_table:\n return conversion_table[output_type](expr, context)\n else:\n- raise Exception(\"Conversion to {} is invalid.\".format(output_type))\n+ raise ParserException(\"Conversion to {} is invalid.\".format(output_type), expr)\n \n \n conversion_table = {\n", "issue": "Convert function takes string instead of type for conversion to\n### What's your issue about?\r\nIf you forget to add quotes to the type you want to convert to, Vyper will give an unhelpful error:\r\n```python\r\nconvert(some_int128, uint256)\r\n# raises:\r\n# AttributeError: 'Name' object has no attribute 's'\r\n```\r\n\r\n### How can it be fixed?\r\nSo, catching this error might work out to fix this issue, but I think the underlying issue is that the second argument (`convertTo`) is a string instead of a typename. 
This actually makes it a little more unintuitive to write conversions as you could misspell the name and not get visual feedback from your IDE (assuming you have syntax highlighting up)\r\n\r\nI would suggest turning this into a VIP to modify the syntax of convert such that a valid typename is supplied as the second argument\r\n\r\n#### Cute Animal Picture\r\n\r\n\n", "before_files": [{"content": "from vyper.functions.signature import (\n signature\n)\nfrom vyper.parser.parser_utils import (\n LLLnode,\n getpos,\n byte_array_to_num\n)\nfrom vyper.exceptions import (\n InvalidLiteralException,\n TypeMismatchException,\n)\nfrom vyper.types import (\n BaseType,\n)\nfrom vyper.types import (\n get_type,\n)\nfrom vyper.utils import (\n DECIMAL_DIVISOR,\n MemoryPositions,\n SizeLimits\n)\n\n\n@signature(('uint256', 'bytes32', 'bytes'), 'str_literal')\ndef to_int128(expr, args, kwargs, context):\n in_node = args[0]\n typ, len = get_type(in_node)\n if typ in ('uint256', 'bytes32'):\n if in_node.typ.is_literal and not SizeLimits.in_bounds('int128', in_node.value):\n raise InvalidLiteralException(\"Number out of range: {}\".format(in_node.value), expr)\n return LLLnode.from_list(\n ['clamp', ['mload', MemoryPositions.MINNUM], in_node,\n ['mload', MemoryPositions.MAXNUM]], typ=BaseType('int128', in_node.typ.unit), pos=getpos(expr)\n )\n else:\n return byte_array_to_num(in_node, expr, 'int128')\n\n\n@signature(('num_literal', 'int128', 'bytes32', 'address'), 'str_literal')\ndef to_uint256(expr, args, kwargs, context):\n in_node = args[0]\n input_type, len = get_type(in_node)\n\n if isinstance(in_node, int):\n if not SizeLimits.in_bounds('uint256', in_node):\n raise InvalidLiteralException(\"Number out of range: {}\".format(in_node))\n _unit = in_node.typ.unit if input_type == 'int128' else None\n return LLLnode.from_list(in_node, typ=BaseType('uint256', _unit), pos=getpos(expr))\n\n elif isinstance(in_node, LLLnode) and input_type in ('int128', 'num_literal'):\n _unit = in_node.typ.unit if input_type == 'int128' else None\n return LLLnode.from_list(['clampge', in_node, 0], typ=BaseType('uint256', _unit), pos=getpos(expr))\n\n elif isinstance(in_node, LLLnode) and input_type in ('bytes32', 'address'):\n return LLLnode(value=in_node.value, args=in_node.args, typ=BaseType('uint256'), pos=getpos(expr))\n\n else:\n raise InvalidLiteralException(\"Invalid input for uint256: %r\" % in_node, expr)\n\n\n@signature(('int128', 'uint256'), 'str_literal')\ndef to_decimal(expr, args, kwargs, context):\n input = args[0]\n if input.typ.typ == 'uint256':\n return LLLnode.from_list(\n ['uclample', ['mul', input, DECIMAL_DIVISOR], ['mload', MemoryPositions.MAXDECIMAL]],\n typ=BaseType('decimal', input.typ.unit, input.typ.positional), pos=getpos(expr)\n )\n else:\n return LLLnode.from_list(\n ['mul', input, DECIMAL_DIVISOR],\n typ=BaseType('decimal', input.typ.unit, input.typ.positional),\n pos=getpos(expr)\n )\n\n\n@signature(('int128', 'uint256', 'address', 'bytes'), 'str_literal')\ndef to_bytes32(expr, args, kwargs, context):\n input = args[0]\n typ, len = get_type(input)\n if typ == 'bytes':\n if len != 32:\n raise TypeMismatchException(\"Unable to convert bytes[{}] to bytes32\".format(len))\n if input.location == \"memory\":\n return LLLnode.from_list(\n ['mload', ['add', input, 32]], typ=BaseType('bytes32')\n )\n elif input.location == \"storage\":\n return LLLnode.from_list(\n ['sload', ['add', ['sha3_32', input], 1]], typ=BaseType('bytes32')\n )\n else:\n return LLLnode(value=input.value, args=input.args, 
typ=BaseType('bytes32'), pos=getpos(expr))\n\n\ndef convert(expr, context):\n output_type = expr.args[1].s\n if output_type in conversion_table:\n return conversion_table[output_type](expr, context)\n else:\n raise Exception(\"Conversion to {} is invalid.\".format(output_type))\n\n\nconversion_table = {\n 'int128': to_int128,\n 'uint256': to_uint256,\n 'decimal': to_decimal,\n 'bytes32': to_bytes32,\n}\n", "path": "vyper/types/convert.py"}], "after_files": [{"content": "import ast\nimport warnings\n\nfrom vyper.functions.signature import (\n signature\n)\nfrom vyper.parser.parser_utils import (\n LLLnode,\n getpos,\n byte_array_to_num\n)\nfrom vyper.exceptions import (\n InvalidLiteralException,\n TypeMismatchException,\n ParserException,\n)\nfrom vyper.types import (\n BaseType,\n)\nfrom vyper.types import (\n get_type,\n)\nfrom vyper.utils import (\n DECIMAL_DIVISOR,\n MemoryPositions,\n SizeLimits\n)\n\n\n@signature(('uint256', 'bytes32', 'bytes'), '*')\ndef to_int128(expr, args, kwargs, context):\n in_node = args[0]\n typ, len = get_type(in_node)\n if typ in ('uint256', 'bytes32'):\n if in_node.typ.is_literal and not SizeLimits.in_bounds('int128', in_node.value):\n raise InvalidLiteralException(\"Number out of range: {}\".format(in_node.value), expr)\n return LLLnode.from_list(\n ['clamp', ['mload', MemoryPositions.MINNUM], in_node,\n ['mload', MemoryPositions.MAXNUM]], typ=BaseType('int128', in_node.typ.unit), pos=getpos(expr)\n )\n else:\n return byte_array_to_num(in_node, expr, 'int128')\n\n\n@signature(('num_literal', 'int128', 'bytes32', 'address'), '*')\ndef to_uint256(expr, args, kwargs, context):\n in_node = args[0]\n input_type, len = get_type(in_node)\n\n if isinstance(in_node, int):\n if not SizeLimits.in_bounds('uint256', in_node):\n raise InvalidLiteralException(\"Number out of range: {}\".format(in_node))\n _unit = in_node.typ.unit if input_type == 'int128' else None\n return LLLnode.from_list(in_node, typ=BaseType('uint256', _unit), pos=getpos(expr))\n\n elif isinstance(in_node, LLLnode) and input_type in ('int128', 'num_literal'):\n _unit = in_node.typ.unit if input_type == 'int128' else None\n return LLLnode.from_list(['clampge', in_node, 0], typ=BaseType('uint256', _unit), pos=getpos(expr))\n\n elif isinstance(in_node, LLLnode) and input_type in ('bytes32', 'address'):\n return LLLnode(value=in_node.value, args=in_node.args, typ=BaseType('uint256'), pos=getpos(expr))\n\n else:\n raise InvalidLiteralException(\"Invalid input for uint256: %r\" % in_node, expr)\n\n\n@signature(('int128', 'uint256'), '*')\ndef to_decimal(expr, args, kwargs, context):\n input = args[0]\n if input.typ.typ == 'uint256':\n return LLLnode.from_list(\n ['uclample', ['mul', input, DECIMAL_DIVISOR], ['mload', MemoryPositions.MAXDECIMAL]],\n typ=BaseType('decimal', input.typ.unit, input.typ.positional), pos=getpos(expr)\n )\n else:\n return LLLnode.from_list(\n ['mul', input, DECIMAL_DIVISOR],\n typ=BaseType('decimal', input.typ.unit, input.typ.positional),\n pos=getpos(expr)\n )\n\n\n@signature(('int128', 'uint256', 'address', 'bytes'), '*')\ndef to_bytes32(expr, args, kwargs, context):\n input = args[0]\n typ, len = get_type(input)\n if typ == 'bytes':\n if len != 32:\n raise TypeMismatchException(\"Unable to convert bytes[{}] to bytes32\".format(len))\n if input.location == \"memory\":\n return LLLnode.from_list(\n ['mload', ['add', input, 32]], typ=BaseType('bytes32')\n )\n elif input.location == \"storage\":\n return LLLnode.from_list(\n ['sload', ['add', ['sha3_32', input], 1]], 
typ=BaseType('bytes32')\n )\n else:\n return LLLnode(value=input.value, args=input.args, typ=BaseType('bytes32'), pos=getpos(expr))\n\n\ndef convert(expr, context):\n\n if isinstance(expr.args[1], ast.Str):\n warnings.warn(\n \"String parameter has been removed, see VIP1026). \"\n \"Use a vyper type instead.\",\n DeprecationWarning\n )\n\n if isinstance(expr.args[1], ast.Name):\n output_type = expr.args[1].id\n else:\n raise ParserException(\"Invalid conversion type, use valid vyper type.\", expr)\n\n if output_type in conversion_table:\n return conversion_table[output_type](expr, context)\n else:\n raise ParserException(\"Conversion to {} is invalid.\".format(output_type), expr)\n\n\nconversion_table = {\n 'int128': to_int128,\n 'uint256': to_uint256,\n 'decimal': to_decimal,\n 'bytes32': to_bytes32,\n}\n", "path": "vyper/types/convert.py"}]} | 1,787 | 697 |
gh_patches_debug_35861 | rasdani/github-patches | git_diff | streamlink__streamlink-2048 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Euronews error, unable to open URL
<!--
Thanks for reporting a bug!
USE THE TEMPLATE. Otherwise your bug report may be rejected.
First, see the contribution guidelines:
https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink
Also check the list of open and closed bug reports:
https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22bug%22
Please see the text preview to avoid unnecessary formatting errors.
-->
## Bug Report
<!-- Replace [ ] with [x] in order to check the box -->
- [x] This is a bug report and I have read the contribution guidelines.
### Description
<!-- Explain the bug as thoroughly as you can. Don't leave out information which is necessary for us to reproduce and debug this issue. -->
I'm unable to open Euronews live stream.
### Expected / Actual behavior
<!-- What do you expect to happen, and what is actually happening? -->
I expect the stream to open in my media player.
Instead I get this:
```
marco@vbox-ubuntu1804:~$ streamlink http://it.euronews.com/live
[cli][info] Found matching plugin euronews for URL http://it.euronews.com/live
error: Unable to open URL: //euronews-it-p-api.hexaglobe.net/1c903a19de71387485a0f6f74d7923f5/5b8a5583/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls (Invalid URL '//euronews-it-p-api.hexaglobe.net/1c903a19de71387485a0f6f74d7923f5/5b8a5583/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls': No schema supplied. Perhaps you meant http:////euronews-it-p-api.hexaglobe.net/1c903a19de71387485a0f6f74d7923f5/5b8a5583/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls?)
```
### Reproduction steps / Explicit stream URLs to test
<!-- How can we reproduce this? Please note the exact steps below using the list format supplied. If you need more steps please add them. -->
Run this command:
```
streamlink http://it.euronews.com/live
```
### Log output
<!--
TEXT LOG OUTPUT IS REQUIRED for a bug report!
Use the `--loglevel debug` parameter and avoid using parameters which suppress log output.
https://streamlink.github.io/cli.html#cmdoption-l
Make sure to **remove usernames and passwords**
You can copy the output to https://gist.github.com/ or paste it below.
-->
```
marco@vbox-ubuntu1804:~$ streamlink --loglevel debug http://it.euronews.com/live
[cli][debug] OS: Linux-4.15.0-33-generic-x86_64-with-Ubuntu-18.04-bionic
[cli][debug] Python: 3.6.5
[cli][debug] Streamlink: 0.14.2+92.gc7bef14b
[cli][debug] Requests(2.18.4), Socks(1.6.7), Websocket(0.51.0)
[cli][info] Found matching plugin euronews for URL http://it.euronews.com/live
error: Unable to open URL: //euronews-it-p-api.hexaglobe.net/688afb391d4325cad6765c6dc61585a4/5b8a7b36/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls (Invalid URL '//euronews-it-p-api.hexaglobe.net/688afb391d4325cad6765c6dc61585a4/5b8a7b36/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls': No schema supplied. Perhaps you meant http:////euronews-it-p-api.hexaglobe.net/688afb391d4325cad6765c6dc61585a4/5b8a7b36/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls?)
```
### Additional comments, screenshots, etc.
Streamlink versions tested: 0.9.0, 0.14.2 from pip, master from git
Same error with all of them.
[Love Streamlink? Please consider supporting our collective. Thanks!](https://opencollective.com/streamlink/donate)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/euronews.py`
Content:
```
1 import re
2
3 from streamlink.plugin import Plugin
4 from streamlink.plugin.api import validate
5 from streamlink.stream import HLSStream, HTTPStream
6
7
8 class Euronews(Plugin):
9 _url_re = re.compile(r"http(?:s)?://(\w+)\.?euronews.com/(live|.*)")
10 _re_vod = re.compile(r'<meta\s+property="og:video"\s+content="(http.*?)"\s*/>')
11 _live_api_url = "http://{0}.euronews.com/api/watchlive.json"
12 _live_schema = validate.Schema({
13 u"url": validate.url()
14 })
15 _stream_api_schema = validate.Schema({
16 u'status': u'ok',
17 u'primary': validate.url(),
18 validate.optional(u'backup'): validate.url()
19 })
20
21 @classmethod
22 def can_handle_url(cls, url):
23 return cls._url_re.match(url)
24
25 def _get_vod_stream(self):
26 """
27 Find the VOD video url
28 :return: video url
29 """
30 res = self.session.http.get(self.url)
31 video_urls = self._re_vod.findall(res.text)
32 if len(video_urls):
33 return dict(vod=HTTPStream(self.session, video_urls[0]))
34
35 def _get_live_streams(self, subdomain):
36 """
37 Get the live stream in a particular language
38 :param subdomain:
39 :return:
40 """
41 res = self.session.http.get(self._live_api_url.format(subdomain))
42 live_res = self.session.http.json(res, schema=self._live_schema)
43 api_res = self.session.http.get(live_res[u"url"])
44 stream_data = self.session.http.json(api_res, schema=self._stream_api_schema)
45 return HLSStream.parse_variant_playlist(self.session, stream_data[u'primary'])
46
47 def _get_streams(self):
48 """
49 Find the streams for euronews
50 :return:
51 """
52 match = self._url_re.match(self.url)
53 subdomain, path = match.groups()
54
55 if path == "live":
56 return self._get_live_streams(subdomain)
57 else:
58 return self._get_vod_stream()
59
60
61 __plugin__ = Euronews
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/streamlink/plugins/euronews.py b/src/streamlink/plugins/euronews.py
--- a/src/streamlink/plugins/euronews.py
+++ b/src/streamlink/plugins/euronews.py
@@ -3,10 +3,11 @@
from streamlink.plugin import Plugin
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream, HTTPStream
+from streamlink.utils.url import update_scheme
class Euronews(Plugin):
- _url_re = re.compile(r"http(?:s)?://(\w+)\.?euronews.com/(live|.*)")
+ _url_re = re.compile(r'(?P<scheme>https?)://(?P<subdomain>\w+)\.?euronews.com/(?P<path>live|.*)')
_re_vod = re.compile(r'<meta\s+property="og:video"\s+content="(http.*?)"\s*/>')
_live_api_url = "http://{0}.euronews.com/api/watchlive.json"
_live_schema = validate.Schema({
@@ -32,28 +33,29 @@
if len(video_urls):
return dict(vod=HTTPStream(self.session, video_urls[0]))
- def _get_live_streams(self, subdomain):
+ def _get_live_streams(self, match):
"""
Get the live stream in a particular language
- :param subdomain:
+ :param match:
:return:
"""
- res = self.session.http.get(self._live_api_url.format(subdomain))
- live_res = self.session.http.json(res, schema=self._live_schema)
- api_res = self.session.http.get(live_res[u"url"])
- stream_data = self.session.http.json(api_res, schema=self._stream_api_schema)
- return HLSStream.parse_variant_playlist(self.session, stream_data[u'primary'])
+ live_url = self._live_api_url.format(match.get("subdomain"))
+ live_res = self.session.http.json(self.session.http.get(live_url), schema=self._live_schema)
+
+ api_url = update_scheme("{0}:///".format(match.get("scheme")), live_res["url"])
+ api_res = self.session.http.json(self.session.http.get(api_url), schema=self._stream_api_schema)
+
+ return HLSStream.parse_variant_playlist(self.session, api_res["primary"])
def _get_streams(self):
"""
Find the streams for euronews
:return:
"""
- match = self._url_re.match(self.url)
- subdomain, path = match.groups()
+ match = self._url_re.match(self.url).groupdict()
- if path == "live":
- return self._get_live_streams(subdomain)
+ if match.get("path") == "live":
+ return self._get_live_streams(match)
else:
return self._get_vod_stream()
| {"golden_diff": "diff --git a/src/streamlink/plugins/euronews.py b/src/streamlink/plugins/euronews.py\n--- a/src/streamlink/plugins/euronews.py\n+++ b/src/streamlink/plugins/euronews.py\n@@ -3,10 +3,11 @@\n from streamlink.plugin import Plugin\n from streamlink.plugin.api import validate\n from streamlink.stream import HLSStream, HTTPStream\n+from streamlink.utils.url import update_scheme\n \n \n class Euronews(Plugin):\n- _url_re = re.compile(r\"http(?:s)?://(\\w+)\\.?euronews.com/(live|.*)\")\n+ _url_re = re.compile(r'(?P<scheme>https?)://(?P<subdomain>\\w+)\\.?euronews.com/(?P<path>live|.*)')\n _re_vod = re.compile(r'<meta\\s+property=\"og:video\"\\s+content=\"(http.*?)\"\\s*/>')\n _live_api_url = \"http://{0}.euronews.com/api/watchlive.json\"\n _live_schema = validate.Schema({\n@@ -32,28 +33,29 @@\n if len(video_urls):\n return dict(vod=HTTPStream(self.session, video_urls[0]))\n \n- def _get_live_streams(self, subdomain):\n+ def _get_live_streams(self, match):\n \"\"\"\n Get the live stream in a particular language\n- :param subdomain:\n+ :param match:\n :return:\n \"\"\"\n- res = self.session.http.get(self._live_api_url.format(subdomain))\n- live_res = self.session.http.json(res, schema=self._live_schema)\n- api_res = self.session.http.get(live_res[u\"url\"])\n- stream_data = self.session.http.json(api_res, schema=self._stream_api_schema)\n- return HLSStream.parse_variant_playlist(self.session, stream_data[u'primary'])\n+ live_url = self._live_api_url.format(match.get(\"subdomain\"))\n+ live_res = self.session.http.json(self.session.http.get(live_url), schema=self._live_schema)\n+\n+ api_url = update_scheme(\"{0}:///\".format(match.get(\"scheme\")), live_res[\"url\"])\n+ api_res = self.session.http.json(self.session.http.get(api_url), schema=self._stream_api_schema)\n+\n+ return HLSStream.parse_variant_playlist(self.session, api_res[\"primary\"])\n \n def _get_streams(self):\n \"\"\"\n Find the streams for euronews\n :return:\n \"\"\"\n- match = self._url_re.match(self.url)\n- subdomain, path = match.groups()\n+ match = self._url_re.match(self.url).groupdict()\n \n- if path == \"live\":\n- return self._get_live_streams(subdomain)\n+ if match.get(\"path\") == \"live\":\n+ return self._get_live_streams(match)\n else:\n return self._get_vod_stream()\n", "issue": "Euronews error, unable to open URL\n<!--\r\nThanks for reporting a bug!\r\nUSE THE TEMPLATE. Otherwise your bug report may be rejected.\r\n\r\nFirst, see the contribution guidelines:\r\nhttps://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink\r\n\r\nAlso check the list of open and closed bug reports:\r\nhttps://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22bug%22\r\n\r\nPlease see the text preview to avoid unnecessary formatting errors.\r\n-->\r\n\r\n\r\n## Bug Report\r\n\r\n<!-- Replace [ ] with [x] in order to check the box -->\r\n- [x] This is a bug report and I have read the contribution guidelines.\r\n\r\n\r\n### Description\r\n\r\n<!-- Explain the bug as thoroughly as you can. Don't leave out information which is necessary for us to reproduce and debug this issue. -->\r\nI'm unable to open Euronews live stream.\r\n\r\n### Expected / Actual behavior\r\n\r\n<!-- What do you expect to happen, and what is actually happening? 
-->\r\nI expect the stream to open in my media player.\r\nInstead I get this:\r\n```\r\nmarco@vbox-ubuntu1804:~$ streamlink http://it.euronews.com/live\r\n[cli][info] Found matching plugin euronews for URL http://it.euronews.com/live\r\nerror: Unable to open URL: //euronews-it-p-api.hexaglobe.net/1c903a19de71387485a0f6f74d7923f5/5b8a5583/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls (Invalid URL '//euronews-it-p-api.hexaglobe.net/1c903a19de71387485a0f6f74d7923f5/5b8a5583/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls': No schema supplied. Perhaps you meant http:////euronews-it-p-api.hexaglobe.net/1c903a19de71387485a0f6f74d7923f5/5b8a5583/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls?)\r\n```\r\n\r\n### Reproduction steps / Explicit stream URLs to test\r\n\r\n<!-- How can we reproduce this? Please note the exact steps below using the list format supplied. If you need more steps please add them. -->\r\n\r\nRun this command:\r\n```\r\nstreamlink http://it.euronews.com/live\r\n```\r\n\r\n### Log output\r\n\r\n<!--\r\nTEXT LOG OUTPUT IS REQUIRED for a bug report!\r\nUse the `--loglevel debug` parameter and avoid using parameters which suppress log output.\r\nhttps://streamlink.github.io/cli.html#cmdoption-l\r\n\r\nMake sure to **remove usernames and passwords**\r\nYou can copy the output to https://gist.github.com/ or paste it below.\r\n-->\r\n\r\n```\r\nmarco@vbox-ubuntu1804:~$ streamlink --loglevel debug http://it.euronews.com/live\r\n[cli][debug] OS: Linux-4.15.0-33-generic-x86_64-with-Ubuntu-18.04-bionic\r\n[cli][debug] Python: 3.6.5\r\n[cli][debug] Streamlink: 0.14.2+92.gc7bef14b\r\n[cli][debug] Requests(2.18.4), Socks(1.6.7), Websocket(0.51.0)\r\n[cli][info] Found matching plugin euronews for URL http://it.euronews.com/live\r\nerror: Unable to open URL: //euronews-it-p-api.hexaglobe.net/688afb391d4325cad6765c6dc61585a4/5b8a7b36/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls (Invalid URL '//euronews-it-p-api.hexaglobe.net/688afb391d4325cad6765c6dc61585a4/5b8a7b36/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls': No schema supplied. Perhaps you meant http:////euronews-it-p-api.hexaglobe.net/688afb391d4325cad6765c6dc61585a4/5b8a7b36/euronews/euronews-euronews-website-web-responsive-2/it/stream_info.php?format=hls?)\r\n```\r\n\r\n\r\n### Additional comments, screenshots, etc.\r\nStreamlink versions tested: 0.9.0, 0.14.2 from pip, master from git\r\nSame error with all of them.\r\n\r\n\r\n[Love Streamlink? Please consider supporting our collective. 
Thanks!](https://opencollective.com/streamlink/donate)\r\n\n", "before_files": [{"content": "import re\n\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream import HLSStream, HTTPStream\n\n\nclass Euronews(Plugin):\n _url_re = re.compile(r\"http(?:s)?://(\\w+)\\.?euronews.com/(live|.*)\")\n _re_vod = re.compile(r'<meta\\s+property=\"og:video\"\\s+content=\"(http.*?)\"\\s*/>')\n _live_api_url = \"http://{0}.euronews.com/api/watchlive.json\"\n _live_schema = validate.Schema({\n u\"url\": validate.url()\n })\n _stream_api_schema = validate.Schema({\n u'status': u'ok',\n u'primary': validate.url(),\n validate.optional(u'backup'): validate.url()\n })\n\n @classmethod\n def can_handle_url(cls, url):\n return cls._url_re.match(url)\n\n def _get_vod_stream(self):\n \"\"\"\n Find the VOD video url\n :return: video url\n \"\"\"\n res = self.session.http.get(self.url)\n video_urls = self._re_vod.findall(res.text)\n if len(video_urls):\n return dict(vod=HTTPStream(self.session, video_urls[0]))\n\n def _get_live_streams(self, subdomain):\n \"\"\"\n Get the live stream in a particular language\n :param subdomain:\n :return:\n \"\"\"\n res = self.session.http.get(self._live_api_url.format(subdomain))\n live_res = self.session.http.json(res, schema=self._live_schema)\n api_res = self.session.http.get(live_res[u\"url\"])\n stream_data = self.session.http.json(api_res, schema=self._stream_api_schema)\n return HLSStream.parse_variant_playlist(self.session, stream_data[u'primary'])\n\n def _get_streams(self):\n \"\"\"\n Find the streams for euronews\n :return:\n \"\"\"\n match = self._url_re.match(self.url)\n subdomain, path = match.groups()\n\n if path == \"live\":\n return self._get_live_streams(subdomain)\n else:\n return self._get_vod_stream()\n\n\n__plugin__ = Euronews\n", "path": "src/streamlink/plugins/euronews.py"}], "after_files": [{"content": "import re\n\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream import HLSStream, HTTPStream\nfrom streamlink.utils.url import update_scheme\n\n\nclass Euronews(Plugin):\n _url_re = re.compile(r'(?P<scheme>https?)://(?P<subdomain>\\w+)\\.?euronews.com/(?P<path>live|.*)')\n _re_vod = re.compile(r'<meta\\s+property=\"og:video\"\\s+content=\"(http.*?)\"\\s*/>')\n _live_api_url = \"http://{0}.euronews.com/api/watchlive.json\"\n _live_schema = validate.Schema({\n u\"url\": validate.url()\n })\n _stream_api_schema = validate.Schema({\n u'status': u'ok',\n u'primary': validate.url(),\n validate.optional(u'backup'): validate.url()\n })\n\n @classmethod\n def can_handle_url(cls, url):\n return cls._url_re.match(url)\n\n def _get_vod_stream(self):\n \"\"\"\n Find the VOD video url\n :return: video url\n \"\"\"\n res = self.session.http.get(self.url)\n video_urls = self._re_vod.findall(res.text)\n if len(video_urls):\n return dict(vod=HTTPStream(self.session, video_urls[0]))\n\n def _get_live_streams(self, match):\n \"\"\"\n Get the live stream in a particular language\n :param match:\n :return:\n \"\"\"\n live_url = self._live_api_url.format(match.get(\"subdomain\"))\n live_res = self.session.http.json(self.session.http.get(live_url), schema=self._live_schema)\n\n api_url = update_scheme(\"{0}:///\".format(match.get(\"scheme\")), live_res[\"url\"])\n api_res = self.session.http.json(self.session.http.get(api_url), schema=self._stream_api_schema)\n\n return HLSStream.parse_variant_playlist(self.session, api_res[\"primary\"])\n\n def _get_streams(self):\n \"\"\"\n 
Find the streams for euronews\n :return:\n \"\"\"\n match = self._url_re.match(self.url).groupdict()\n\n if match.get(\"path\") == \"live\":\n return self._get_live_streams(match)\n else:\n return self._get_vod_stream()\n\n\n__plugin__ = Euronews\n", "path": "src/streamlink/plugins/euronews.py"}]} | 1,998 | 625 |
gh_patches_debug_15061 | rasdani/github-patches | git_diff | netbox-community__netbox-2996 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Inconsistent /api/virtualization/interfaces/ results
<!--
Before opening a new issue, please search through the existing issues to
see if your topic has already been addressed. Note that you may need to
remove the "is:open" filter from the search bar to include closed issues.
Check the appropriate type for your issue below by placing an x between the
brackets. For assistance with installation issues, or for any other issues
other than those listed below, please raise your topic for discussion on
our mailing list:
https://groups.google.com/forum/#!forum/netbox-discuss
Please note that issues which do not fall under any of the below categories
will be closed. Due to an excessive backlog of feature requests, we are
not currently accepting any proposals which extend NetBox's feature scope.
Do not prepend any sort of tag to your issue's title. An administrator will
review your issue and assign labels as appropriate.
--->
### Issue type
[ ] Feature request <!-- An enhancement of existing functionality -->
[ x ] Bug report <!-- Unexpected or erroneous behavior -->
[ ] Documentation <!-- A modification to the documentation -->
<!--
Please describe the environment in which you are running NetBox. (Be sure
to verify that you are running the latest stable release of NetBox before
submitting a bug report.) If you are submitting a bug report and have made
any changes to the code base, please first validate that your bug can be
recreated while running an official release.
-->
### Environment
* Python version: 3.6.5
* NetBox version: 2.3.4
<!--
BUG REPORTS must include:
* A list of the steps needed for someone else to reproduce the bug
* A description of the expected and observed behavior
* Any relevant error messages (screenshots may also help)
FEATURE REQUESTS must include:
* A detailed description of the proposed functionality
* A use case for the new feature
* A rough description of any necessary changes to the database schema
* Any relevant third-party libraries which would be needed
-->
### Description
Querying all virtualized interfaces returns inconsistent results: the count is OK, but some interfaces are missing and some are duplicated.
The underlying query is ordering by an empty column ("dcim_device"."name"), which seems to fall under the non-predictable-results case: https://www.postgresql.org/docs/current/static/queries-limit.html
```sql
... WHERE "dcim_interface"."virtual_machine_id" IS NOT NULL
ORDER BY "dcim_device"."name" ASC, "dcim_interface"."name" ASC LIMIT 1000 OFFSET 50
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `netbox/dcim/managers.py`
Content:
```
1 from django.db.models import Manager, QuerySet
2 from django.db.models.expressions import RawSQL
3
4 from .constants import NONCONNECTABLE_IFACE_TYPES
5
6 # Regular expressions for parsing Interface names
7 TYPE_RE = r"SUBSTRING({} FROM '^([^0-9\.:]+)')"
8 SLOT_RE = r"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9]+)?(\d{{1,9}})/') AS integer), NULL)"
9 SUBSLOT_RE = r"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9\.:]+)?\d{{1,9}}/(\d{{1,9}})') AS integer), NULL)"
10 POSITION_RE = r"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9]+)?(?:\d{{1,9}}/){{2}}(\d{{1,9}})') AS integer), NULL)"
11 SUBPOSITION_RE = r"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9]+)?(?:\d{{1,9}}/){{3}}(\d{{1,9}})') AS integer), NULL)"
12 ID_RE = r"CAST(SUBSTRING({} FROM '^(?:[^0-9\.:]+)?(\d{{1,9}})([^/]|$)') AS integer)"
13 CHANNEL_RE = r"COALESCE(CAST(SUBSTRING({} FROM '^.*:(\d{{1,9}})(\.\d{{1,9}})?$') AS integer), 0)"
14 VC_RE = r"COALESCE(CAST(SUBSTRING({} FROM '^.*\.(\d{{1,9}})$') AS integer), 0)"
15
16
17 class DeviceComponentManager(Manager):
18
19 def get_queryset(self):
20
21 queryset = super().get_queryset()
22 table_name = self.model._meta.db_table
23 sql = r"CONCAT(REGEXP_REPLACE({}.name, '\d+$', ''), LPAD(SUBSTRING({}.name FROM '\d+$'), 8, '0'))"
24
25 # Pad any trailing digits to effect natural sorting
26 return queryset.extra(
27 select={
28 'name_padded': sql.format(table_name, table_name),
29 }
30 ).order_by('name_padded', 'pk')
31
32
33 class InterfaceQuerySet(QuerySet):
34
35 def connectable(self):
36 """
37 Return only physical interfaces which are capable of being connected to other interfaces (i.e. not virtual or
38 wireless).
39 """
40 return self.exclude(form_factor__in=NONCONNECTABLE_IFACE_TYPES)
41
42
43 class InterfaceManager(Manager):
44
45 def get_queryset(self):
46 """
47 Naturally order interfaces by their type and numeric position. To order interfaces naturally, the `name` field
48 is split into eight distinct components: leading text (type), slot, subslot, position, subposition, ID, channel,
49 and virtual circuit:
50
51 {type}{slot or ID}/{subslot}/{position}/{subposition}:{channel}.{vc}
52
53 Components absent from the interface name are coalesced to zero or null. For example, an interface named
54 GigabitEthernet1/2/3 would be parsed as follows:
55
56 type = 'GigabitEthernet'
57 slot = 1
58 subslot = 2
59 position = 3
60 subposition = None
61 id = None
62 channel = 0
63 vc = 0
64
65 The original `name` field is considered in its entirety to serve as a fallback in the event interfaces do not
66 match any of the prescribed fields.
67 """
68
69 sql_col = '{}.name'.format(self.model._meta.db_table)
70 ordering = [
71 '_slot', '_subslot', '_position', '_subposition', '_type', '_id', '_channel', '_vc', 'name',
72 ]
73
74 fields = {
75 '_type': RawSQL(TYPE_RE.format(sql_col), []),
76 '_id': RawSQL(ID_RE.format(sql_col), []),
77 '_slot': RawSQL(SLOT_RE.format(sql_col), []),
78 '_subslot': RawSQL(SUBSLOT_RE.format(sql_col), []),
79 '_position': RawSQL(POSITION_RE.format(sql_col), []),
80 '_subposition': RawSQL(SUBPOSITION_RE.format(sql_col), []),
81 '_channel': RawSQL(CHANNEL_RE.format(sql_col), []),
82 '_vc': RawSQL(VC_RE.format(sql_col), []),
83 }
84
85 return InterfaceQuerySet(self.model, using=self._db).annotate(**fields).order_by(*ordering)
86
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/netbox/dcim/managers.py b/netbox/dcim/managers.py
--- a/netbox/dcim/managers.py
+++ b/netbox/dcim/managers.py
@@ -64,11 +64,15 @@
The original `name` field is considered in its entirety to serve as a fallback in the event interfaces do not
match any of the prescribed fields.
+
+ The `id` field is included to enforce deterministic ordering of interfaces in similar vein of other device
+ components.
"""
sql_col = '{}.name'.format(self.model._meta.db_table)
ordering = [
- '_slot', '_subslot', '_position', '_subposition', '_type', '_id', '_channel', '_vc', 'name',
+ '_slot', '_subslot', '_position', '_subposition', '_type', '_id', '_channel', '_vc', 'name', 'pk'
+
]
fields = {
| {"golden_diff": "diff --git a/netbox/dcim/managers.py b/netbox/dcim/managers.py\n--- a/netbox/dcim/managers.py\n+++ b/netbox/dcim/managers.py\n@@ -64,11 +64,15 @@\n \n The original `name` field is considered in its entirety to serve as a fallback in the event interfaces do not\n match any of the prescribed fields.\n+\n+ The `id` field is included to enforce deterministic ordering of interfaces in similar vein of other device\n+ components.\n \"\"\"\n \n sql_col = '{}.name'.format(self.model._meta.db_table)\n ordering = [\n- '_slot', '_subslot', '_position', '_subposition', '_type', '_id', '_channel', '_vc', 'name',\n+ '_slot', '_subslot', '_position', '_subposition', '_type', '_id', '_channel', '_vc', 'name', 'pk'\n+\n ]\n \n fields = {\n", "issue": "Inconsistent /api/virtualization/interfaces/ results\n<!--\r\n Before opening a new issue, please search through the existing issues to\r\n see if your topic has already been addressed. Note that you may need to\r\n remove the \"is:open\" filter from the search bar to include closed issues.\r\n\r\n Check the appropriate type for your issue below by placing an x between the\r\n brackets. For assistance with installation issues, or for any other issues\r\n other than those listed below, please raise your topic for discussion on\r\n our mailing list:\r\n\r\n https://groups.google.com/forum/#!forum/netbox-discuss\r\n\r\n Please note that issues which do not fall under any of the below categories\r\n will be closed. Due to an excessive backlog of feature requests, we are\r\n not currently accepting any proposals which extend NetBox's feature scope.\r\n\r\n Do not prepend any sort of tag to your issue's title. An administrator will\r\n review your issue and assign labels as appropriate.\r\n--->\r\n### Issue type\r\n[ ] Feature request <!-- An enhancement of existing functionality -->\r\n[ x ] Bug report <!-- Unexpected or erroneous behavior -->\r\n[ ] Documentation <!-- A modification to the documentation -->\r\n\r\n<!--\r\n Please describe the environment in which you are running NetBox. (Be sure\r\n to verify that you are running the latest stable release of NetBox before\r\n submitting a bug report.) If you are submitting a bug report and have made\r\n any changes to the code base, please first validate that your bug can be\r\n recreated while running an official release.\r\n-->\r\n### Environment\r\n* Python version: 3.6.5\r\n* NetBox version: 2.3.4\r\n\r\n<!--\r\n BUG REPORTS must include:\r\n * A list of the steps needed for someone else to reproduce the bug\r\n * A description of the expected and observed behavior\r\n * Any relevant error messages (screenshots may also help)\r\n\r\n FEATURE REQUESTS must include:\r\n * A detailed description of the proposed functionality\r\n * A use case for the new feature\r\n * A rough description of any necessary changes to the database schema\r\n * Any relevant third-party libraries which would be needed\r\n-->\r\n### Description\r\n\r\nQuerying all virtualized interfaces returns inconsistents results : the count is OK, but some interfaces are missing and some are duplicated.\r\n\r\nThe underlying query is ordering by an empty column (\"dcim_device\".\"name\") which seems to fall under the non predictable results case : https://www.postgresql.org/docs/current/static/queries-limit.html\r\n\r\n```sql\r\n... 
WHERE \"dcim_interface\".\"virtual_machine_id\" IS NOT NULL \r\nORDER BY \"dcim_device\".\"name\" ASC, \"dcim_interface\".\"name\" ASC LIMIT 1000 OFFSET 50\r\n```\n", "before_files": [{"content": "from django.db.models import Manager, QuerySet\nfrom django.db.models.expressions import RawSQL\n\nfrom .constants import NONCONNECTABLE_IFACE_TYPES\n\n# Regular expressions for parsing Interface names\nTYPE_RE = r\"SUBSTRING({} FROM '^([^0-9\\.:]+)')\"\nSLOT_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9]+)?(\\d{{1,9}})/') AS integer), NULL)\"\nSUBSLOT_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9\\.:]+)?\\d{{1,9}}/(\\d{{1,9}})') AS integer), NULL)\"\nPOSITION_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9]+)?(?:\\d{{1,9}}/){{2}}(\\d{{1,9}})') AS integer), NULL)\"\nSUBPOSITION_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9]+)?(?:\\d{{1,9}}/){{3}}(\\d{{1,9}})') AS integer), NULL)\"\nID_RE = r\"CAST(SUBSTRING({} FROM '^(?:[^0-9\\.:]+)?(\\d{{1,9}})([^/]|$)') AS integer)\"\nCHANNEL_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^.*:(\\d{{1,9}})(\\.\\d{{1,9}})?$') AS integer), 0)\"\nVC_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^.*\\.(\\d{{1,9}})$') AS integer), 0)\"\n\n\nclass DeviceComponentManager(Manager):\n\n def get_queryset(self):\n\n queryset = super().get_queryset()\n table_name = self.model._meta.db_table\n sql = r\"CONCAT(REGEXP_REPLACE({}.name, '\\d+$', ''), LPAD(SUBSTRING({}.name FROM '\\d+$'), 8, '0'))\"\n\n # Pad any trailing digits to effect natural sorting\n return queryset.extra(\n select={\n 'name_padded': sql.format(table_name, table_name),\n }\n ).order_by('name_padded', 'pk')\n\n\nclass InterfaceQuerySet(QuerySet):\n\n def connectable(self):\n \"\"\"\n Return only physical interfaces which are capable of being connected to other interfaces (i.e. not virtual or\n wireless).\n \"\"\"\n return self.exclude(form_factor__in=NONCONNECTABLE_IFACE_TYPES)\n\n\nclass InterfaceManager(Manager):\n\n def get_queryset(self):\n \"\"\"\n Naturally order interfaces by their type and numeric position. To order interfaces naturally, the `name` field\n is split into eight distinct components: leading text (type), slot, subslot, position, subposition, ID, channel,\n and virtual circuit:\n\n {type}{slot or ID}/{subslot}/{position}/{subposition}:{channel}.{vc}\n\n Components absent from the interface name are coalesced to zero or null. 
For example, an interface named\n GigabitEthernet1/2/3 would be parsed as follows:\n\n type = 'GigabitEthernet'\n slot = 1\n subslot = 2\n position = 3\n subposition = None\n id = None\n channel = 0\n vc = 0\n\n The original `name` field is considered in its entirety to serve as a fallback in the event interfaces do not\n match any of the prescribed fields.\n \"\"\"\n\n sql_col = '{}.name'.format(self.model._meta.db_table)\n ordering = [\n '_slot', '_subslot', '_position', '_subposition', '_type', '_id', '_channel', '_vc', 'name',\n ]\n\n fields = {\n '_type': RawSQL(TYPE_RE.format(sql_col), []),\n '_id': RawSQL(ID_RE.format(sql_col), []),\n '_slot': RawSQL(SLOT_RE.format(sql_col), []),\n '_subslot': RawSQL(SUBSLOT_RE.format(sql_col), []),\n '_position': RawSQL(POSITION_RE.format(sql_col), []),\n '_subposition': RawSQL(SUBPOSITION_RE.format(sql_col), []),\n '_channel': RawSQL(CHANNEL_RE.format(sql_col), []),\n '_vc': RawSQL(VC_RE.format(sql_col), []),\n }\n\n return InterfaceQuerySet(self.model, using=self._db).annotate(**fields).order_by(*ordering)\n", "path": "netbox/dcim/managers.py"}], "after_files": [{"content": "from django.db.models import Manager, QuerySet\nfrom django.db.models.expressions import RawSQL\n\nfrom .constants import NONCONNECTABLE_IFACE_TYPES\n\n# Regular expressions for parsing Interface names\nTYPE_RE = r\"SUBSTRING({} FROM '^([^0-9\\.:]+)')\"\nSLOT_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9]+)?(\\d{{1,9}})/') AS integer), NULL)\"\nSUBSLOT_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9\\.:]+)?\\d{{1,9}}/(\\d{{1,9}})') AS integer), NULL)\"\nPOSITION_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9]+)?(?:\\d{{1,9}}/){{2}}(\\d{{1,9}})') AS integer), NULL)\"\nSUBPOSITION_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^(?:[^0-9]+)?(?:\\d{{1,9}}/){{3}}(\\d{{1,9}})') AS integer), NULL)\"\nID_RE = r\"CAST(SUBSTRING({} FROM '^(?:[^0-9\\.:]+)?(\\d{{1,9}})([^/]|$)') AS integer)\"\nCHANNEL_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^.*:(\\d{{1,9}})(\\.\\d{{1,9}})?$') AS integer), 0)\"\nVC_RE = r\"COALESCE(CAST(SUBSTRING({} FROM '^.*\\.(\\d{{1,9}})$') AS integer), 0)\"\n\n\nclass DeviceComponentManager(Manager):\n\n def get_queryset(self):\n\n queryset = super().get_queryset()\n table_name = self.model._meta.db_table\n sql = r\"CONCAT(REGEXP_REPLACE({}.name, '\\d+$', ''), LPAD(SUBSTRING({}.name FROM '\\d+$'), 8, '0'))\"\n\n # Pad any trailing digits to effect natural sorting\n return queryset.extra(\n select={\n 'name_padded': sql.format(table_name, table_name),\n }\n ).order_by('name_padded', 'pk')\n\n\nclass InterfaceQuerySet(QuerySet):\n\n def connectable(self):\n \"\"\"\n Return only physical interfaces which are capable of being connected to other interfaces (i.e. not virtual or\n wireless).\n \"\"\"\n return self.exclude(form_factor__in=NONCONNECTABLE_IFACE_TYPES)\n\n\nclass InterfaceManager(Manager):\n\n def get_queryset(self):\n \"\"\"\n Naturally order interfaces by their type and numeric position. To order interfaces naturally, the `name` field\n is split into eight distinct components: leading text (type), slot, subslot, position, subposition, ID, channel,\n and virtual circuit:\n\n {type}{slot or ID}/{subslot}/{position}/{subposition}:{channel}.{vc}\n\n Components absent from the interface name are coalesced to zero or null. 
For example, an interface named\n GigabitEthernet1/2/3 would be parsed as follows:\n\n type = 'GigabitEthernet'\n slot = 1\n subslot = 2\n position = 3\n subposition = None\n id = None\n channel = 0\n vc = 0\n\n The original `name` field is considered in its entirety to serve as a fallback in the event interfaces do not\n match any of the prescribed fields.\n\n The `id` field is included to enforce deterministic ordering of interfaces in similar vein of other device\n components.\n \"\"\"\n\n sql_col = '{}.name'.format(self.model._meta.db_table)\n ordering = [\n '_slot', '_subslot', '_position', '_subposition', '_type', '_id', '_channel', '_vc', 'name', 'pk'\n\n ]\n\n fields = {\n '_type': RawSQL(TYPE_RE.format(sql_col), []),\n '_id': RawSQL(ID_RE.format(sql_col), []),\n '_slot': RawSQL(SLOT_RE.format(sql_col), []),\n '_subslot': RawSQL(SUBSLOT_RE.format(sql_col), []),\n '_position': RawSQL(POSITION_RE.format(sql_col), []),\n '_subposition': RawSQL(SUBPOSITION_RE.format(sql_col), []),\n '_channel': RawSQL(CHANNEL_RE.format(sql_col), []),\n '_vc': RawSQL(VC_RE.format(sql_col), []),\n }\n\n return InterfaceQuerySet(self.model, using=self._db).annotate(**fields).order_by(*ordering)\n", "path": "netbox/dcim/managers.py"}]} | 1,971 | 210 |
gh_patches_debug_18703 | rasdani/github-patches | git_diff | mdn__kuma-7102 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
TypeError on sendinblue
https://sentry.prod.mozaws.net/operations/mdn-prod/issues/8473154/?referrer=github_plugin
```
TypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType'
File "celery/app/trace.py", line 385, in trace_task
R = retval = fun(*args, **kwargs)
File "newrelic/hooks/application_celery.py", line 85, in wrapper
return wrapped(*args, **kwargs)
File "celery/app/trace.py", line 650, in __protected_call__
return self.run(*args, **kwargs)
File "kuma/users/newsletter/tasks.py", line 29, in create_or_update_contact
"listIds": [int(settings.SENDINBLUE_LIST_ID)],
TypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kuma/users/newsletter/apps.py`
Content:
```
1 from django.apps import AppConfig
2 from django.core.checks import register
3 from django.utils.translation import gettext_lazy as _
4
5
6 class UserNewsletterConfig(AppConfig):
7 """
8 The Django App Config class to store information about the users app
9 and do startup time things.
10 """
11
12 name = "kuma.users.newsletter"
13 verbose_name = _("UserNewsletter")
14
15 def ready(self):
16 # Connect signal handlers
17 from . import signal_handlers # noqa
18
19 from .checks import sendinblue_check
20
21 register(sendinblue_check)
22
```
Path: `kuma/settings/pytest.py`
Content:
```
1 from .local import *
2
3 DEBUG = False
4 ENABLE_RESTRICTIONS_BY_HOST = False
5 TEMPLATES[0]["OPTIONS"]["debug"] = True # Enable recording of templates
6 CELERY_TASK_ALWAYS_EAGER = True
7 CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
8 ES_LIVE_INDEX = config("ES_LIVE_INDEX", default=False, cast=bool)
9
10 # Disable the Constance database cache
11 CONSTANCE_DATABASE_CACHE_BACKEND = False
12
13 # SHA1 because it is fast, and hard-coded in the test fixture JSON.
14 PASSWORD_HASHERS = ("django.contrib.auth.hashers.SHA1PasswordHasher",)
15
16 INSTALLED_APPS += ("kuma.core.tests.taggit_extras",)
17
18 LOGGING["loggers"].update(
19 {
20 "django.db.backends": {
21 "handlers": ["console"],
22 "propagate": True,
23 "level": "WARNING",
24 },
25 "kuma.search.utils": {"handlers": [], "propagate": False, "level": "CRITICAL"},
26 }
27 )
28
29
30 # Change the cache key prefix for tests, to avoid overwriting runtime.
31 for cache_settings in CACHES.values():
32 current_prefix = cache_settings.get("KEY_PREFIX", "")
33 cache_settings["KEY_PREFIX"] = "test." + current_prefix
34
35 # Use un-versioned file names, like main.css, instead of versioned
36 # filenames requiring hashing, like mdn.1cb62215bf0c.css
37 STATICFILES_STORAGE = "pipeline.storage.PipelineStorage"
38
39 # Switch Pipeline to DEBUG=False / Production values
40
41 # The documents claim True means assets should be compressed, which seems like
42 # more work, but it is 4x slower when False, maybe because it detects the
43 # existence of the file and skips generating a new one.
44 PIPELINE["PIPELINE_ENABLED"] = True
45
46 # The documents suggest this does nothing when PIPELINE_ENABLED=True. But,
47 # testing shows that tests run faster when set to True.
48 PIPELINE["PIPELINE_COLLECTOR_ENABLED"] = True
49
50 # We need the real Sass compiler here instead of the pass-through used for
51 # local dev.
52 PIPELINE["COMPILERS"] = ("pipeline.compilers.sass.SASSCompiler",)
53
54 # Testing with django-pipeline 1.6.8, PipelineStorage
55 # Enabled=T, Collector=T - 482s
56 # Enabled=T, Collector=F - 535s
57 # Enabled=F, Collector=T - 18262s
58 # Enabled=F, Collector=F - 2043s
59
60 # Defer to django-pipeline's finders for testing
61 # This avoids reading the static folder for each test client request, for
62 # a 10x speedup on Docker on MacOS.
63 WHITENOISE_AUTOREFRESH = True
64 WHITENOISE_USE_FINDERS = True
65
66 # Never rely on the .env
67 GOOGLE_ANALYTICS_ACCOUNT = None
68
69 # Silence warnings about defaults that change in django-storages 2.0
70 AWS_BUCKET_ACL = None
71 AWS_DEFAULT_ACL = None
72
73 # Use a dedicated minio bucket for tests
74 ATTACHMENTS_AWS_STORAGE_BUCKET_NAME = "test"
75
76 # Never enabled in tests.
77 SENTRY_DSN = None
78
79 # To make absolutely sure we never accidentally trigger the GA tracking
80 # within tests to the actual (and default) www.google-analytics.com this is
81 # an extra safeguard.
82 GOOGLE_ANALYTICS_TRACKING_URL = "https://thisllneverwork.example.com/collect"
83
84 # Because that's what all the tests presume.
85 SITE_ID = 1
86
87 # Because it's on by default
88 ENABLE_BCD_SIGNAL = True
89
90 # Stripe API KEY settings
91 STRIPE_PUBLIC_KEY = "testing"
92 STRIPE_SECRET_KEY = "testing"
93 STRIPE_PLAN_ID = "testing"
94
95 # For legacy reasons, the tests assume these are always true so don't
96 # let local overrides take effect.
97 INDEX_HTML_ATTRIBUTES = True
98 INDEX_CSS_CLASSNAMES = True
99
100 # Amount for the monthly subscription.
101 # It's hardcoded here in case some test depends on the number and it futureproofs
102 # our tests to not deviate when the actual number changes since that number
103 # change shouldn't affect the tests.
104 CONTRIBUTION_AMOUNT_USD = 4.99
105
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kuma/settings/pytest.py b/kuma/settings/pytest.py
--- a/kuma/settings/pytest.py
+++ b/kuma/settings/pytest.py
@@ -102,3 +102,6 @@
# our tests to not deviate when the actual number changes since that number
# change shouldn't affect the tests.
CONTRIBUTION_AMOUNT_USD = 4.99
+
+SENDINBLUE_API_KEY = "testing"
+SENDINBLUE_LIST_ID = 7327
diff --git a/kuma/users/newsletter/apps.py b/kuma/users/newsletter/apps.py
--- a/kuma/users/newsletter/apps.py
+++ b/kuma/users/newsletter/apps.py
@@ -1,4 +1,5 @@
from django.apps import AppConfig
+from django.conf import settings
from django.core.checks import register
from django.utils.translation import gettext_lazy as _
@@ -13,6 +14,9 @@
verbose_name = _("UserNewsletter")
def ready(self):
+ if not settings.SENDINBLUE_API_KEY:
+ return
+
# Connect signal handlers
from . import signal_handlers # noqa
| {"golden_diff": "diff --git a/kuma/settings/pytest.py b/kuma/settings/pytest.py\n--- a/kuma/settings/pytest.py\n+++ b/kuma/settings/pytest.py\n@@ -102,3 +102,6 @@\n # our tests to not deviate when the actual number changes since that number\n # change shouldn't affect the tests.\n CONTRIBUTION_AMOUNT_USD = 4.99\n+\n+SENDINBLUE_API_KEY = \"testing\"\n+SENDINBLUE_LIST_ID = 7327\ndiff --git a/kuma/users/newsletter/apps.py b/kuma/users/newsletter/apps.py\n--- a/kuma/users/newsletter/apps.py\n+++ b/kuma/users/newsletter/apps.py\n@@ -1,4 +1,5 @@\n from django.apps import AppConfig\n+from django.conf import settings\n from django.core.checks import register\n from django.utils.translation import gettext_lazy as _\n \n@@ -13,6 +14,9 @@\n verbose_name = _(\"UserNewsletter\")\n \n def ready(self):\n+ if not settings.SENDINBLUE_API_KEY:\n+ return\n+\n # Connect signal handlers\n from . import signal_handlers # noqa\n", "issue": "TypeError on sendinblue\nhttps://sentry.prod.mozaws.net/operations/mdn-prod/issues/8473154/?referrer=github_plugin\n\n```\nTypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType'\n File \"celery/app/trace.py\", line 385, in trace_task\n R = retval = fun(*args, **kwargs)\n File \"newrelic/hooks/application_celery.py\", line 85, in wrapper\n return wrapped(*args, **kwargs)\n File \"celery/app/trace.py\", line 650, in __protected_call__\n return self.run(*args, **kwargs)\n File \"kuma/users/newsletter/tasks.py\", line 29, in create_or_update_contact\n \"listIds\": [int(settings.SENDINBLUE_LIST_ID)],\n\nTypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType'\n```\n", "before_files": [{"content": "from django.apps import AppConfig\nfrom django.core.checks import register\nfrom django.utils.translation import gettext_lazy as _\n\n\nclass UserNewsletterConfig(AppConfig):\n \"\"\"\n The Django App Config class to store information about the users app\n and do startup time things.\n \"\"\"\n\n name = \"kuma.users.newsletter\"\n verbose_name = _(\"UserNewsletter\")\n\n def ready(self):\n # Connect signal handlers\n from . 
import signal_handlers # noqa\n\n from .checks import sendinblue_check\n\n register(sendinblue_check)\n", "path": "kuma/users/newsletter/apps.py"}, {"content": "from .local import *\n\nDEBUG = False\nENABLE_RESTRICTIONS_BY_HOST = False\nTEMPLATES[0][\"OPTIONS\"][\"debug\"] = True # Enable recording of templates\nCELERY_TASK_ALWAYS_EAGER = True\nCELERY_EAGER_PROPAGATES_EXCEPTIONS = True\nES_LIVE_INDEX = config(\"ES_LIVE_INDEX\", default=False, cast=bool)\n\n# Disable the Constance database cache\nCONSTANCE_DATABASE_CACHE_BACKEND = False\n\n# SHA1 because it is fast, and hard-coded in the test fixture JSON.\nPASSWORD_HASHERS = (\"django.contrib.auth.hashers.SHA1PasswordHasher\",)\n\nINSTALLED_APPS += (\"kuma.core.tests.taggit_extras\",)\n\nLOGGING[\"loggers\"].update(\n {\n \"django.db.backends\": {\n \"handlers\": [\"console\"],\n \"propagate\": True,\n \"level\": \"WARNING\",\n },\n \"kuma.search.utils\": {\"handlers\": [], \"propagate\": False, \"level\": \"CRITICAL\"},\n }\n)\n\n\n# Change the cache key prefix for tests, to avoid overwriting runtime.\nfor cache_settings in CACHES.values():\n current_prefix = cache_settings.get(\"KEY_PREFIX\", \"\")\n cache_settings[\"KEY_PREFIX\"] = \"test.\" + current_prefix\n\n# Use un-versioned file names, like main.css, instead of versioned\n# filenames requiring hashing, like mdn.1cb62215bf0c.css\nSTATICFILES_STORAGE = \"pipeline.storage.PipelineStorage\"\n\n# Switch Pipeline to DEBUG=False / Production values\n\n# The documents claim True means assets should be compressed, which seems like\n# more work, but it is 4x slower when False, maybe because it detects the\n# existence of the file and skips generating a new one.\nPIPELINE[\"PIPELINE_ENABLED\"] = True\n\n# The documents suggest this does nothing when PIPELINE_ENABLED=True. 
But,\n# testing shows that tests run faster when set to True.\nPIPELINE[\"PIPELINE_COLLECTOR_ENABLED\"] = True\n\n# We need the real Sass compiler here instead of the pass-through used for\n# local dev.\nPIPELINE[\"COMPILERS\"] = (\"pipeline.compilers.sass.SASSCompiler\",)\n\n# Testing with django-pipeline 1.6.8, PipelineStorage\n# Enabled=T, Collector=T - 482s\n# Enabled=T, Collector=F - 535s\n# Enabled=F, Collector=T - 18262s\n# Enabled=F, Collector=F - 2043s\n\n# Defer to django-pipeline's finders for testing\n# This avoids reading the static folder for each test client request, for\n# a 10x speedup on Docker on MacOS.\nWHITENOISE_AUTOREFRESH = True\nWHITENOISE_USE_FINDERS = True\n\n# Never rely on the .env\nGOOGLE_ANALYTICS_ACCOUNT = None\n\n# Silence warnings about defaults that change in django-storages 2.0\nAWS_BUCKET_ACL = None\nAWS_DEFAULT_ACL = None\n\n# Use a dedicated minio bucket for tests\nATTACHMENTS_AWS_STORAGE_BUCKET_NAME = \"test\"\n\n# Never enabled in tests.\nSENTRY_DSN = None\n\n# To make absolutely sure we never accidentally trigger the GA tracking\n# within tests to the actual (and default) www.google-analytics.com this is\n# an extra safeguard.\nGOOGLE_ANALYTICS_TRACKING_URL = \"https://thisllneverwork.example.com/collect\"\n\n# Because that's what all the tests presume.\nSITE_ID = 1\n\n# Because it's on by default\nENABLE_BCD_SIGNAL = True\n\n# Stripe API KEY settings\nSTRIPE_PUBLIC_KEY = \"testing\"\nSTRIPE_SECRET_KEY = \"testing\"\nSTRIPE_PLAN_ID = \"testing\"\n\n# For legacy reasons, the tests assume these are always true so don't\n# let local overrides take effect.\nINDEX_HTML_ATTRIBUTES = True\nINDEX_CSS_CLASSNAMES = True\n\n# Amount for the monthly subscription.\n# It's hardcoded here in case some test depends on the number and it futureproofs\n# our tests to not deviate when the actual number changes since that number\n# change shouldn't affect the tests.\nCONTRIBUTION_AMOUNT_USD = 4.99\n", "path": "kuma/settings/pytest.py"}], "after_files": [{"content": "from django.apps import AppConfig\nfrom django.conf import settings\nfrom django.core.checks import register\nfrom django.utils.translation import gettext_lazy as _\n\n\nclass UserNewsletterConfig(AppConfig):\n \"\"\"\n The Django App Config class to store information about the users app\n and do startup time things.\n \"\"\"\n\n name = \"kuma.users.newsletter\"\n verbose_name = _(\"UserNewsletter\")\n\n def ready(self):\n if not settings.SENDINBLUE_API_KEY:\n return\n\n # Connect signal handlers\n from . 
import signal_handlers # noqa\n\n from .checks import sendinblue_check\n\n register(sendinblue_check)\n", "path": "kuma/users/newsletter/apps.py"}, {"content": "from .local import *\n\nDEBUG = False\nENABLE_RESTRICTIONS_BY_HOST = False\nTEMPLATES[0][\"OPTIONS\"][\"debug\"] = True # Enable recording of templates\nCELERY_TASK_ALWAYS_EAGER = True\nCELERY_EAGER_PROPAGATES_EXCEPTIONS = True\nES_LIVE_INDEX = config(\"ES_LIVE_INDEX\", default=False, cast=bool)\n\n# Disable the Constance database cache\nCONSTANCE_DATABASE_CACHE_BACKEND = False\n\n# SHA1 because it is fast, and hard-coded in the test fixture JSON.\nPASSWORD_HASHERS = (\"django.contrib.auth.hashers.SHA1PasswordHasher\",)\n\nINSTALLED_APPS += (\"kuma.core.tests.taggit_extras\",)\n\nLOGGING[\"loggers\"].update(\n {\n \"django.db.backends\": {\n \"handlers\": [\"console\"],\n \"propagate\": True,\n \"level\": \"WARNING\",\n },\n \"kuma.search.utils\": {\"handlers\": [], \"propagate\": False, \"level\": \"CRITICAL\"},\n }\n)\n\n\n# Change the cache key prefix for tests, to avoid overwriting runtime.\nfor cache_settings in CACHES.values():\n current_prefix = cache_settings.get(\"KEY_PREFIX\", \"\")\n cache_settings[\"KEY_PREFIX\"] = \"test.\" + current_prefix\n\n# Use un-versioned file names, like main.css, instead of versioned\n# filenames requiring hashing, like mdn.1cb62215bf0c.css\nSTATICFILES_STORAGE = \"pipeline.storage.PipelineStorage\"\n\n# Switch Pipeline to DEBUG=False / Production values\n\n# The documents claim True means assets should be compressed, which seems like\n# more work, but it is 4x slower when False, maybe because it detects the\n# existence of the file and skips generating a new one.\nPIPELINE[\"PIPELINE_ENABLED\"] = True\n\n# The documents suggest this does nothing when PIPELINE_ENABLED=True. 
But,\n# testing shows that tests run faster when set to True.\nPIPELINE[\"PIPELINE_COLLECTOR_ENABLED\"] = True\n\n# We need the real Sass compiler here instead of the pass-through used for\n# local dev.\nPIPELINE[\"COMPILERS\"] = (\"pipeline.compilers.sass.SASSCompiler\",)\n\n# Testing with django-pipeline 1.6.8, PipelineStorage\n# Enabled=T, Collector=T - 482s\n# Enabled=T, Collector=F - 535s\n# Enabled=F, Collector=T - 18262s\n# Enabled=F, Collector=F - 2043s\n\n# Defer to django-pipeline's finders for testing\n# This avoids reading the static folder for each test client request, for\n# a 10x speedup on Docker on MacOS.\nWHITENOISE_AUTOREFRESH = True\nWHITENOISE_USE_FINDERS = True\n\n# Never rely on the .env\nGOOGLE_ANALYTICS_ACCOUNT = None\n\n# Silence warnings about defaults that change in django-storages 2.0\nAWS_BUCKET_ACL = None\nAWS_DEFAULT_ACL = None\n\n# Use a dedicated minio bucket for tests\nATTACHMENTS_AWS_STORAGE_BUCKET_NAME = \"test\"\n\n# Never enabled in tests.\nSENTRY_DSN = None\n\n# To make absolutely sure we never accidentally trigger the GA tracking\n# within tests to the actual (and default) www.google-analytics.com this is\n# an extra safeguard.\nGOOGLE_ANALYTICS_TRACKING_URL = \"https://thisllneverwork.example.com/collect\"\n\n# Because that's what all the tests presume.\nSITE_ID = 1\n\n# Because it's on by default\nENABLE_BCD_SIGNAL = True\n\n# Stripe API KEY settings\nSTRIPE_PUBLIC_KEY = \"testing\"\nSTRIPE_SECRET_KEY = \"testing\"\nSTRIPE_PLAN_ID = \"testing\"\n\n# For legacy reasons, the tests assume these are always true so don't\n# let local overrides take effect.\nINDEX_HTML_ATTRIBUTES = True\nINDEX_CSS_CLASSNAMES = True\n\n# Amount for the monthly subscription.\n# It's hardcoded here in case some test depends on the number and it futureproofs\n# our tests to not deviate when the actual number changes since that number\n# change shouldn't affect the tests.\nCONTRIBUTION_AMOUNT_USD = 4.99\n\nSENDINBLUE_API_KEY = \"testing\"\nSENDINBLUE_LIST_ID = 7327\n", "path": "kuma/settings/pytest.py"}]} | 1,768 | 250 |
gh_patches_debug_630 | rasdani/github-patches | git_diff | pex-tool__pex-2240 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.146
On the docket:
+ [x] Fix non executable venv sys path bug #2236
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.145"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.145"
+__version__ = "2.1.146"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.145\"\n+__version__ = \"2.1.146\"\n", "issue": "Release 2.1.146\nOn the docket:\r\n+ [x] Fix non executable venv sys path bug #2236\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.145\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.146\"\n", "path": "pex/version.py"}]} | 341 | 98 |
gh_patches_debug_17884 | rasdani/github-patches | git_diff | deis__deis-1517 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`deis run` generates 500 error
[Integration tests](http://ci.deis.io/view/example-apps/job/test-integration-clojure-ring/47/console) against master found an error in `deis run`:
```
=== appssample Domains
No domains
ok
/home/jenkins/workspace/test-integration-clojure-ring/src/github.com/deis/deis/tests/example-clojure-ring
apps:run echo hello
500 INTERNAL SERVER ERROR
<h1>Server Error (500)</h1>
error at command wait
--- FAIL: TestApps (76.15 seconds)
itutils.go:199: Failed:
exit status 1
FAIL
exit status 1
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `controller/api/tasks.py`
Content:
```
1 """
2 Long-running tasks for the Deis Controller API
3
4 This module orchestrates the real "heavy lifting" of Deis, and as such these
5 functions are decorated to run as asynchronous celery tasks.
6 """
7
8 from __future__ import unicode_literals
9
10 import requests
11 import threading
12
13 from celery import task
14 from django.conf import settings
15
16
17 @task
18 def create_cluster(cluster):
19 cluster._scheduler.setUp()
20
21
22 @task
23 def destroy_cluster(cluster):
24 for app in cluster.app_set.all():
25 app.destroy()
26 cluster._scheduler.tearDown()
27
28
29 @task
30 def deploy_release(app, release):
31 containers = app.container_set.all()
32 threads = []
33 for c in containers:
34 threads.append(threading.Thread(target=c.deploy, args=(release,)))
35 [t.start() for t in threads]
36 [t.join() for t in threads]
37
38
39 @task
40 def import_repository(source, target_repository):
41 """Imports an image from a remote registry into our own private registry"""
42 data = {
43 'src': source,
44 }
45 requests.post(
46 '{}/v1/repositories/{}/tags'.format(settings.REGISTRY_URL,
47 target_repository),
48 data=data,
49 )
50
51
52 @task
53 def start_containers(containers):
54 create_threads = []
55 start_threads = []
56 for c in containers:
57 create_threads.append(threading.Thread(target=c.create))
58 start_threads.append(threading.Thread(target=c.start))
59 [t.start() for t in create_threads]
60 [t.join() for t in create_threads]
61 [t.start() for t in start_threads]
62 [t.join() for t in start_threads]
63
64
65 @task
66 def stop_containers(containers):
67 destroy_threads = []
68 delete_threads = []
69 for c in containers:
70 destroy_threads.append(threading.Thread(target=c.destroy))
71 delete_threads.append(threading.Thread(target=c.delete))
72 [t.start() for t in destroy_threads]
73 [t.join() for t in destroy_threads]
74 [t.start() for t in delete_threads]
75 [t.join() for t in delete_threads]
76
77
78 @task
79 def run_command(c, command):
80 release = c.release
81 version = release.version
82 image = release.image
83 try:
84 # pull the image first
85 rc, pull_output = c.run("docker pull {image}".format(**locals()))
86 if rc != 0:
87 raise EnvironmentError('Could not pull image: {pull_image}'.format(**locals()))
88 # run the command
89 docker_args = ' '.join(['--entrypoint=/bin/sh',
90 '-a', 'stdout', '-a', 'stderr', '--rm', image])
91 escaped_command = command.replace("'", "'\\''")
92 command = r"docker run {docker_args} -c \'{escaped_command}\'".format(**locals())
93 return c.run(command)
94 finally:
95 c.delete()
96
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/controller/api/tasks.py b/controller/api/tasks.py
--- a/controller/api/tasks.py
+++ b/controller/api/tasks.py
@@ -79,12 +79,14 @@
def run_command(c, command):
release = c.release
version = release.version
- image = release.image
+ image = '{}:{}/{}'.format(settings.REGISTRY_HOST,
+ settings.REGISTRY_PORT,
+ release.image)
try:
# pull the image first
rc, pull_output = c.run("docker pull {image}".format(**locals()))
if rc != 0:
- raise EnvironmentError('Could not pull image: {pull_image}'.format(**locals()))
+ raise EnvironmentError('Could not pull image: {image}'.format(**locals()))
# run the command
docker_args = ' '.join(['--entrypoint=/bin/sh',
'-a', 'stdout', '-a', 'stderr', '--rm', image])
| {"golden_diff": "diff --git a/controller/api/tasks.py b/controller/api/tasks.py\n--- a/controller/api/tasks.py\n+++ b/controller/api/tasks.py\n@@ -79,12 +79,14 @@\n def run_command(c, command):\n release = c.release\n version = release.version\n- image = release.image\n+ image = '{}:{}/{}'.format(settings.REGISTRY_HOST,\n+ settings.REGISTRY_PORT,\n+ release.image)\n try:\n # pull the image first\n rc, pull_output = c.run(\"docker pull {image}\".format(**locals()))\n if rc != 0:\n- raise EnvironmentError('Could not pull image: {pull_image}'.format(**locals()))\n+ raise EnvironmentError('Could not pull image: {image}'.format(**locals()))\n # run the command\n docker_args = ' '.join(['--entrypoint=/bin/sh',\n '-a', 'stdout', '-a', 'stderr', '--rm', image])\n", "issue": "`deis run` generates 500 error\n[Integration tests](http://ci.deis.io/view/example-apps/job/test-integration-clojure-ring/47/console) against master found an error in `deis run`:\n\n```\n=== appssample Domains\nNo domains\n\n\nok\n/home/jenkins/workspace/test-integration-clojure-ring/src/github.com/deis/deis/tests/example-clojure-ring\napps:run echo hello\n\n500 INTERNAL SERVER ERROR\n<h1>Server Error (500)</h1>\n\nerror at command wait\n--- FAIL: TestApps (76.15 seconds)\n itutils.go:199: Failed:\n exit status 1\nFAIL\nexit status 1\n```\n\n", "before_files": [{"content": "\"\"\"\nLong-running tasks for the Deis Controller API\n\nThis module orchestrates the real \"heavy lifting\" of Deis, and as such these\nfunctions are decorated to run as asynchronous celery tasks.\n\"\"\"\n\nfrom __future__ import unicode_literals\n\nimport requests\nimport threading\n\nfrom celery import task\nfrom django.conf import settings\n\n\n@task\ndef create_cluster(cluster):\n cluster._scheduler.setUp()\n\n\n@task\ndef destroy_cluster(cluster):\n for app in cluster.app_set.all():\n app.destroy()\n cluster._scheduler.tearDown()\n\n\n@task\ndef deploy_release(app, release):\n containers = app.container_set.all()\n threads = []\n for c in containers:\n threads.append(threading.Thread(target=c.deploy, args=(release,)))\n [t.start() for t in threads]\n [t.join() for t in threads]\n\n\n@task\ndef import_repository(source, target_repository):\n \"\"\"Imports an image from a remote registry into our own private registry\"\"\"\n data = {\n 'src': source,\n }\n requests.post(\n '{}/v1/repositories/{}/tags'.format(settings.REGISTRY_URL,\n target_repository),\n data=data,\n )\n\n\n@task\ndef start_containers(containers):\n create_threads = []\n start_threads = []\n for c in containers:\n create_threads.append(threading.Thread(target=c.create))\n start_threads.append(threading.Thread(target=c.start))\n [t.start() for t in create_threads]\n [t.join() for t in create_threads]\n [t.start() for t in start_threads]\n [t.join() for t in start_threads]\n\n\n@task\ndef stop_containers(containers):\n destroy_threads = []\n delete_threads = []\n for c in containers:\n destroy_threads.append(threading.Thread(target=c.destroy))\n delete_threads.append(threading.Thread(target=c.delete))\n [t.start() for t in destroy_threads]\n [t.join() for t in destroy_threads]\n [t.start() for t in delete_threads]\n [t.join() for t in delete_threads]\n\n\n@task\ndef run_command(c, command):\n release = c.release\n version = release.version\n image = release.image\n try:\n # pull the image first\n rc, pull_output = c.run(\"docker pull {image}\".format(**locals()))\n if rc != 0:\n raise EnvironmentError('Could not pull image: {pull_image}'.format(**locals()))\n # run the command\n 
docker_args = ' '.join(['--entrypoint=/bin/sh',\n '-a', 'stdout', '-a', 'stderr', '--rm', image])\n escaped_command = command.replace(\"'\", \"'\\\\''\")\n command = r\"docker run {docker_args} -c \\'{escaped_command}\\'\".format(**locals())\n return c.run(command)\n finally:\n c.delete()\n", "path": "controller/api/tasks.py"}], "after_files": [{"content": "\"\"\"\nLong-running tasks for the Deis Controller API\n\nThis module orchestrates the real \"heavy lifting\" of Deis, and as such these\nfunctions are decorated to run as asynchronous celery tasks.\n\"\"\"\n\nfrom __future__ import unicode_literals\n\nimport requests\nimport threading\n\nfrom celery import task\nfrom django.conf import settings\n\n\n@task\ndef create_cluster(cluster):\n cluster._scheduler.setUp()\n\n\n@task\ndef destroy_cluster(cluster):\n for app in cluster.app_set.all():\n app.destroy()\n cluster._scheduler.tearDown()\n\n\n@task\ndef deploy_release(app, release):\n containers = app.container_set.all()\n threads = []\n for c in containers:\n threads.append(threading.Thread(target=c.deploy, args=(release,)))\n [t.start() for t in threads]\n [t.join() for t in threads]\n\n\n@task\ndef import_repository(source, target_repository):\n \"\"\"Imports an image from a remote registry into our own private registry\"\"\"\n data = {\n 'src': source,\n }\n requests.post(\n '{}/v1/repositories/{}/tags'.format(settings.REGISTRY_URL,\n target_repository),\n data=data,\n )\n\n\n@task\ndef start_containers(containers):\n create_threads = []\n start_threads = []\n for c in containers:\n create_threads.append(threading.Thread(target=c.create))\n start_threads.append(threading.Thread(target=c.start))\n [t.start() for t in create_threads]\n [t.join() for t in create_threads]\n [t.start() for t in start_threads]\n [t.join() for t in start_threads]\n\n\n@task\ndef stop_containers(containers):\n destroy_threads = []\n delete_threads = []\n for c in containers:\n destroy_threads.append(threading.Thread(target=c.destroy))\n delete_threads.append(threading.Thread(target=c.delete))\n [t.start() for t in destroy_threads]\n [t.join() for t in destroy_threads]\n [t.start() for t in delete_threads]\n [t.join() for t in delete_threads]\n\n\n@task\ndef run_command(c, command):\n release = c.release\n version = release.version\n image = '{}:{}/{}'.format(settings.REGISTRY_HOST,\n settings.REGISTRY_PORT,\n release.image)\n try:\n # pull the image first\n rc, pull_output = c.run(\"docker pull {image}\".format(**locals()))\n if rc != 0:\n raise EnvironmentError('Could not pull image: {image}'.format(**locals()))\n # run the command\n docker_args = ' '.join(['--entrypoint=/bin/sh',\n '-a', 'stdout', '-a', 'stderr', '--rm', image])\n escaped_command = command.replace(\"'\", \"'\\\\''\")\n command = r\"docker run {docker_args} -c \\'{escaped_command}\\'\".format(**locals())\n return c.run(command)\n finally:\n c.delete()\n", "path": "controller/api/tasks.py"}]} | 1,210 | 205 |
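The patch in the record above qualifies the release image with the registry host and port before running `docker pull`. Below is a minimal standalone sketch of that image-reference construction; `REGISTRY_HOST` and `REGISTRY_PORT` stand in for the Django settings values and are assumptions for illustration only.

```python
# Minimal standalone sketch of the registry-qualified image name introduced by the patch
# above. REGISTRY_HOST and REGISTRY_PORT stand in for django.conf.settings values and are
# placeholder assumptions for illustration only.
REGISTRY_HOST = "registry.example.com"
REGISTRY_PORT = 5000


def qualified_image(release_image: str) -> str:
    """Build the image reference that `docker pull` should receive."""
    return "{}:{}/{}".format(REGISTRY_HOST, REGISTRY_PORT, release_image)


# e.g. 'appssample:v2' -> 'registry.example.com:5000/appssample:v2'
print(qualified_image("appssample:v2"))
```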
gh_patches_debug_38869 | rasdani/github-patches | git_diff | mathesar-foundation__mathesar-3133 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Implement 'Shares'
## Issues
- [x] https://github.com/centerofci/mathesar/issues/3033
- [x] https://github.com/centerofci/mathesar/issues/3034
- [x] https://github.com/centerofci/mathesar/issues/3035
- [x] https://github.com/centerofci/mathesar/issues/3036
## Tasks:
- [ ] Add regenerate slug endpoints
### https://github.com/centerofci/mathesar/pull/3093#pullrequestreview-1546069582
- [ ] Address the following in the shared table consumer page
- [ ] Disable re-reordering of columns
- [ ] Don't show the icon hyperlink to the record page within the PK cell
- [ ] Remove the following entries in the cell context menu:
- "Set to NULL"
- "Go to Record Page"
- "Go to Linked Record" (shown only for FK columns)
- [ ] Remove the "Go to Record Page" entry from the row header context menu
- [ ] Disable record selector in filtering for FK columns
- [ ] Come up with a better term for 'ShareConsumer'. Some suggestions:
- ShareAccessInfo
- SharedLink
- ConsumableShare
## Related:
* [Product spec](https://wiki.mathesar.org/en/product/specs/publicly-shareable-links)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mathesar/api/ui/viewsets/shares.py`
Content:
```
1 from rest_framework import viewsets
2 from rest_access_policy import AccessViewSetMixin
3
4 from mathesar.api.pagination import DefaultLimitOffsetPagination
5 from mathesar.api.ui.serializers.shares import SharedTableSerializer, SharedQuerySerializer
6 from mathesar.api.ui.permissions.shares import SharedTableAccessPolicy, SharedQueryAccessPolicy
7 from mathesar.models.shares import SharedTable, SharedQuery
8
9
10 class SharedTableViewSet(AccessViewSetMixin, viewsets.ModelViewSet):
11 pagination_class = DefaultLimitOffsetPagination
12 serializer_class = SharedTableSerializer
13 access_policy = SharedTableAccessPolicy
14
15 def get_queryset(self):
16 return SharedTable.objects.filter(table_id=self.kwargs['table_pk']).order_by('-created_at')
17
18 def perform_create(self, serializer):
19 serializer.save(table_id=self.kwargs['table_pk'])
20
21
22 class SharedQueryViewSet(AccessViewSetMixin, viewsets.ModelViewSet):
23 pagination_class = DefaultLimitOffsetPagination
24 serializer_class = SharedQuerySerializer
25 access_policy = SharedQueryAccessPolicy
26
27 def get_queryset(self):
28 return SharedQuery.objects.filter(query_id=self.kwargs['query_pk']).order_by('-created_at')
29
30 def perform_create(self, serializer):
31 serializer.save(query_id=self.kwargs['query_pk'])
32
```
Path: `mathesar/api/ui/permissions/shares.py`
Content:
```
1 from rest_access_policy import AccessPolicy
2
3 from mathesar.api.utils import get_query_or_404
4 from mathesar.api.permission_utils import QueryAccessInspector
5
6
7 class SharedTableAccessPolicy(AccessPolicy):
8 statements = [
9 {
10 'action': ['list', 'retrieve'],
11 'principal': 'authenticated',
12 'effect': 'allow',
13 'condition_expression': 'is_atleast_viewer_nested_table_resource'
14 },
15 {
16 'action': ['create', 'destroy', 'update', 'partial_update'],
17 'principal': 'authenticated',
18 'effect': 'allow',
19 'condition_expression': 'is_atleast_editor_nested_table_resource'
20 },
21 ]
22
23
24 class SharedQueryAccessPolicy(AccessPolicy):
25 statements = [
26 {
27 'action': ['list', 'retrieve'],
28 'principal': 'authenticated',
29 'effect': 'allow',
30 'condition_expression': 'is_atleast_query_viewer'
31 },
32 {
33 'action': ['create', 'destroy', 'update', 'partial_update'],
34 'principal': 'authenticated',
35 'effect': 'allow',
36 'condition_expression': 'is_atleast_query_editor'
37 },
38 ]
39
40 def is_atleast_query_viewer(self, request, view, action):
41 query = get_query_or_404(view.kwargs['query_pk'])
42 return QueryAccessInspector(request.user, query).is_atleast_viewer()
43
44 def is_atleast_query_editor(self, request, view, action):
45 query = get_query_or_404(view.kwargs['query_pk'])
46 return QueryAccessInspector(request.user, query).is_atleast_editor()
47
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mathesar/api/ui/permissions/shares.py b/mathesar/api/ui/permissions/shares.py
--- a/mathesar/api/ui/permissions/shares.py
+++ b/mathesar/api/ui/permissions/shares.py
@@ -13,7 +13,7 @@
'condition_expression': 'is_atleast_viewer_nested_table_resource'
},
{
- 'action': ['create', 'destroy', 'update', 'partial_update'],
+ 'action': ['create', 'destroy', 'update', 'partial_update', 'regenerate'],
'principal': 'authenticated',
'effect': 'allow',
'condition_expression': 'is_atleast_editor_nested_table_resource'
@@ -30,7 +30,7 @@
'condition_expression': 'is_atleast_query_viewer'
},
{
- 'action': ['create', 'destroy', 'update', 'partial_update'],
+ 'action': ['create', 'destroy', 'update', 'partial_update', 'regenerate'],
'principal': 'authenticated',
'effect': 'allow',
'condition_expression': 'is_atleast_query_editor'
diff --git a/mathesar/api/ui/viewsets/shares.py b/mathesar/api/ui/viewsets/shares.py
--- a/mathesar/api/ui/viewsets/shares.py
+++ b/mathesar/api/ui/viewsets/shares.py
@@ -1,5 +1,8 @@
+import uuid
from rest_framework import viewsets
from rest_access_policy import AccessViewSetMixin
+from rest_framework.decorators import action
+from rest_framework.response import Response
from mathesar.api.pagination import DefaultLimitOffsetPagination
from mathesar.api.ui.serializers.shares import SharedTableSerializer, SharedQuerySerializer
@@ -7,7 +10,17 @@
from mathesar.models.shares import SharedTable, SharedQuery
-class SharedTableViewSet(AccessViewSetMixin, viewsets.ModelViewSet):
+class RegenerateSlugMixin(viewsets.GenericViewSet):
+ @action(methods=['post'], detail=True)
+ def regenerate(self, *args, **kwargs):
+ share = self.get_object()
+ share.slug = uuid.uuid4()
+ share.save()
+ serializer = self.get_serializer(share)
+ return Response(serializer.data)
+
+
+class SharedTableViewSet(AccessViewSetMixin, viewsets.ModelViewSet, RegenerateSlugMixin):
pagination_class = DefaultLimitOffsetPagination
serializer_class = SharedTableSerializer
access_policy = SharedTableAccessPolicy
@@ -19,7 +32,7 @@
serializer.save(table_id=self.kwargs['table_pk'])
-class SharedQueryViewSet(AccessViewSetMixin, viewsets.ModelViewSet):
+class SharedQueryViewSet(AccessViewSetMixin, viewsets.ModelViewSet, RegenerateSlugMixin):
pagination_class = DefaultLimitOffsetPagination
serializer_class = SharedQuerySerializer
access_policy = SharedQueryAccessPolicy
| {"golden_diff": "diff --git a/mathesar/api/ui/permissions/shares.py b/mathesar/api/ui/permissions/shares.py\n--- a/mathesar/api/ui/permissions/shares.py\n+++ b/mathesar/api/ui/permissions/shares.py\n@@ -13,7 +13,7 @@\n 'condition_expression': 'is_atleast_viewer_nested_table_resource'\n },\n {\n- 'action': ['create', 'destroy', 'update', 'partial_update'],\n+ 'action': ['create', 'destroy', 'update', 'partial_update', 'regenerate'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_editor_nested_table_resource'\n@@ -30,7 +30,7 @@\n 'condition_expression': 'is_atleast_query_viewer'\n },\n {\n- 'action': ['create', 'destroy', 'update', 'partial_update'],\n+ 'action': ['create', 'destroy', 'update', 'partial_update', 'regenerate'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_query_editor'\ndiff --git a/mathesar/api/ui/viewsets/shares.py b/mathesar/api/ui/viewsets/shares.py\n--- a/mathesar/api/ui/viewsets/shares.py\n+++ b/mathesar/api/ui/viewsets/shares.py\n@@ -1,5 +1,8 @@\n+import uuid\n from rest_framework import viewsets\n from rest_access_policy import AccessViewSetMixin\n+from rest_framework.decorators import action\n+from rest_framework.response import Response\n \n from mathesar.api.pagination import DefaultLimitOffsetPagination\n from mathesar.api.ui.serializers.shares import SharedTableSerializer, SharedQuerySerializer\n@@ -7,7 +10,17 @@\n from mathesar.models.shares import SharedTable, SharedQuery\n \n \n-class SharedTableViewSet(AccessViewSetMixin, viewsets.ModelViewSet):\n+class RegenerateSlugMixin(viewsets.GenericViewSet):\n+ @action(methods=['post'], detail=True)\n+ def regenerate(self, *args, **kwargs):\n+ share = self.get_object()\n+ share.slug = uuid.uuid4()\n+ share.save()\n+ serializer = self.get_serializer(share)\n+ return Response(serializer.data)\n+\n+\n+class SharedTableViewSet(AccessViewSetMixin, viewsets.ModelViewSet, RegenerateSlugMixin):\n pagination_class = DefaultLimitOffsetPagination\n serializer_class = SharedTableSerializer\n access_policy = SharedTableAccessPolicy\n@@ -19,7 +32,7 @@\n serializer.save(table_id=self.kwargs['table_pk'])\n \n \n-class SharedQueryViewSet(AccessViewSetMixin, viewsets.ModelViewSet):\n+class SharedQueryViewSet(AccessViewSetMixin, viewsets.ModelViewSet, RegenerateSlugMixin):\n pagination_class = DefaultLimitOffsetPagination\n serializer_class = SharedQuerySerializer\n access_policy = SharedQueryAccessPolicy\n", "issue": "Implement 'Shares'\n## Issues\r\n- [x] https://github.com/centerofci/mathesar/issues/3033\r\n- [x] https://github.com/centerofci/mathesar/issues/3034\r\n- [x] https://github.com/centerofci/mathesar/issues/3035\r\n- [x] https://github.com/centerofci/mathesar/issues/3036\r\n\r\n## Tasks:\r\n- [ ] Add regenerate slug endpoints\r\n\r\n### https://github.com/centerofci/mathesar/pull/3093#pullrequestreview-1546069582\r\n- [ ] Address the following in shared table consumer page\r\n - [ ] Disable re-reordering of columns\r\n - [ ] Don't show the icon hyperlink to the record page within the PK cell\r\n - [ ] Remove the following entries in the cell context menu:\r\n - \"Set to NULL\"\r\n - \"Go to Record Page\"\r\n - \"Go to Linked Record\" (shown only for FK columns)\r\n - [ ] Remove the \"Go to Record Page\" entry from the row header context menu\r\n - [ ] Disable record selector in filtering for FK columns\r\n- [ ] Come up with a better term for 'ShareConsumer'. 
Some suggestions:\r\n - ShareAccessInfo\r\n - SharedLink\r\n - ConsumableShare\r\n\r\n## Related:\r\n* [Product spec](https://wiki.mathesar.org/en/product/specs/publicly-shareable-links)\n", "before_files": [{"content": "from rest_framework import viewsets\nfrom rest_access_policy import AccessViewSetMixin\n\nfrom mathesar.api.pagination import DefaultLimitOffsetPagination\nfrom mathesar.api.ui.serializers.shares import SharedTableSerializer, SharedQuerySerializer\nfrom mathesar.api.ui.permissions.shares import SharedTableAccessPolicy, SharedQueryAccessPolicy\nfrom mathesar.models.shares import SharedTable, SharedQuery\n\n\nclass SharedTableViewSet(AccessViewSetMixin, viewsets.ModelViewSet):\n pagination_class = DefaultLimitOffsetPagination\n serializer_class = SharedTableSerializer\n access_policy = SharedTableAccessPolicy\n\n def get_queryset(self):\n return SharedTable.objects.filter(table_id=self.kwargs['table_pk']).order_by('-created_at')\n\n def perform_create(self, serializer):\n serializer.save(table_id=self.kwargs['table_pk'])\n\n\nclass SharedQueryViewSet(AccessViewSetMixin, viewsets.ModelViewSet):\n pagination_class = DefaultLimitOffsetPagination\n serializer_class = SharedQuerySerializer\n access_policy = SharedQueryAccessPolicy\n\n def get_queryset(self):\n return SharedQuery.objects.filter(query_id=self.kwargs['query_pk']).order_by('-created_at')\n\n def perform_create(self, serializer):\n serializer.save(query_id=self.kwargs['query_pk'])\n", "path": "mathesar/api/ui/viewsets/shares.py"}, {"content": "from rest_access_policy import AccessPolicy\n\nfrom mathesar.api.utils import get_query_or_404\nfrom mathesar.api.permission_utils import QueryAccessInspector\n\n\nclass SharedTableAccessPolicy(AccessPolicy):\n statements = [\n {\n 'action': ['list', 'retrieve'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_viewer_nested_table_resource'\n },\n {\n 'action': ['create', 'destroy', 'update', 'partial_update'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_editor_nested_table_resource'\n },\n ]\n\n\nclass SharedQueryAccessPolicy(AccessPolicy):\n statements = [\n {\n 'action': ['list', 'retrieve'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_query_viewer'\n },\n {\n 'action': ['create', 'destroy', 'update', 'partial_update'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_query_editor'\n },\n ]\n\n def is_atleast_query_viewer(self, request, view, action):\n query = get_query_or_404(view.kwargs['query_pk'])\n return QueryAccessInspector(request.user, query).is_atleast_viewer()\n\n def is_atleast_query_editor(self, request, view, action):\n query = get_query_or_404(view.kwargs['query_pk'])\n return QueryAccessInspector(request.user, query).is_atleast_editor()\n", "path": "mathesar/api/ui/permissions/shares.py"}], "after_files": [{"content": "import uuid\nfrom rest_framework import viewsets\nfrom rest_access_policy import AccessViewSetMixin\nfrom rest_framework.decorators import action\nfrom rest_framework.response import Response\n\nfrom mathesar.api.pagination import DefaultLimitOffsetPagination\nfrom mathesar.api.ui.serializers.shares import SharedTableSerializer, SharedQuerySerializer\nfrom mathesar.api.ui.permissions.shares import SharedTableAccessPolicy, SharedQueryAccessPolicy\nfrom mathesar.models.shares import SharedTable, SharedQuery\n\n\nclass RegenerateSlugMixin(viewsets.GenericViewSet):\n 
@action(methods=['post'], detail=True)\n def regenerate(self, *args, **kwargs):\n share = self.get_object()\n share.slug = uuid.uuid4()\n share.save()\n serializer = self.get_serializer(share)\n return Response(serializer.data)\n\n\nclass SharedTableViewSet(AccessViewSetMixin, viewsets.ModelViewSet, RegenerateSlugMixin):\n pagination_class = DefaultLimitOffsetPagination\n serializer_class = SharedTableSerializer\n access_policy = SharedTableAccessPolicy\n\n def get_queryset(self):\n return SharedTable.objects.filter(table_id=self.kwargs['table_pk']).order_by('-created_at')\n\n def perform_create(self, serializer):\n serializer.save(table_id=self.kwargs['table_pk'])\n\n\nclass SharedQueryViewSet(AccessViewSetMixin, viewsets.ModelViewSet, RegenerateSlugMixin):\n pagination_class = DefaultLimitOffsetPagination\n serializer_class = SharedQuerySerializer\n access_policy = SharedQueryAccessPolicy\n\n def get_queryset(self):\n return SharedQuery.objects.filter(query_id=self.kwargs['query_pk']).order_by('-created_at')\n\n def perform_create(self, serializer):\n serializer.save(query_id=self.kwargs['query_pk'])\n", "path": "mathesar/api/ui/viewsets/shares.py"}, {"content": "from rest_access_policy import AccessPolicy\n\nfrom mathesar.api.utils import get_query_or_404\nfrom mathesar.api.permission_utils import QueryAccessInspector\n\n\nclass SharedTableAccessPolicy(AccessPolicy):\n statements = [\n {\n 'action': ['list', 'retrieve'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_viewer_nested_table_resource'\n },\n {\n 'action': ['create', 'destroy', 'update', 'partial_update', 'regenerate'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_editor_nested_table_resource'\n },\n ]\n\n\nclass SharedQueryAccessPolicy(AccessPolicy):\n statements = [\n {\n 'action': ['list', 'retrieve'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_query_viewer'\n },\n {\n 'action': ['create', 'destroy', 'update', 'partial_update', 'regenerate'],\n 'principal': 'authenticated',\n 'effect': 'allow',\n 'condition_expression': 'is_atleast_query_editor'\n },\n ]\n\n def is_atleast_query_viewer(self, request, view, action):\n query = get_query_or_404(view.kwargs['query_pk'])\n return QueryAccessInspector(request.user, query).is_atleast_viewer()\n\n def is_atleast_query_editor(self, request, view, action):\n query = get_query_or_404(view.kwargs['query_pk'])\n return QueryAccessInspector(request.user, query).is_atleast_editor()\n", "path": "mathesar/api/ui/permissions/shares.py"}]} | 1,332 | 609 |
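The `RegenerateSlugMixin` added by the patch above rotates a share's slug on a POST `regenerate` action. Below is a plain-Python sketch of that slug-rotation behaviour; the `Share` class is a stand-in for Mathesar's `SharedTable`/`SharedQuery` models (an assumption for illustration, not the real ORM models).

```python
# Plain-Python sketch of the slug rotation performed by RegenerateSlugMixin.regenerate().
# Share is a stand-in for the SharedTable/SharedQuery Django models, not the real thing.
import uuid


class Share:
    def __init__(self):
        self.slug = uuid.uuid4()

    def regenerate(self) -> uuid.UUID:
        """Replace the slug so any previously shared public link stops resolving."""
        self.slug = uuid.uuid4()
        return self.slug


share = Share()
old_slug = share.slug
new_slug = share.regenerate()
assert new_slug != old_slug  # the old link is now invalid; callers must use the new slug
```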
gh_patches_debug_39187 | rasdani/github-patches | git_diff | deepset-ai__haystack-7205 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Docstrings - `haystack.components.samplers`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `haystack/components/samplers/top_p.py`
Content:
```
1 import logging
2 from typing import List, Optional
3
4 from haystack import ComponentError, Document, component
5 from haystack.lazy_imports import LazyImport
6
7 logger = logging.getLogger(__name__)
8
9
10 with LazyImport(message="Run 'pip install \"torch>=1.13\"'") as torch_import:
11 import torch
12
13
14 @component
15 class TopPSampler:
16 """
17 Implements top-p (nucleus) sampling for document filtering based on cumulative probability scores.
18
19 This class provides functionality to filter a list of documents by selecting those whose scores fall
20 within the top 'p' percent of the cumulative distribution. The method is useful for focusing on high-probability
21 documents while filtering out less relevant ones based on their assigned scores.
22
23 Usage example:
24
25 ```python
26 from haystack import Document
27 from haystack.components.samplers import TopPSampler
28
29 sampler = TopPSampler(top_p=0.95, score_field="similarity_score")
30 docs = [
31 Document(text="Berlin", meta={"similarity_score": -10.6}),
32 Document(text="Belgrade", meta={"similarity_score": -8.9}),
33 Document(text="Sarajevo", meta={"similarity_score": -4.6}),
34 ]
35 output = sampler.run(documents=docs)
36 docs = output["documents"]
37 assert len(docs) == 1
38 assert docs[0].content == "Sarajevo"
39 ```
40 """
41
42 def __init__(self, top_p: float = 1.0, score_field: Optional[str] = None):
43 """
44 Creates an instance of TopPSampler.
45
46 :param top_p: Float between 0 and 1 representing the cumulative probability threshold for document selection.
47 Defaults to 1.0, indicating no filtering (all documents are retained).
48 :param score_field: Name of the field in each document's metadata that contains the score. If None, the default
49 document score field is used.
50 """
51 torch_import.check()
52
53 self.top_p = top_p
54 self.score_field = score_field
55
56 @component.output_types(documents=List[Document])
57 def run(self, documents: List[Document], top_p: Optional[float] = None):
58 """
59 Filters documents using top-p sampling based on their scores.
60
61 :param documents: List of Document objects to be filtered.
62 :param top_p: Optional. A float to override the cumulative probability threshold set during initialization.
63 If None, the class's top_p value is used.
64 :return: A dictionary with a key 'documents' containing the list of filtered Document objects.
65
66 This method applies top-p sampling to filter out documents. It selects those documents whose similarity scores
67 are within the top 'p' percent of the cumulative distribution, based on the specified or default top_p value.
68
69 If the specified top_p results in no documents being selected (especially in cases of a low top_p value), the
70 method defaults to returning the document with the highest similarity score.
71
72 :raises ValueError: If the top_p value is not within the range [0, 1].
73 """
74 if not documents:
75 return {"documents": []}
76
77 top_p = top_p or self.top_p or 1.0 # default to 1.0 if both are None
78
79 if not 0 <= top_p <= 1:
80 raise ValueError(f"top_p must be between 0 and 1. Got {top_p}.")
81
82 similarity_scores = torch.tensor(self._collect_scores(documents), dtype=torch.float32)
83
84 # Apply softmax normalization to the similarity scores
85 probs = torch.nn.functional.softmax(similarity_scores, dim=-1)
86
87 # Sort the probabilities and calculate their cumulative sum
88 sorted_probs, sorted_indices = torch.sort(probs, descending=True)
89 cumulative_probs = torch.cumsum(sorted_probs, dim=-1)
90
91 # Check if the cumulative probabilities are close to top_p with a 1e-6 tolerance
92 close_to_top_p = torch.isclose(cumulative_probs, torch.tensor(top_p, device=cumulative_probs.device), atol=1e-6)
93
94 # Combine the close_to_top_p with original condition using logical OR
95 condition = (cumulative_probs <= top_p) | close_to_top_p
96
97 # Find the indices with cumulative probabilities that exceed top_p
98 top_p_indices = torch.where(torch.BoolTensor(condition))[0]
99
100 # Map the selected indices back to their original indices
101 original_indices = sorted_indices[top_p_indices]
102 selected_docs = [documents[i.item()] for i in original_indices]
103
104 # If low p resulted in no documents being selected, then
105 # return at least one document
106 if not selected_docs:
107 logger.warning(
108 "Top-p sampling with p=%s resulted in no documents being selected. "
109 "Returning the document with the highest similarity score.",
110 top_p,
111 )
112 highest_prob_indices = torch.argsort(probs, descending=True)
113 selected_docs = [documents[int(highest_prob_indices[0].item())]]
114
115 return {"documents": selected_docs}
116
117 def _collect_scores(self, documents: List[Document]) -> List[float]:
118 """
119 Collect the scores from the documents' metadata.
120 :param documents: List of Documents.
121 :return: List of scores.
122 """
123 if self.score_field:
124 missing_scores_docs = [d for d in documents if self.score_field not in d.meta]
125 if missing_scores_docs:
126 missing_scores_docs_ids = [d.id for d in missing_scores_docs if d.id]
127 raise ComponentError(
128 f"Score field '{self.score_field}' not found in metadata of documents "
129 f"with IDs: {missing_scores_docs_ids}."
130 f"Make sure that all documents have a score field '{self.score_field}' in their metadata."
131 )
132 return [d.meta[self.score_field] for d in documents]
133 else:
134 missing_scores_docs = [d for d in documents if d.score is None]
135 if missing_scores_docs:
136 missing_scores_docs_ids = [d.id for d in missing_scores_docs if d.id]
137 raise ComponentError(
138 f"Ensure all documents have a valid score value. These docs {missing_scores_docs_ids} don't."
139 )
140 return [d.score for d in documents] # type: ignore ## because Document score is Optional
141
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/haystack/components/samplers/top_p.py b/haystack/components/samplers/top_p.py
--- a/haystack/components/samplers/top_p.py
+++ b/haystack/components/samplers/top_p.py
@@ -16,8 +16,8 @@
"""
Implements top-p (nucleus) sampling for document filtering based on cumulative probability scores.
- This class provides functionality to filter a list of documents by selecting those whose scores fall
- within the top 'p' percent of the cumulative distribution. The method is useful for focusing on high-probability
+ This component provides functionality to filter a list of documents by selecting those whose scores fall
+ within the top 'p' percent of the cumulative distribution. It is useful for focusing on high-probability
documents while filtering out less relevant ones based on their assigned scores.
Usage example:
@@ -44,9 +44,9 @@
Creates an instance of TopPSampler.
:param top_p: Float between 0 and 1 representing the cumulative probability threshold for document selection.
- Defaults to 1.0, indicating no filtering (all documents are retained).
+ A value of 1.0 indicates no filtering (all documents are retained).
:param score_field: Name of the field in each document's metadata that contains the score. If None, the default
- document score field is used.
+ document score field is used.
"""
torch_import.check()
@@ -57,17 +57,14 @@
def run(self, documents: List[Document], top_p: Optional[float] = None):
"""
Filters documents using top-p sampling based on their scores.
+ If the specified top_p results in no documents being selected (especially in cases of a low top_p value), the
+ method returns the document with the highest similarity score.
:param documents: List of Document objects to be filtered.
:param top_p: Optional. A float to override the cumulative probability threshold set during initialization.
- If None, the class's top_p value is used.
- :return: A dictionary with a key 'documents' containing the list of filtered Document objects.
-
- This method applies top-p sampling to filter out documents. It selects those documents whose similarity scores
- are within the top 'p' percent of the cumulative distribution, based on the specified or default top_p value.
- If the specified top_p results in no documents being selected (especially in cases of a low top_p value), the
- method defaults to returning the document with the highest similarity score.
+ :returns: A dictionary with the following key:
+ - `documents`: List of Document objects that have been selected based on the top-p sampling.
:raises ValueError: If the top_p value is not within the range [0, 1].
"""
| {"golden_diff": "diff --git a/haystack/components/samplers/top_p.py b/haystack/components/samplers/top_p.py\n--- a/haystack/components/samplers/top_p.py\n+++ b/haystack/components/samplers/top_p.py\n@@ -16,8 +16,8 @@\n \"\"\"\n Implements top-p (nucleus) sampling for document filtering based on cumulative probability scores.\n \n- This class provides functionality to filter a list of documents by selecting those whose scores fall\n- within the top 'p' percent of the cumulative distribution. The method is useful for focusing on high-probability\n+ This component provides functionality to filter a list of documents by selecting those whose scores fall\n+ within the top 'p' percent of the cumulative distribution. It is useful for focusing on high-probability\n documents while filtering out less relevant ones based on their assigned scores.\n \n Usage example:\n@@ -44,9 +44,9 @@\n Creates an instance of TopPSampler.\n \n :param top_p: Float between 0 and 1 representing the cumulative probability threshold for document selection.\n- Defaults to 1.0, indicating no filtering (all documents are retained).\n+ A value of 1.0 indicates no filtering (all documents are retained).\n :param score_field: Name of the field in each document's metadata that contains the score. If None, the default\n- document score field is used.\n+ document score field is used.\n \"\"\"\n torch_import.check()\n \n@@ -57,17 +57,14 @@\n def run(self, documents: List[Document], top_p: Optional[float] = None):\n \"\"\"\n Filters documents using top-p sampling based on their scores.\n+ If the specified top_p results in no documents being selected (especially in cases of a low top_p value), the\n+ method returns the document with the highest similarity score.\n \n :param documents: List of Document objects to be filtered.\n :param top_p: Optional. A float to override the cumulative probability threshold set during initialization.\n- If None, the class's top_p value is used.\n- :return: A dictionary with a key 'documents' containing the list of filtered Document objects.\n-\n- This method applies top-p sampling to filter out documents. It selects those documents whose similarity scores\n- are within the top 'p' percent of the cumulative distribution, based on the specified or default top_p value.\n \n- If the specified top_p results in no documents being selected (especially in cases of a low top_p value), the\n- method defaults to returning the document with the highest similarity score.\n+ :returns: A dictionary with the following key:\n+ - `documents`: List of Document objects that have been selected based on the top-p sampling.\n \n :raises ValueError: If the top_p value is not within the range [0, 1].\n \"\"\"\n", "issue": "Docstrings - `haystack.components.samplers`\n\n", "before_files": [{"content": "import logging\nfrom typing import List, Optional\n\nfrom haystack import ComponentError, Document, component\nfrom haystack.lazy_imports import LazyImport\n\nlogger = logging.getLogger(__name__)\n\n\nwith LazyImport(message=\"Run 'pip install \\\"torch>=1.13\\\"'\") as torch_import:\n import torch\n\n\n@component\nclass TopPSampler:\n \"\"\"\n Implements top-p (nucleus) sampling for document filtering based on cumulative probability scores.\n\n This class provides functionality to filter a list of documents by selecting those whose scores fall\n within the top 'p' percent of the cumulative distribution. 
The method is useful for focusing on high-probability\n documents while filtering out less relevant ones based on their assigned scores.\n\n Usage example:\n\n ```python\n from haystack import Document\n from haystack.components.samplers import TopPSampler\n\n sampler = TopPSampler(top_p=0.95, score_field=\"similarity_score\")\n docs = [\n Document(text=\"Berlin\", meta={\"similarity_score\": -10.6}),\n Document(text=\"Belgrade\", meta={\"similarity_score\": -8.9}),\n Document(text=\"Sarajevo\", meta={\"similarity_score\": -4.6}),\n ]\n output = sampler.run(documents=docs)\n docs = output[\"documents\"]\n assert len(docs) == 1\n assert docs[0].content == \"Sarajevo\"\n ```\n \"\"\"\n\n def __init__(self, top_p: float = 1.0, score_field: Optional[str] = None):\n \"\"\"\n Creates an instance of TopPSampler.\n\n :param top_p: Float between 0 and 1 representing the cumulative probability threshold for document selection.\n Defaults to 1.0, indicating no filtering (all documents are retained).\n :param score_field: Name of the field in each document's metadata that contains the score. If None, the default\n document score field is used.\n \"\"\"\n torch_import.check()\n\n self.top_p = top_p\n self.score_field = score_field\n\n @component.output_types(documents=List[Document])\n def run(self, documents: List[Document], top_p: Optional[float] = None):\n \"\"\"\n Filters documents using top-p sampling based on their scores.\n\n :param documents: List of Document objects to be filtered.\n :param top_p: Optional. A float to override the cumulative probability threshold set during initialization.\n If None, the class's top_p value is used.\n :return: A dictionary with a key 'documents' containing the list of filtered Document objects.\n\n This method applies top-p sampling to filter out documents. It selects those documents whose similarity scores\n are within the top 'p' percent of the cumulative distribution, based on the specified or default top_p value.\n\n If the specified top_p results in no documents being selected (especially in cases of a low top_p value), the\n method defaults to returning the document with the highest similarity score.\n\n :raises ValueError: If the top_p value is not within the range [0, 1].\n \"\"\"\n if not documents:\n return {\"documents\": []}\n\n top_p = top_p or self.top_p or 1.0 # default to 1.0 if both are None\n\n if not 0 <= top_p <= 1:\n raise ValueError(f\"top_p must be between 0 and 1. 
Got {top_p}.\")\n\n similarity_scores = torch.tensor(self._collect_scores(documents), dtype=torch.float32)\n\n # Apply softmax normalization to the similarity scores\n probs = torch.nn.functional.softmax(similarity_scores, dim=-1)\n\n # Sort the probabilities and calculate their cumulative sum\n sorted_probs, sorted_indices = torch.sort(probs, descending=True)\n cumulative_probs = torch.cumsum(sorted_probs, dim=-1)\n\n # Check if the cumulative probabilities are close to top_p with a 1e-6 tolerance\n close_to_top_p = torch.isclose(cumulative_probs, torch.tensor(top_p, device=cumulative_probs.device), atol=1e-6)\n\n # Combine the close_to_top_p with original condition using logical OR\n condition = (cumulative_probs <= top_p) | close_to_top_p\n\n # Find the indices with cumulative probabilities that exceed top_p\n top_p_indices = torch.where(torch.BoolTensor(condition))[0]\n\n # Map the selected indices back to their original indices\n original_indices = sorted_indices[top_p_indices]\n selected_docs = [documents[i.item()] for i in original_indices]\n\n # If low p resulted in no documents being selected, then\n # return at least one document\n if not selected_docs:\n logger.warning(\n \"Top-p sampling with p=%s resulted in no documents being selected. \"\n \"Returning the document with the highest similarity score.\",\n top_p,\n )\n highest_prob_indices = torch.argsort(probs, descending=True)\n selected_docs = [documents[int(highest_prob_indices[0].item())]]\n\n return {\"documents\": selected_docs}\n\n def _collect_scores(self, documents: List[Document]) -> List[float]:\n \"\"\"\n Collect the scores from the documents' metadata.\n :param documents: List of Documents.\n :return: List of scores.\n \"\"\"\n if self.score_field:\n missing_scores_docs = [d for d in documents if self.score_field not in d.meta]\n if missing_scores_docs:\n missing_scores_docs_ids = [d.id for d in missing_scores_docs if d.id]\n raise ComponentError(\n f\"Score field '{self.score_field}' not found in metadata of documents \"\n f\"with IDs: {missing_scores_docs_ids}.\"\n f\"Make sure that all documents have a score field '{self.score_field}' in their metadata.\"\n )\n return [d.meta[self.score_field] for d in documents]\n else:\n missing_scores_docs = [d for d in documents if d.score is None]\n if missing_scores_docs:\n missing_scores_docs_ids = [d.id for d in missing_scores_docs if d.id]\n raise ComponentError(\n f\"Ensure all documents have a valid score value. These docs {missing_scores_docs_ids} don't.\"\n )\n return [d.score for d in documents] # type: ignore ## because Document score is Optional\n", "path": "haystack/components/samplers/top_p.py"}], "after_files": [{"content": "import logging\nfrom typing import List, Optional\n\nfrom haystack import ComponentError, Document, component\nfrom haystack.lazy_imports import LazyImport\n\nlogger = logging.getLogger(__name__)\n\n\nwith LazyImport(message=\"Run 'pip install \\\"torch>=1.13\\\"'\") as torch_import:\n import torch\n\n\n@component\nclass TopPSampler:\n \"\"\"\n Implements top-p (nucleus) sampling for document filtering based on cumulative probability scores.\n\n This component provides functionality to filter a list of documents by selecting those whose scores fall\n within the top 'p' percent of the cumulative distribution. 
It is useful for focusing on high-probability\n documents while filtering out less relevant ones based on their assigned scores.\n\n Usage example:\n\n ```python\n from haystack import Document\n from haystack.components.samplers import TopPSampler\n\n sampler = TopPSampler(top_p=0.95, score_field=\"similarity_score\")\n docs = [\n Document(text=\"Berlin\", meta={\"similarity_score\": -10.6}),\n Document(text=\"Belgrade\", meta={\"similarity_score\": -8.9}),\n Document(text=\"Sarajevo\", meta={\"similarity_score\": -4.6}),\n ]\n output = sampler.run(documents=docs)\n docs = output[\"documents\"]\n assert len(docs) == 1\n assert docs[0].content == \"Sarajevo\"\n ```\n \"\"\"\n\n def __init__(self, top_p: float = 1.0, score_field: Optional[str] = None):\n \"\"\"\n Creates an instance of TopPSampler.\n\n :param top_p: Float between 0 and 1 representing the cumulative probability threshold for document selection.\n A value of 1.0 indicates no filtering (all documents are retained).\n :param score_field: Name of the field in each document's metadata that contains the score. If None, the default\n document score field is used.\n \"\"\"\n torch_import.check()\n\n self.top_p = top_p\n self.score_field = score_field\n\n @component.output_types(documents=List[Document])\n def run(self, documents: List[Document], top_p: Optional[float] = None):\n \"\"\"\n Filters documents using top-p sampling based on their scores.\n If the specified top_p results in no documents being selected (especially in cases of a low top_p value), the\n method returns the document with the highest similarity score.\n\n :param documents: List of Document objects to be filtered.\n :param top_p: Optional. A float to override the cumulative probability threshold set during initialization.\n\n :returns: A dictionary with the following key:\n - `documents`: List of Document objects that have been selected based on the top-p sampling.\n\n :raises ValueError: If the top_p value is not within the range [0, 1].\n \"\"\"\n if not documents:\n return {\"documents\": []}\n\n top_p = top_p or self.top_p or 1.0 # default to 1.0 if both are None\n\n if not 0 <= top_p <= 1:\n raise ValueError(f\"top_p must be between 0 and 1. Got {top_p}.\")\n\n similarity_scores = torch.tensor(self._collect_scores(documents), dtype=torch.float32)\n\n # Apply softmax normalization to the similarity scores\n probs = torch.nn.functional.softmax(similarity_scores, dim=-1)\n\n # Sort the probabilities and calculate their cumulative sum\n sorted_probs, sorted_indices = torch.sort(probs, descending=True)\n cumulative_probs = torch.cumsum(sorted_probs, dim=-1)\n\n # Check if the cumulative probabilities are close to top_p with a 1e-6 tolerance\n close_to_top_p = torch.isclose(cumulative_probs, torch.tensor(top_p, device=cumulative_probs.device), atol=1e-6)\n\n # Combine the close_to_top_p with original condition using logical OR\n condition = (cumulative_probs <= top_p) | close_to_top_p\n\n # Find the indices with cumulative probabilities that exceed top_p\n top_p_indices = torch.where(torch.BoolTensor(condition))[0]\n\n # Map the selected indices back to their original indices\n original_indices = sorted_indices[top_p_indices]\n selected_docs = [documents[i.item()] for i in original_indices]\n\n # If low p resulted in no documents being selected, then\n # return at least one document\n if not selected_docs:\n logger.warning(\n \"Top-p sampling with p=%s resulted in no documents being selected. 
\"\n \"Returning the document with the highest similarity score.\",\n top_p,\n )\n highest_prob_indices = torch.argsort(probs, descending=True)\n selected_docs = [documents[int(highest_prob_indices[0].item())]]\n\n return {\"documents\": selected_docs}\n\n def _collect_scores(self, documents: List[Document]) -> List[float]:\n \"\"\"\n Collect the scores from the documents' metadata.\n :param documents: List of Documents.\n :return: List of scores.\n \"\"\"\n if self.score_field:\n missing_scores_docs = [d for d in documents if self.score_field not in d.meta]\n if missing_scores_docs:\n missing_scores_docs_ids = [d.id for d in missing_scores_docs if d.id]\n raise ComponentError(\n f\"Score field '{self.score_field}' not found in metadata of documents \"\n f\"with IDs: {missing_scores_docs_ids}.\"\n f\"Make sure that all documents have a score field '{self.score_field}' in their metadata.\"\n )\n return [d.meta[self.score_field] for d in documents]\n else:\n missing_scores_docs = [d for d in documents if d.score is None]\n if missing_scores_docs:\n missing_scores_docs_ids = [d.id for d in missing_scores_docs if d.id]\n raise ComponentError(\n f\"Ensure all documents have a valid score value. These docs {missing_scores_docs_ids} don't.\"\n )\n return [d.score for d in documents] # type: ignore ## because Document score is Optional\n", "path": "haystack/components/samplers/top_p.py"}]} | 1,950 | 624 |
gh_patches_debug_28163 | rasdani/github-patches | git_diff | plone__Products.CMFPlone-1614 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Allow to hide/show actions directly from the Actions control panel list
As @esteele mentioned in #1342
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `Products/CMFPlone/controlpanel/browser/actions.py`
Content:
```
1 from plone.autoform.form import AutoExtensibleForm
2 from Products.CMFCore.ActionInformation import Action
3 from Products.CMFCore.interfaces import IAction, IActionCategory
4 from Products.CMFCore.utils import getToolByName
5 from Products.CMFPlone import PloneMessageFactory as _
6 from Products.CMFPlone.interfaces import IActionSchema, INewActionSchema
7 from Products.Five import BrowserView
8 from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
9 from z3c.form import form
10 from zope.component import adapts
11 from zope.event import notify
12 from zope.interface import implements
13 from zope.lifecycleevent import ObjectCreatedEvent
14
15
16 class ActionListControlPanel(BrowserView):
17 """Control panel for the portal actions."""
18
19 template = ViewPageTemplateFile("actions.pt")
20
21 def __init__(self, context, request):
22 self.context = context
23 self.request = request
24 self.portal_actions = getToolByName(self.context, 'portal_actions')
25
26 def display(self):
27 actions = []
28 for category in self.portal_actions.objectValues():
29 if category.id == 'controlpanel':
30 continue
31 if not IActionCategory.providedBy(category):
32 continue
33 cat_infos = {
34 'id': category.id,
35 'title': category.title or category.id,
36 }
37 action_list = []
38 for action in category.objectValues():
39 if IAction.providedBy(action):
40 action_list.append({
41 'id': action.id,
42 'title': action.title,
43 'url': action.absolute_url(),
44 })
45 cat_infos['actions'] = action_list
46 actions.append(cat_infos)
47
48 self.actions = actions
49 return self.template()
50
51 def __call__(self):
52 if self.request.get('deleteaction'):
53 action_id = self.request['deleteaction']
54 category = self.portal_actions[self.request['category']]
55 category.manage_delObjects([action_id])
56 self.request.RESPONSE.redirect('@@actions-controlpanel')
57 return self.display()
58
59
60 class ActionControlPanelAdapter(object):
61 """Adapter for action form."""
62
63 adapts(IAction)
64 implements(IActionSchema)
65
66 def __init__(self, context):
67 self.context = context
68 self.current_category = self.context.getParentNode()
69
70 def get_category(self):
71 return self.current_category.id
72
73 def set_category(self, value):
74 portal_actions = getToolByName(self.context, 'portal_actions')
75 new_category = portal_actions.get(value)
76 cookie = self.current_category.manage_cutObjects(ids=[self.context.id])
77 new_category.manage_pasteObjects(cookie)
78
79 category = property(get_category, set_category)
80
81 def get_title(self):
82 return self.context.title
83
84 def set_title(self, value):
85 self.context._setPropValue('title', value)
86
87 title = property(get_title, set_title)
88
89 def get_description(self):
90 return self.context.description
91
92 def set_description(self, value):
93 self.context._setPropValue('description', value)
94
95 description = property(get_description, set_description)
96
97 def get_i18n_domain(self):
98 return self.context.i18n_domain
99
100 def set_i18n_domain(self, value):
101 self.context._setPropValue('i18n_domain', value)
102
103 i18n_domain = property(get_i18n_domain, set_i18n_domain)
104
105 def get_url_expr(self):
106 return self.context.url_expr
107
108 def set_url_expr(self, value):
109 self.context._setPropValue('url_expr', value)
110
111 url_expr = property(get_url_expr, set_url_expr)
112
113 def get_available_expr(self):
114 return self.context.available_expr
115
116 def set_available_expr(self, value):
117 self.context._setPropValue('available_expr', value)
118
119 available_expr = property(get_available_expr, set_available_expr)
120
121 def get_permissions(self):
122 return self.context.permissions
123
124 def set_permissions(self, value):
125 self.context._setPropValue('permissions', value)
126
127 permissions = property(get_permissions, set_permissions)
128
129 def get_visible(self):
130 return self.context.visible
131
132 def set_visible(self, value):
133 self.context._setPropValue('visible', value)
134
135 visible = property(get_visible, set_visible)
136
137 def get_position(self):
138 position = self.current_category.objectIds().index(self.context.id)
139 return position + 1
140
141 def set_position(self, value):
142 current_position = self.current_category.objectIds().index(
143 self.context.id)
144 all_actions = list(self.current_category._objects)
145 current_action = all_actions.pop(current_position)
146 new_position = value - 1
147 all_actions = all_actions[0:new_position] + [current_action] + \
148 all_actions[new_position:]
149 self.current_category._objects = tuple(all_actions)
150
151 position = property(get_position, set_position)
152
153
154 class ActionControlPanel(AutoExtensibleForm, form.EditForm):
155 """A form to edit a portal action."""
156
157 schema = IActionSchema
158 ignoreContext = False
159 label = _(u'Action Settings')
160
161
162 class NewActionControlPanel(AutoExtensibleForm, form.AddForm):
163 """A form to add a new portal action."""
164
165 schema = INewActionSchema
166 ignoreContext = True
167 label = _(u'New action')
168
169 def createAndAdd(self, data):
170 portal_actions = getToolByName(self.context, 'portal_actions')
171 category = portal_actions.get(data['category'])
172 action_id = data['id']
173 action = Action(
174 action_id,
175 title=action_id,
176 i18n_domain='plone',
177 permissions=['View'],
178 )
179 category[action_id] = action
180 notify(ObjectCreatedEvent(action))
181
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/Products/CMFPlone/controlpanel/browser/actions.py b/Products/CMFPlone/controlpanel/browser/actions.py
--- a/Products/CMFPlone/controlpanel/browser/actions.py
+++ b/Products/CMFPlone/controlpanel/browser/actions.py
@@ -41,6 +41,7 @@
'id': action.id,
'title': action.title,
'url': action.absolute_url(),
+ 'visible': action.visible,
})
cat_infos['actions'] = action_list
actions.append(cat_infos)
@@ -49,11 +50,21 @@
return self.template()
def __call__(self):
- if self.request.get('deleteaction'):
- action_id = self.request['deleteaction']
+ if self.request.get('delete'):
+ action_id = self.request['actionid']
category = self.portal_actions[self.request['category']]
category.manage_delObjects([action_id])
self.request.RESPONSE.redirect('@@actions-controlpanel')
+ if self.request.get('hide'):
+ action_id = self.request['actionid']
+ category = self.portal_actions[self.request['category']]
+ category[action_id].visible = False
+ self.request.RESPONSE.redirect('@@actions-controlpanel')
+ if self.request.get('show'):
+ action_id = self.request['actionid']
+ category = self.portal_actions[self.request['category']]
+ category[action_id].visible = True
+ self.request.RESPONSE.redirect('@@actions-controlpanel')
return self.display()
| {"golden_diff": "diff --git a/Products/CMFPlone/controlpanel/browser/actions.py b/Products/CMFPlone/controlpanel/browser/actions.py\n--- a/Products/CMFPlone/controlpanel/browser/actions.py\n+++ b/Products/CMFPlone/controlpanel/browser/actions.py\n@@ -41,6 +41,7 @@\n 'id': action.id,\n 'title': action.title,\n 'url': action.absolute_url(),\n+ 'visible': action.visible,\n })\n cat_infos['actions'] = action_list\n actions.append(cat_infos)\n@@ -49,11 +50,21 @@\n return self.template()\n \n def __call__(self):\n- if self.request.get('deleteaction'):\n- action_id = self.request['deleteaction']\n+ if self.request.get('delete'):\n+ action_id = self.request['actionid']\n category = self.portal_actions[self.request['category']]\n category.manage_delObjects([action_id])\n self.request.RESPONSE.redirect('@@actions-controlpanel')\n+ if self.request.get('hide'):\n+ action_id = self.request['actionid']\n+ category = self.portal_actions[self.request['category']]\n+ category[action_id].visible = False\n+ self.request.RESPONSE.redirect('@@actions-controlpanel')\n+ if self.request.get('show'):\n+ action_id = self.request['actionid']\n+ category = self.portal_actions[self.request['category']]\n+ category[action_id].visible = True\n+ self.request.RESPONSE.redirect('@@actions-controlpanel')\n return self.display()\n", "issue": "Allow to hide/show actions directly from the Actions control panel list\nAs @esteele mentionned in #1342\n\n", "before_files": [{"content": "from plone.autoform.form import AutoExtensibleForm\nfrom Products.CMFCore.ActionInformation import Action\nfrom Products.CMFCore.interfaces import IAction, IActionCategory\nfrom Products.CMFCore.utils import getToolByName\nfrom Products.CMFPlone import PloneMessageFactory as _\nfrom Products.CMFPlone.interfaces import IActionSchema, INewActionSchema\nfrom Products.Five import BrowserView\nfrom Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\nfrom z3c.form import form\nfrom zope.component import adapts\nfrom zope.event import notify\nfrom zope.interface import implements\nfrom zope.lifecycleevent import ObjectCreatedEvent\n\n\nclass ActionListControlPanel(BrowserView):\n \"\"\"Control panel for the portal actions.\"\"\"\n\n template = ViewPageTemplateFile(\"actions.pt\")\n\n def __init__(self, context, request):\n self.context = context\n self.request = request\n self.portal_actions = getToolByName(self.context, 'portal_actions')\n\n def display(self):\n actions = []\n for category in self.portal_actions.objectValues():\n if category.id == 'controlpanel':\n continue\n if not IActionCategory.providedBy(category):\n continue\n cat_infos = {\n 'id': category.id,\n 'title': category.title or category.id,\n }\n action_list = []\n for action in category.objectValues():\n if IAction.providedBy(action):\n action_list.append({\n 'id': action.id,\n 'title': action.title,\n 'url': action.absolute_url(),\n })\n cat_infos['actions'] = action_list\n actions.append(cat_infos)\n\n self.actions = actions\n return self.template()\n\n def __call__(self):\n if self.request.get('deleteaction'):\n action_id = self.request['deleteaction']\n category = self.portal_actions[self.request['category']]\n category.manage_delObjects([action_id])\n self.request.RESPONSE.redirect('@@actions-controlpanel')\n return self.display()\n\n\nclass ActionControlPanelAdapter(object):\n \"\"\"Adapter for action form.\"\"\"\n\n adapts(IAction)\n implements(IActionSchema)\n\n def __init__(self, context):\n self.context = context\n self.current_category = 
self.context.getParentNode()\n\n def get_category(self):\n return self.current_category.id\n\n def set_category(self, value):\n portal_actions = getToolByName(self.context, 'portal_actions')\n new_category = portal_actions.get(value)\n cookie = self.current_category.manage_cutObjects(ids=[self.context.id])\n new_category.manage_pasteObjects(cookie)\n\n category = property(get_category, set_category)\n\n def get_title(self):\n return self.context.title\n\n def set_title(self, value):\n self.context._setPropValue('title', value)\n\n title = property(get_title, set_title)\n\n def get_description(self):\n return self.context.description\n\n def set_description(self, value):\n self.context._setPropValue('description', value)\n\n description = property(get_description, set_description)\n\n def get_i18n_domain(self):\n return self.context.i18n_domain\n\n def set_i18n_domain(self, value):\n self.context._setPropValue('i18n_domain', value)\n\n i18n_domain = property(get_i18n_domain, set_i18n_domain)\n\n def get_url_expr(self):\n return self.context.url_expr\n\n def set_url_expr(self, value):\n self.context._setPropValue('url_expr', value)\n\n url_expr = property(get_url_expr, set_url_expr)\n\n def get_available_expr(self):\n return self.context.available_expr\n\n def set_available_expr(self, value):\n self.context._setPropValue('available_expr', value)\n\n available_expr = property(get_available_expr, set_available_expr)\n\n def get_permissions(self):\n return self.context.permissions\n\n def set_permissions(self, value):\n self.context._setPropValue('permissions', value)\n\n permissions = property(get_permissions, set_permissions)\n\n def get_visible(self):\n return self.context.visible\n\n def set_visible(self, value):\n self.context._setPropValue('visible', value)\n\n visible = property(get_visible, set_visible)\n\n def get_position(self):\n position = self.current_category.objectIds().index(self.context.id)\n return position + 1\n\n def set_position(self, value):\n current_position = self.current_category.objectIds().index(\n self.context.id)\n all_actions = list(self.current_category._objects)\n current_action = all_actions.pop(current_position)\n new_position = value - 1\n all_actions = all_actions[0:new_position] + [current_action] + \\\n all_actions[new_position:]\n self.current_category._objects = tuple(all_actions)\n\n position = property(get_position, set_position)\n\n\nclass ActionControlPanel(AutoExtensibleForm, form.EditForm):\n \"\"\"A form to edit a portal action.\"\"\"\n\n schema = IActionSchema\n ignoreContext = False\n label = _(u'Action Settings')\n\n\nclass NewActionControlPanel(AutoExtensibleForm, form.AddForm):\n \"\"\"A form to add a new portal action.\"\"\"\n\n schema = INewActionSchema\n ignoreContext = True\n label = _(u'New action')\n\n def createAndAdd(self, data):\n portal_actions = getToolByName(self.context, 'portal_actions')\n category = portal_actions.get(data['category'])\n action_id = data['id']\n action = Action(\n action_id,\n title=action_id,\n i18n_domain='plone',\n permissions=['View'],\n )\n category[action_id] = action\n notify(ObjectCreatedEvent(action))\n", "path": "Products/CMFPlone/controlpanel/browser/actions.py"}], "after_files": [{"content": "from plone.autoform.form import AutoExtensibleForm\nfrom Products.CMFCore.ActionInformation import Action\nfrom Products.CMFCore.interfaces import IAction, IActionCategory\nfrom Products.CMFCore.utils import getToolByName\nfrom Products.CMFPlone import PloneMessageFactory as _\nfrom Products.CMFPlone.interfaces 
import IActionSchema, INewActionSchema\nfrom Products.Five import BrowserView\nfrom Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\nfrom z3c.form import form\nfrom zope.component import adapts\nfrom zope.event import notify\nfrom zope.interface import implements\nfrom zope.lifecycleevent import ObjectCreatedEvent\n\n\nclass ActionListControlPanel(BrowserView):\n \"\"\"Control panel for the portal actions.\"\"\"\n\n template = ViewPageTemplateFile(\"actions.pt\")\n\n def __init__(self, context, request):\n self.context = context\n self.request = request\n self.portal_actions = getToolByName(self.context, 'portal_actions')\n\n def display(self):\n actions = []\n for category in self.portal_actions.objectValues():\n if category.id == 'controlpanel':\n continue\n if not IActionCategory.providedBy(category):\n continue\n cat_infos = {\n 'id': category.id,\n 'title': category.title or category.id,\n }\n action_list = []\n for action in category.objectValues():\n if IAction.providedBy(action):\n action_list.append({\n 'id': action.id,\n 'title': action.title,\n 'url': action.absolute_url(),\n 'visible': action.visible,\n })\n cat_infos['actions'] = action_list\n actions.append(cat_infos)\n\n self.actions = actions\n return self.template()\n\n def __call__(self):\n if self.request.get('delete'):\n action_id = self.request['actionid']\n category = self.portal_actions[self.request['category']]\n category.manage_delObjects([action_id])\n self.request.RESPONSE.redirect('@@actions-controlpanel')\n if self.request.get('hide'):\n action_id = self.request['actionid']\n category = self.portal_actions[self.request['category']]\n category[action_id].visible = False\n self.request.RESPONSE.redirect('@@actions-controlpanel')\n if self.request.get('show'):\n action_id = self.request['actionid']\n category = self.portal_actions[self.request['category']]\n category[action_id].visible = True\n self.request.RESPONSE.redirect('@@actions-controlpanel')\n return self.display()\n\n\nclass ActionControlPanelAdapter(object):\n \"\"\"Adapter for action form.\"\"\"\n\n adapts(IAction)\n implements(IActionSchema)\n\n def __init__(self, context):\n self.context = context\n self.current_category = self.context.getParentNode()\n\n def get_category(self):\n return self.current_category.id\n\n def set_category(self, value):\n portal_actions = getToolByName(self.context, 'portal_actions')\n new_category = portal_actions.get(value)\n cookie = self.current_category.manage_cutObjects(ids=[self.context.id])\n new_category.manage_pasteObjects(cookie)\n\n category = property(get_category, set_category)\n\n def get_title(self):\n return self.context.title\n\n def set_title(self, value):\n self.context._setPropValue('title', value)\n\n title = property(get_title, set_title)\n\n def get_description(self):\n return self.context.description\n\n def set_description(self, value):\n self.context._setPropValue('description', value)\n\n description = property(get_description, set_description)\n\n def get_i18n_domain(self):\n return self.context.i18n_domain\n\n def set_i18n_domain(self, value):\n self.context._setPropValue('i18n_domain', value)\n\n i18n_domain = property(get_i18n_domain, set_i18n_domain)\n\n def get_url_expr(self):\n return self.context.url_expr\n\n def set_url_expr(self, value):\n self.context._setPropValue('url_expr', value)\n\n url_expr = property(get_url_expr, set_url_expr)\n\n def get_available_expr(self):\n return self.context.available_expr\n\n def set_available_expr(self, value):\n 
self.context._setPropValue('available_expr', value)\n\n available_expr = property(get_available_expr, set_available_expr)\n\n def get_permissions(self):\n return self.context.permissions\n\n def set_permissions(self, value):\n self.context._setPropValue('permissions', value)\n\n permissions = property(get_permissions, set_permissions)\n\n def get_visible(self):\n return self.context.visible\n\n def set_visible(self, value):\n self.context._setPropValue('visible', value)\n\n visible = property(get_visible, set_visible)\n\n def get_position(self):\n position = self.current_category.objectIds().index(self.context.id)\n return position + 1\n\n def set_position(self, value):\n current_position = self.current_category.objectIds().index(\n self.context.id)\n all_actions = list(self.current_category._objects)\n current_action = all_actions.pop(current_position)\n new_position = value - 1\n all_actions = all_actions[0:new_position] + [current_action] + \\\n all_actions[new_position:]\n self.current_category._objects = tuple(all_actions)\n\n position = property(get_position, set_position)\n\n\nclass ActionControlPanel(AutoExtensibleForm, form.EditForm):\n \"\"\"A form to edit a portal action.\"\"\"\n\n schema = IActionSchema\n ignoreContext = False\n label = _(u'Action Settings')\n\n\nclass NewActionControlPanel(AutoExtensibleForm, form.AddForm):\n \"\"\"A form to add a new portal action.\"\"\"\n\n schema = INewActionSchema\n ignoreContext = True\n label = _(u'New action')\n\n def createAndAdd(self, data):\n portal_actions = getToolByName(self.context, 'portal_actions')\n category = portal_actions.get(data['category'])\n action_id = data['id']\n action = Action(\n action_id,\n title=action_id,\n i18n_domain='plone',\n permissions=['View'],\n )\n category[action_id] = action\n notify(ObjectCreatedEvent(action))\n", "path": "Products/CMFPlone/controlpanel/browser/actions.py"}]} | 1,965 | 338 |
gh_patches_debug_47888 | rasdani/github-patches | git_diff | keras-team__keras-7955 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Wrong result for cosine proximity: keras 2.0.8
# Conclusion: the Keras cosine proximity metric appears stuck at -1/3 #
As noted in numerous posts, Keras currently has a serious issue with cosine proximity:
https://github.com/fchollet/keras/issues/3031
https://github.com/fchollet/keras/issues/5046
Here is the code, run in a Jupyter notebook, for a simple test:
```
import keras
from keras.layers import Input, Dense
from keras.models import Model
import numpy as np
# --> print keras version
print keras.__version__
# --> compute average cosine between all angles samples
def computeMeanConsineAngle(x,y):
cosMean = 0
numSample = x.shape[0]
for i in xrange(numSample):
cosMean += np.dot(x[i,:],y[i,:])/np.sqrt(np.dot(x[i,:],x[i,:])*np.dot(y[i,:],y[i,:]))
return cosMean/float(numSample)
X = np.random.random((1000,3))
Y = X
inputs = Input(shape=(3,))
preds = Dense(3,activation='linear')(inputs)
model = Model(inputs=inputs,outputs=preds)
sgd=keras.optimizers.Adam(lr=1e-2)
model.compile(optimizer=sgd ,loss='mse',metrics=['cosine_proximity'])
model.fit(X,Y, batch_size=1000, epochs=500, shuffle=False)
pred = model.predict(X)
from sklearn.metrics import mean_squared_error
mse = mean_squared_error(X, pred)
%pylab
%matplotlib inline
plt.scatter(pred,Y)
print 'mse = ', mse
print computeMeanConsineAngle(pred, Y)
testX = np.array([[1,0]])
testY = np.array([[1,0]])
- computeMeanConsineAngle(testX,testY)
```
The printed result is
```
Epoch 500/500
1000/1000 [==============================] - 0s - loss: 7.1132e-04
- cosine_proximity: -0.3329
Using matplotlib backend: TkAgg
Populating the interactive namespace from numpy and matplotlib
mse = 0.000703760391565
0.998615947541
```
**So the true cosine proximity is actually 0.9986, but Keras reports a value near -1/3. Of course Keras uses the negative of the cosine proximity for minimization purposes, but that should then be -0.9986...; in any case, don't trust the output of the cosine proximity metric in Keras.**
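A minimal numeric sketch (added for illustration; not part of the original report) of why the per-sample *mean* over the last axis pins the metric at -1/3 for 3-dimensional targets, while the *sum* recovers the true cosine:
```python
import numpy as np

y = np.array([[0.6, 0.8, 0.0]])    # unit length, so it equals its own l2-normalized version
elementwise = y * y                # [[0.36, 0.64, 0.0]]

print(-elementwise.mean(axis=-1))  # [-0.3333...]  what -K.mean(...) reports, even for a perfect match
print(-elementwise.sum(axis=-1))   # [-1.0]        the actual negated cosine proximity
```
This is why replacing `K.mean` with `K.sum` in `cosine_proximity` restores the expected value.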
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `keras/losses.py`
Content:
```
1 from __future__ import absolute_import
2 import six
3 from . import backend as K
4 from .utils.generic_utils import deserialize_keras_object
5
6
7 # noinspection SpellCheckingInspection
8 def mean_squared_error(y_true, y_pred):
9 return K.mean(K.square(y_pred - y_true), axis=-1)
10
11
12 def mean_absolute_error(y_true, y_pred):
13 return K.mean(K.abs(y_pred - y_true), axis=-1)
14
15
16 def mean_absolute_percentage_error(y_true, y_pred):
17 diff = K.abs((y_true - y_pred) / K.clip(K.abs(y_true),
18 K.epsilon(),
19 None))
20 return 100. * K.mean(diff, axis=-1)
21
22
23 def mean_squared_logarithmic_error(y_true, y_pred):
24 first_log = K.log(K.clip(y_pred, K.epsilon(), None) + 1.)
25 second_log = K.log(K.clip(y_true, K.epsilon(), None) + 1.)
26 return K.mean(K.square(first_log - second_log), axis=-1)
27
28
29 def squared_hinge(y_true, y_pred):
30 return K.mean(K.square(K.maximum(1. - y_true * y_pred, 0.)), axis=-1)
31
32
33 def hinge(y_true, y_pred):
34 return K.mean(K.maximum(1. - y_true * y_pred, 0.), axis=-1)
35
36
37 def categorical_hinge(y_true, y_pred):
38 pos = K.sum(y_true * y_pred, axis=-1)
39 neg = K.max((1. - y_true) * y_pred, axis=-1)
40 return K.maximum(0., neg - pos + 1.)
41
42
43 def logcosh(y_true, y_pred):
44 def cosh(x):
45 return (K.exp(x) + K.exp(-x)) / 2
46 return K.mean(K.log(cosh(y_pred - y_true)), axis=-1)
47
48
49 def categorical_crossentropy(y_true, y_pred):
50 return K.categorical_crossentropy(y_true, y_pred)
51
52
53 def sparse_categorical_crossentropy(y_true, y_pred):
54 return K.sparse_categorical_crossentropy(y_true, y_pred)
55
56
57 def binary_crossentropy(y_true, y_pred):
58 return K.mean(K.binary_crossentropy(y_true, y_pred), axis=-1)
59
60
61 def kullback_leibler_divergence(y_true, y_pred):
62 y_true = K.clip(y_true, K.epsilon(), 1)
63 y_pred = K.clip(y_pred, K.epsilon(), 1)
64 return K.sum(y_true * K.log(y_true / y_pred), axis=-1)
65
66
67 def poisson(y_true, y_pred):
68 return K.mean(y_pred - y_true * K.log(y_pred + K.epsilon()), axis=-1)
69
70
71 def cosine_proximity(y_true, y_pred):
72 y_true = K.l2_normalize(y_true, axis=-1)
73 y_pred = K.l2_normalize(y_pred, axis=-1)
74 return -K.mean(y_true * y_pred, axis=-1)
75
76
77 # Aliases.
78
79 mse = MSE = mean_squared_error
80 mae = MAE = mean_absolute_error
81 mape = MAPE = mean_absolute_percentage_error
82 msle = MSLE = mean_squared_logarithmic_error
83 kld = KLD = kullback_leibler_divergence
84 cosine = cosine_proximity
85
86
87 def serialize(loss):
88 return loss.__name__
89
90
91 def deserialize(name, custom_objects=None):
92 return deserialize_keras_object(name,
93 module_objects=globals(),
94 custom_objects=custom_objects,
95 printable_module_name='loss function')
96
97
98 def get(identifier):
99 if identifier is None:
100 return None
101 if isinstance(identifier, six.string_types):
102 identifier = str(identifier)
103 return deserialize(identifier)
104 elif callable(identifier):
105 return identifier
106 else:
107 raise ValueError('Could not interpret '
108 'loss function identifier:', identifier)
109
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/keras/losses.py b/keras/losses.py
--- a/keras/losses.py
+++ b/keras/losses.py
@@ -71,7 +71,7 @@
def cosine_proximity(y_true, y_pred):
y_true = K.l2_normalize(y_true, axis=-1)
y_pred = K.l2_normalize(y_pred, axis=-1)
- return -K.mean(y_true * y_pred, axis=-1)
+ return -K.sum(y_true * y_pred, axis=-1)
# Aliases.
| {"golden_diff": "diff --git a/keras/losses.py b/keras/losses.py\n--- a/keras/losses.py\n+++ b/keras/losses.py\n@@ -71,7 +71,7 @@\n def cosine_proximity(y_true, y_pred):\n y_true = K.l2_normalize(y_true, axis=-1)\n y_pred = K.l2_normalize(y_pred, axis=-1)\n- return -K.mean(y_true * y_pred, axis=-1)\n+ return -K.sum(y_true * y_pred, axis=-1)\n \n \n # Aliases.\n", "issue": "Wrong result for cosine proximity: keras 2.0.8\n# Conclusion: Observation of keras cosine proximity stuck as -1/3 #\r\nAs noted by numerous post, Keras seriously currently has an issue with cosine proximity:\r\n\r\nhttps://github.com/fchollet/keras/issues/3031\r\nhttps://github.com/fchollet/keras/issues/5046\r\n\r\nHere is the code in jupyter notebook for simple test:\r\n\r\n\r\n```\r\nimport keras\r\nfrom keras.layers import Input, Dense\r\nfrom keras.models import Model\r\nimport numpy as np\r\n\r\n# --> print keras version\r\nprint keras.__version__\r\n\r\n# --> compute average cosine between all angles samples\r\ndef computeMeanConsineAngle(x,y):\r\n cosMean = 0\r\n numSample = x.shape[0]\r\n for i in xrange(numSample):\r\n cosMean += np.dot(x[i,:],y[i,:])/np.sqrt(np.dot(x[i,:],x[i,:])*np.dot(y[i,:],y[i,:]))\r\n \r\n return cosMean/float(numSample)\r\n\r\nX = np.random.random((1000,3))\r\nY = X\r\n\r\ninputs = Input(shape=(3,))\r\npreds = Dense(3,activation='linear')(inputs)\r\nmodel = Model(inputs=inputs,outputs=preds)\r\n\r\nsgd=keras.optimizers.Adam(lr=1e-2)\r\nmodel.compile(optimizer=sgd ,loss='mse',metrics=['cosine_proximity'])\r\nmodel.fit(X,Y, batch_size=1000, epochs=500, shuffle=False)\r\n\r\npred = model.predict(X)\r\n\r\nfrom sklearn.metrics import mean_squared_error\r\nmse = mean_squared_error(X, pred)\r\n\r\n\r\n%pylab\r\n%matplotlib inline\r\nplt.scatter(pred,Y)\r\n\r\nprint 'mse = ', mse\r\nprint computeMeanConsineAngle(pred, Y)\r\n\r\ntestX = np.array([[1,0]])\r\ntestY = np.array([[1,0]])\r\n- computeMeanConsineAngle(testX,testY)\r\n```\r\n\r\nThe printed result is \r\n```\r\nEpoch 500/500\r\n1000/1000 [==============================] - 0s - loss: 7.1132e-04 \r\n- cosine_proximity: -0.3329\r\nUsing matplotlib backend: TkAgg\r\nPopulating the interactive namespace from numpy and matplotlib\r\nmse = 0.000703760391565\r\n0.998615947541\r\n```\r\n\r\n**So the true cosine proximity is actually 0.9986, but keras shows near -1/3. Of course keras would use the negative of cosine proximity for minimization purpose, but it should be -0.9986.., in any case, don't trust the outcome of metric in keras cosine proximity**\r\n\n", "before_files": [{"content": "from __future__ import absolute_import\nimport six\nfrom . import backend as K\nfrom .utils.generic_utils import deserialize_keras_object\n\n\n# noinspection SpellCheckingInspection\ndef mean_squared_error(y_true, y_pred):\n return K.mean(K.square(y_pred - y_true), axis=-1)\n\n\ndef mean_absolute_error(y_true, y_pred):\n return K.mean(K.abs(y_pred - y_true), axis=-1)\n\n\ndef mean_absolute_percentage_error(y_true, y_pred):\n diff = K.abs((y_true - y_pred) / K.clip(K.abs(y_true),\n K.epsilon(),\n None))\n return 100. * K.mean(diff, axis=-1)\n\n\ndef mean_squared_logarithmic_error(y_true, y_pred):\n first_log = K.log(K.clip(y_pred, K.epsilon(), None) + 1.)\n second_log = K.log(K.clip(y_true, K.epsilon(), None) + 1.)\n return K.mean(K.square(first_log - second_log), axis=-1)\n\n\ndef squared_hinge(y_true, y_pred):\n return K.mean(K.square(K.maximum(1. - y_true * y_pred, 0.)), axis=-1)\n\n\ndef hinge(y_true, y_pred):\n return K.mean(K.maximum(1. 
- y_true * y_pred, 0.), axis=-1)\n\n\ndef categorical_hinge(y_true, y_pred):\n pos = K.sum(y_true * y_pred, axis=-1)\n neg = K.max((1. - y_true) * y_pred, axis=-1)\n return K.maximum(0., neg - pos + 1.)\n\n\ndef logcosh(y_true, y_pred):\n def cosh(x):\n return (K.exp(x) + K.exp(-x)) / 2\n return K.mean(K.log(cosh(y_pred - y_true)), axis=-1)\n\n\ndef categorical_crossentropy(y_true, y_pred):\n return K.categorical_crossentropy(y_true, y_pred)\n\n\ndef sparse_categorical_crossentropy(y_true, y_pred):\n return K.sparse_categorical_crossentropy(y_true, y_pred)\n\n\ndef binary_crossentropy(y_true, y_pred):\n return K.mean(K.binary_crossentropy(y_true, y_pred), axis=-1)\n\n\ndef kullback_leibler_divergence(y_true, y_pred):\n y_true = K.clip(y_true, K.epsilon(), 1)\n y_pred = K.clip(y_pred, K.epsilon(), 1)\n return K.sum(y_true * K.log(y_true / y_pred), axis=-1)\n\n\ndef poisson(y_true, y_pred):\n return K.mean(y_pred - y_true * K.log(y_pred + K.epsilon()), axis=-1)\n\n\ndef cosine_proximity(y_true, y_pred):\n y_true = K.l2_normalize(y_true, axis=-1)\n y_pred = K.l2_normalize(y_pred, axis=-1)\n return -K.mean(y_true * y_pred, axis=-1)\n\n\n# Aliases.\n\nmse = MSE = mean_squared_error\nmae = MAE = mean_absolute_error\nmape = MAPE = mean_absolute_percentage_error\nmsle = MSLE = mean_squared_logarithmic_error\nkld = KLD = kullback_leibler_divergence\ncosine = cosine_proximity\n\n\ndef serialize(loss):\n return loss.__name__\n\n\ndef deserialize(name, custom_objects=None):\n return deserialize_keras_object(name,\n module_objects=globals(),\n custom_objects=custom_objects,\n printable_module_name='loss function')\n\n\ndef get(identifier):\n if identifier is None:\n return None\n if isinstance(identifier, six.string_types):\n identifier = str(identifier)\n return deserialize(identifier)\n elif callable(identifier):\n return identifier\n else:\n raise ValueError('Could not interpret '\n 'loss function identifier:', identifier)\n", "path": "keras/losses.py"}], "after_files": [{"content": "from __future__ import absolute_import\nimport six\nfrom . import backend as K\nfrom .utils.generic_utils import deserialize_keras_object\n\n\n# noinspection SpellCheckingInspection\ndef mean_squared_error(y_true, y_pred):\n return K.mean(K.square(y_pred - y_true), axis=-1)\n\n\ndef mean_absolute_error(y_true, y_pred):\n return K.mean(K.abs(y_pred - y_true), axis=-1)\n\n\ndef mean_absolute_percentage_error(y_true, y_pred):\n diff = K.abs((y_true - y_pred) / K.clip(K.abs(y_true),\n K.epsilon(),\n None))\n return 100. * K.mean(diff, axis=-1)\n\n\ndef mean_squared_logarithmic_error(y_true, y_pred):\n first_log = K.log(K.clip(y_pred, K.epsilon(), None) + 1.)\n second_log = K.log(K.clip(y_true, K.epsilon(), None) + 1.)\n return K.mean(K.square(first_log - second_log), axis=-1)\n\n\ndef squared_hinge(y_true, y_pred):\n return K.mean(K.square(K.maximum(1. - y_true * y_pred, 0.)), axis=-1)\n\n\ndef hinge(y_true, y_pred):\n return K.mean(K.maximum(1. - y_true * y_pred, 0.), axis=-1)\n\n\ndef categorical_hinge(y_true, y_pred):\n pos = K.sum(y_true * y_pred, axis=-1)\n neg = K.max((1. 
- y_true) * y_pred, axis=-1)\n return K.maximum(0., neg - pos + 1.)\n\n\ndef logcosh(y_true, y_pred):\n def cosh(x):\n return (K.exp(x) + K.exp(-x)) / 2\n return K.mean(K.log(cosh(y_pred - y_true)), axis=-1)\n\n\ndef categorical_crossentropy(y_true, y_pred):\n return K.categorical_crossentropy(y_true, y_pred)\n\n\ndef sparse_categorical_crossentropy(y_true, y_pred):\n return K.sparse_categorical_crossentropy(y_true, y_pred)\n\n\ndef binary_crossentropy(y_true, y_pred):\n return K.mean(K.binary_crossentropy(y_true, y_pred), axis=-1)\n\n\ndef kullback_leibler_divergence(y_true, y_pred):\n y_true = K.clip(y_true, K.epsilon(), 1)\n y_pred = K.clip(y_pred, K.epsilon(), 1)\n return K.sum(y_true * K.log(y_true / y_pred), axis=-1)\n\n\ndef poisson(y_true, y_pred):\n return K.mean(y_pred - y_true * K.log(y_pred + K.epsilon()), axis=-1)\n\n\ndef cosine_proximity(y_true, y_pred):\n y_true = K.l2_normalize(y_true, axis=-1)\n y_pred = K.l2_normalize(y_pred, axis=-1)\n return -K.sum(y_true * y_pred, axis=-1)\n\n\n# Aliases.\n\nmse = MSE = mean_squared_error\nmae = MAE = mean_absolute_error\nmape = MAPE = mean_absolute_percentage_error\nmsle = MSLE = mean_squared_logarithmic_error\nkld = KLD = kullback_leibler_divergence\ncosine = cosine_proximity\n\n\ndef serialize(loss):\n return loss.__name__\n\n\ndef deserialize(name, custom_objects=None):\n return deserialize_keras_object(name,\n module_objects=globals(),\n custom_objects=custom_objects,\n printable_module_name='loss function')\n\n\ndef get(identifier):\n if identifier is None:\n return None\n if isinstance(identifier, six.string_types):\n identifier = str(identifier)\n return deserialize(identifier)\n elif callable(identifier):\n return identifier\n else:\n raise ValueError('Could not interpret '\n 'loss function identifier:', identifier)\n", "path": "keras/losses.py"}]} | 1,902 | 129 |
gh_patches_debug_10772 | rasdani/github-patches | git_diff | freedomofpress__securedrop-3082 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Delete setup.cfg
`setup.cfg` was used to set configuration options for `pytest` in the past, but since `pytest.ini` is now providing that configuration, it seems like `setup.cfg` should be deleted.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `testinfra/conftest.py`
Content:
```
1 """
2 Configuration for TestInfra test suite for SecureDrop.
3 Handles importing host-specific test vars, so test functions
4 can be reused across multiple hosts, with varied targets.
5
6 Vars should be placed in `testinfra/vars/<hostname>.yml`.
7 """
8
9 import os
10 import yaml
11
12
13 target_host = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
14 assert target_host != ""
15
16
17 def securedrop_import_testinfra_vars(hostname, with_header=False):
18 """
19 Import vars from a YAML file to populate tests with host-specific
20 values used in checks. For instance, the SecureDrop docroot will
21 be under /vagrant in development, but /var/www/securedrop in staging.
22
23 Vars must be stored in `testinfra/vars/<hostname>.yml`.
24 """
25 filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml")
26 with open(filepath, 'r') as f:
27 hostvars = yaml.safe_load(f)
28 # The directory Travis runs builds in varies by PR, so we cannot hardcode
29 # it in the YAML testvars. Read it from env var and concatenate.
30 if hostname.lower() == 'travis':
31 build_env = os.environ["TRAVIS_BUILD_DIR"]
32 hostvars['securedrop_code'] = build_env+"/securedrop"
33
34 if with_header:
35 hostvars = dict(securedrop_test_vars=hostvars)
36 return hostvars
37
38
39 def pytest_namespace():
40 return securedrop_import_testinfra_vars(target_host, with_header=True)
41
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/testinfra/conftest.py b/testinfra/conftest.py
--- a/testinfra/conftest.py
+++ b/testinfra/conftest.py
@@ -25,11 +25,6 @@
filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml")
with open(filepath, 'r') as f:
hostvars = yaml.safe_load(f)
- # The directory Travis runs builds in varies by PR, so we cannot hardcode
- # it in the YAML testvars. Read it from env var and concatenate.
- if hostname.lower() == 'travis':
- build_env = os.environ["TRAVIS_BUILD_DIR"]
- hostvars['securedrop_code'] = build_env+"/securedrop"
if with_header:
hostvars = dict(securedrop_test_vars=hostvars)
| {"golden_diff": "diff --git a/testinfra/conftest.py b/testinfra/conftest.py\n--- a/testinfra/conftest.py\n+++ b/testinfra/conftest.py\n@@ -25,11 +25,6 @@\n filepath = os.path.join(os.path.dirname(__file__), \"vars\", hostname+\".yml\")\n with open(filepath, 'r') as f:\n hostvars = yaml.safe_load(f)\n- # The directory Travis runs builds in varies by PR, so we cannot hardcode\n- # it in the YAML testvars. Read it from env var and concatenate.\n- if hostname.lower() == 'travis':\n- build_env = os.environ[\"TRAVIS_BUILD_DIR\"]\n- hostvars['securedrop_code'] = build_env+\"/securedrop\"\n \n if with_header:\n hostvars = dict(securedrop_test_vars=hostvars)\n", "issue": "Delete setup.cfg\n`setup.cfg` was used to set configuration options for `pytest` in the past, but since `pytest.ini` is now providing that configuration, it seems like `setup.cfg` should be deleted.\n", "before_files": [{"content": "\"\"\"\nConfiguration for TestInfra test suite for SecureDrop.\nHandles importing host-specific test vars, so test functions\ncan be reused across multiple hosts, with varied targets.\n\nVars should be placed in `testinfra/vars/<hostname>.yml`.\n\"\"\"\n\nimport os\nimport yaml\n\n\ntarget_host = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']\nassert target_host != \"\"\n\n\ndef securedrop_import_testinfra_vars(hostname, with_header=False):\n \"\"\"\n Import vars from a YAML file to populate tests with host-specific\n values used in checks. For instance, the SecureDrop docroot will\n be under /vagrant in development, but /var/www/securedrop in staging.\n\n Vars must be stored in `testinfra/vars/<hostname>.yml`.\n \"\"\"\n filepath = os.path.join(os.path.dirname(__file__), \"vars\", hostname+\".yml\")\n with open(filepath, 'r') as f:\n hostvars = yaml.safe_load(f)\n # The directory Travis runs builds in varies by PR, so we cannot hardcode\n # it in the YAML testvars. Read it from env var and concatenate.\n if hostname.lower() == 'travis':\n build_env = os.environ[\"TRAVIS_BUILD_DIR\"]\n hostvars['securedrop_code'] = build_env+\"/securedrop\"\n\n if with_header:\n hostvars = dict(securedrop_test_vars=hostvars)\n return hostvars\n\n\ndef pytest_namespace():\n return securedrop_import_testinfra_vars(target_host, with_header=True)\n", "path": "testinfra/conftest.py"}], "after_files": [{"content": "\"\"\"\nConfiguration for TestInfra test suite for SecureDrop.\nHandles importing host-specific test vars, so test functions\ncan be reused across multiple hosts, with varied targets.\n\nVars should be placed in `testinfra/vars/<hostname>.yml`.\n\"\"\"\n\nimport os\nimport yaml\n\n\ntarget_host = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']\nassert target_host != \"\"\n\n\ndef securedrop_import_testinfra_vars(hostname, with_header=False):\n \"\"\"\n Import vars from a YAML file to populate tests with host-specific\n values used in checks. For instance, the SecureDrop docroot will\n be under /vagrant in development, but /var/www/securedrop in staging.\n\n Vars must be stored in `testinfra/vars/<hostname>.yml`.\n \"\"\"\n filepath = os.path.join(os.path.dirname(__file__), \"vars\", hostname+\".yml\")\n with open(filepath, 'r') as f:\n hostvars = yaml.safe_load(f)\n\n if with_header:\n hostvars = dict(securedrop_test_vars=hostvars)\n return hostvars\n\n\ndef pytest_namespace():\n return securedrop_import_testinfra_vars(target_host, with_header=True)\n", "path": "testinfra/conftest.py"}]} | 705 | 186 |
gh_patches_debug_67394 | rasdani/github-patches | git_diff | pymodbus-dev__pymodbus-945 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AsyncioModbusSerialClient TypeError Coroutine
### Versions
* Python: 3.9
* OS: Ubuntu 20.04
* Pymodbus: `3.0.0dev4`
* Modbus Hardware (if used):
### Pymodbus Specific
* Server: None
* Client: rtu - async
### Description
When I try `3.0.0dev4` and the latest commit as of today, I get a TypeError in `serial.py` saying that the variable `coro` is not a coroutine. I am trying to create `AsyncModbusSerialClient(schedulers.ASYNC_IO, port=connPort, baudrate=connSpeed, method=connMethod, timeout=commTimeout)` inside an already running loop.
I don't think the coroutine was created correctly. What do you think?
Old:
`future = asyncio.run_coroutine_threadsafe(coro, loop=loop)`
Proposed:
` future = asyncio.run_coroutine_threadsafe(coro(), loop=loop)`
"""Create asyncio based asynchronous serial clients.
:param port: Serial port
:param framer: Modbus Framer
:param kwargs: Serial port options
:return: asyncio event loop and serial client
"""
try:
loop = kwargs.pop("loop", None) or asyncio.get_running_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
proto_cls = kwargs.get("proto_cls") or ModbusClientProtocol
client = AsyncioModbusSerialClient(port, proto_cls, framer, loop, **kwargs)
coro = client.connect
if not loop.is_running():
loop.run_until_complete(coro())
else: # loop is not asyncio.get_event_loop():
future = asyncio.run_coroutine_threadsafe(coro, loop=loop) <- `Fails here`
future.result()
return loop, client
```
``` py
def async_io_factory(port=None, framer=None, **kwargs):
"""Create asyncio based asynchronous serial clients.
:param port: Serial port
:param framer: Modbus Framer
:param kwargs: Serial port options
:return: asyncio event loop and serial client
"""
try:
loop = kwargs.pop("loop", None) or asyncio.get_running_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
proto_cls = kwargs.get("proto_cls") or ModbusClientProtocol
client = AsyncioModbusSerialClient(port, proto_cls, framer, loop, **kwargs)
coro = client.connect
if not loop.is_running():
loop.run_until_complete(coro())
else: # loop is not asyncio.get_event_loop():
future = asyncio.run_coroutine_threadsafe(coro, loop=loop) <- `Fails here`
future.result()
return loop, client
```
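As an illustration (this sketch is not from the original report), the distinction `run_coroutine_threadsafe` cares about is between a coroutine *function* and the coroutine *object* obtained by calling it:
```python
import asyncio
import threading

async def connect():
    # stand-in for AsyncioModbusSerialClient.connect
    await asyncio.sleep(0)
    return "connected"

loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()

coro = connect                                            # coroutine *function*
# asyncio.run_coroutine_threadsafe(coro, loop)            # TypeError: a coroutine object is required
future = asyncio.run_coroutine_threadsafe(coro(), loop)   # calling it first yields the coroutine object
print(future.result())                                    # "connected"
loop.call_soon_threadsafe(loop.stop)
```
`loop.run_until_complete(coro())` already calls it, which is why only the threadsafe branch fails.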
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pymodbus/client/asynchronous/factory/serial.py`
Content:
```
1 """Factory to create asynchronous serial clients based on twisted/asyncio."""
2 # pylint: disable=missing-type-doc
3 import logging
4 import asyncio
5
6 from pymodbus.client.asynchronous import schedulers
7 from pymodbus.client.asynchronous.thread import EventLoopThread
8 from pymodbus.client.asynchronous.async_io import (
9 ModbusClientProtocol,
10 AsyncioModbusSerialClient,
11 )
12 from pymodbus.factory import ClientDecoder
13
14
15 _logger = logging.getLogger(__name__)
16
17
18 def reactor_factory(port, framer, **kwargs):
19 """Create twisted serial asynchronous client.
20
21 :param port: Serial port
22 :param framer: Modbus Framer
23 :param kwargs:
24 :return: event_loop_thread and twisted serial client
25 """
26 from twisted.internet import reactor # pylint: disable=import-outside-toplevel
27 from twisted.internet.serialport import ( # pylint: disable=import-outside-toplevel
28 SerialPort,
29 )
30 from twisted.internet.protocol import ( # pylint: disable=import-outside-toplevel
31 ClientFactory,
32 )
33
34 class SerialClientFactory(ClientFactory):
35 """Define serial client factory."""
36
37 def __init__(self, framer, proto_cls):
38 """Remember things necessary for building a protocols."""
39 self.proto_cls = proto_cls
40 self.framer = framer
41
42 def buildProtocol(self): # pylint: disable=arguments-differ
43 """Create a protocol and start the reading cycle-"""
44 proto = self.proto_cls(self.framer)
45 proto.factory = self
46 return proto
47
48 class SerialModbusClient(SerialPort): # pylint: disable=abstract-method
49 """Define serial client."""
50
51 def __init__(self, framer, *args, **kwargs):
52 """Initialize the client and start listening on the serial port.
53
54 :param factory: The factory to build clients with
55 """
56 self.decoder = ClientDecoder()
57 proto_cls = kwargs.pop("proto_cls", None)
58 proto = SerialClientFactory(framer, proto_cls).buildProtocol()
59 SerialPort.__init__(self, proto, *args, **kwargs)
60
61 proto = EventLoopThread(
62 "reactor",
63 reactor.run, # pylint: disable=no-member
64 reactor.stop, # pylint: disable=no-member
65 installSignalHandlers=0,
66 )
67 ser_client = SerialModbusClient(framer, port, reactor, **kwargs)
68
69 return proto, ser_client
70
71
72 def async_io_factory(port=None, framer=None, **kwargs):
73 """Create asyncio based asynchronous serial clients.
74
75 :param port: Serial port
76 :param framer: Modbus Framer
77 :param kwargs: Serial port options
78 :return: asyncio event loop and serial client
79 """
80 try:
81 loop = kwargs.pop("loop", None) or asyncio.get_running_loop()
82 except RuntimeError:
83 loop = asyncio.new_event_loop()
84
85 proto_cls = kwargs.get("proto_cls") or ModbusClientProtocol
86
87 client = AsyncioModbusSerialClient(port, proto_cls, framer, loop, **kwargs)
88 coro = client.connect
89 if not loop.is_running():
90 loop.run_until_complete(coro())
91 else: # loop is not asyncio.get_event_loop():
92 future = asyncio.run_coroutine_threadsafe(coro, loop=loop)
93 future.result()
94
95 return loop, client
96
97
98 def get_factory(scheduler):
99 """Get protocol factory based on the backend scheduler being used.
100
101 :param scheduler: REACTOR/ASYNC_IO
102 :return:
103 :raises Exception: Failure
104 """
105 if scheduler == schedulers.REACTOR:
106 return reactor_factory
107 if scheduler == schedulers.ASYNC_IO:
108 return async_io_factory
109
110 txt = f"Allowed Schedulers: {schedulers.REACTOR}, {schedulers.ASYNC_IO}"
111 _logger.warning(txt)
112 txt = f'Invalid Scheduler "{scheduler}"'
113 raise Exception(txt)
114
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pymodbus/client/asynchronous/factory/serial.py b/pymodbus/client/asynchronous/factory/serial.py
--- a/pymodbus/client/asynchronous/factory/serial.py
+++ b/pymodbus/client/asynchronous/factory/serial.py
@@ -89,7 +89,7 @@
if not loop.is_running():
loop.run_until_complete(coro())
else: # loop is not asyncio.get_event_loop():
- future = asyncio.run_coroutine_threadsafe(coro, loop=loop)
+ future = asyncio.run_coroutine_threadsafe(coro(), loop=loop)
future.result()
return loop, client
| {"golden_diff": "diff --git a/pymodbus/client/asynchronous/factory/serial.py b/pymodbus/client/asynchronous/factory/serial.py\n--- a/pymodbus/client/asynchronous/factory/serial.py\n+++ b/pymodbus/client/asynchronous/factory/serial.py\n@@ -89,7 +89,7 @@\n if not loop.is_running():\n loop.run_until_complete(coro())\n else: # loop is not asyncio.get_event_loop():\n- future = asyncio.run_coroutine_threadsafe(coro, loop=loop)\n+ future = asyncio.run_coroutine_threadsafe(coro(), loop=loop)\n future.result()\n \n return loop, client\n", "issue": "AsyncioModbusSerialClient TypeError Coroutine\n### Versions\r\n\r\n* Python: 3.9\r\n* OS: Ubuntu 20.04\r\n* Pymodbus: `3.0.0dev4`\r\n* Modbus Hardware (if used): \r\n\r\n### Pymodbus Specific\r\n* Server: None\r\n* Client: rtu - async\r\n\r\n### Description\r\n\r\nWhen I try `3.0.0dev4` and the latest commit as of today. I am getting a type error that variable `coro` is not a coroutine in file `serial.py`. I am trying to create `AsyncModbusSerialClient(schedulers.ASYNC_IO, port=connPort, baudrate=connSpeed, method=connMethod, timeout=commTimeout)` in an existing running loop.\r\n\r\nI don't think the coroutine was created correctly. What do you think?\r\n\r\nOld:\r\n`future = asyncio.run_coroutine_threadsafe(coro, loop=loop)` \r\n\r\nProposed:\r\n` future = asyncio.run_coroutine_threadsafe(coro(), loop=loop)`\r\n \"\"\"Create asyncio based asynchronous serial clients.\r\n :param port: Serial port\r\n :param framer: Modbus Framer\r\n :param kwargs: Serial port options\r\n :return: asyncio event loop and serial client\r\n \"\"\"\r\n try:\r\n loop = kwargs.pop(\"loop\", None) or asyncio.get_running_loop()\r\n except RuntimeError:\r\n loop = asyncio.new_event_loop()\r\n\r\n proto_cls = kwargs.get(\"proto_cls\") or ModbusClientProtocol\r\n\r\n client = AsyncioModbusSerialClient(port, proto_cls, framer, loop, **kwargs)\r\n coro = client.connect\r\n if not loop.is_running():\r\n loop.run_until_complete(coro())\r\n else: # loop is not asyncio.get_event_loop():\r\n future = asyncio.run_coroutine_threadsafe(coro, loop=loop) <- `Fails here`\r\n future.result()\r\n\r\n return loop, client\r\n```\r\n``` py\r\ndef async_io_factory(port=None, framer=None, **kwargs):\r\n \"\"\"Create asyncio based asynchronous serial clients.\r\n :param port: Serial port\r\n :param framer: Modbus Framer\r\n :param kwargs: Serial port options\r\n :return: asyncio event loop and serial client\r\n \"\"\"\r\n try:\r\n loop = kwargs.pop(\"loop\", None) or asyncio.get_running_loop()\r\n except RuntimeError:\r\n loop = asyncio.new_event_loop()\r\n\r\n proto_cls = kwargs.get(\"proto_cls\") or ModbusClientProtocol\r\n\r\n client = AsyncioModbusSerialClient(port, proto_cls, framer, loop, **kwargs)\r\n coro = client.connect\r\n if not loop.is_running():\r\n loop.run_until_complete(coro())\r\n else: # loop is not asyncio.get_event_loop():\r\n future = asyncio.run_coroutine_threadsafe(coro, loop=loop) <- `Fails here`\r\n future.result()\r\n\r\n return loop, client\r\n```\r\n\n", "before_files": [{"content": "\"\"\"Factory to create asynchronous serial clients based on twisted/asyncio.\"\"\"\n# pylint: disable=missing-type-doc\nimport logging\nimport asyncio\n\nfrom pymodbus.client.asynchronous import schedulers\nfrom pymodbus.client.asynchronous.thread import EventLoopThread\nfrom pymodbus.client.asynchronous.async_io import (\n ModbusClientProtocol,\n AsyncioModbusSerialClient,\n)\nfrom pymodbus.factory import ClientDecoder\n\n\n_logger = logging.getLogger(__name__)\n\n\ndef reactor_factory(port, 
framer, **kwargs):\n \"\"\"Create twisted serial asynchronous client.\n\n :param port: Serial port\n :param framer: Modbus Framer\n :param kwargs:\n :return: event_loop_thread and twisted serial client\n \"\"\"\n from twisted.internet import reactor # pylint: disable=import-outside-toplevel\n from twisted.internet.serialport import ( # pylint: disable=import-outside-toplevel\n SerialPort,\n )\n from twisted.internet.protocol import ( # pylint: disable=import-outside-toplevel\n ClientFactory,\n )\n\n class SerialClientFactory(ClientFactory):\n \"\"\"Define serial client factory.\"\"\"\n\n def __init__(self, framer, proto_cls):\n \"\"\"Remember things necessary for building a protocols.\"\"\"\n self.proto_cls = proto_cls\n self.framer = framer\n\n def buildProtocol(self): # pylint: disable=arguments-differ\n \"\"\"Create a protocol and start the reading cycle-\"\"\"\n proto = self.proto_cls(self.framer)\n proto.factory = self\n return proto\n\n class SerialModbusClient(SerialPort): # pylint: disable=abstract-method\n \"\"\"Define serial client.\"\"\"\n\n def __init__(self, framer, *args, **kwargs):\n \"\"\"Initialize the client and start listening on the serial port.\n\n :param factory: The factory to build clients with\n \"\"\"\n self.decoder = ClientDecoder()\n proto_cls = kwargs.pop(\"proto_cls\", None)\n proto = SerialClientFactory(framer, proto_cls).buildProtocol()\n SerialPort.__init__(self, proto, *args, **kwargs)\n\n proto = EventLoopThread(\n \"reactor\",\n reactor.run, # pylint: disable=no-member\n reactor.stop, # pylint: disable=no-member\n installSignalHandlers=0,\n )\n ser_client = SerialModbusClient(framer, port, reactor, **kwargs)\n\n return proto, ser_client\n\n\ndef async_io_factory(port=None, framer=None, **kwargs):\n \"\"\"Create asyncio based asynchronous serial clients.\n\n :param port: Serial port\n :param framer: Modbus Framer\n :param kwargs: Serial port options\n :return: asyncio event loop and serial client\n \"\"\"\n try:\n loop = kwargs.pop(\"loop\", None) or asyncio.get_running_loop()\n except RuntimeError:\n loop = asyncio.new_event_loop()\n\n proto_cls = kwargs.get(\"proto_cls\") or ModbusClientProtocol\n\n client = AsyncioModbusSerialClient(port, proto_cls, framer, loop, **kwargs)\n coro = client.connect\n if not loop.is_running():\n loop.run_until_complete(coro())\n else: # loop is not asyncio.get_event_loop():\n future = asyncio.run_coroutine_threadsafe(coro, loop=loop)\n future.result()\n\n return loop, client\n\n\ndef get_factory(scheduler):\n \"\"\"Get protocol factory based on the backend scheduler being used.\n\n :param scheduler: REACTOR/ASYNC_IO\n :return:\n :raises Exception: Failure\n \"\"\"\n if scheduler == schedulers.REACTOR:\n return reactor_factory\n if scheduler == schedulers.ASYNC_IO:\n return async_io_factory\n\n txt = f\"Allowed Schedulers: {schedulers.REACTOR}, {schedulers.ASYNC_IO}\"\n _logger.warning(txt)\n txt = f'Invalid Scheduler \"{scheduler}\"'\n raise Exception(txt)\n", "path": "pymodbus/client/asynchronous/factory/serial.py"}], "after_files": [{"content": "\"\"\"Factory to create asynchronous serial clients based on twisted/asyncio.\"\"\"\n# pylint: disable=missing-type-doc\nimport logging\nimport asyncio\n\nfrom pymodbus.client.asynchronous import schedulers\nfrom pymodbus.client.asynchronous.thread import EventLoopThread\nfrom pymodbus.client.asynchronous.async_io import (\n ModbusClientProtocol,\n AsyncioModbusSerialClient,\n)\nfrom pymodbus.factory import ClientDecoder\n\n\n_logger = logging.getLogger(__name__)\n\n\ndef 
reactor_factory(port, framer, **kwargs):\n \"\"\"Create twisted serial asynchronous client.\n\n :param port: Serial port\n :param framer: Modbus Framer\n :param kwargs:\n :return: event_loop_thread and twisted serial client\n \"\"\"\n from twisted.internet import reactor # pylint: disable=import-outside-toplevel\n from twisted.internet.serialport import ( # pylint: disable=import-outside-toplevel\n SerialPort,\n )\n from twisted.internet.protocol import ( # pylint: disable=import-outside-toplevel\n ClientFactory,\n )\n\n class SerialClientFactory(ClientFactory):\n \"\"\"Define serial client factory.\"\"\"\n\n def __init__(self, framer, proto_cls):\n \"\"\"Remember things necessary for building a protocols.\"\"\"\n self.proto_cls = proto_cls\n self.framer = framer\n\n def buildProtocol(self): # pylint: disable=arguments-differ\n \"\"\"Create a protocol and start the reading cycle-\"\"\"\n proto = self.proto_cls(self.framer)\n proto.factory = self\n return proto\n\n class SerialModbusClient(SerialPort): # pylint: disable=abstract-method\n \"\"\"Define serial client.\"\"\"\n\n def __init__(self, framer, *args, **kwargs):\n \"\"\"Initialize the client and start listening on the serial port.\n\n :param factory: The factory to build clients with\n \"\"\"\n self.decoder = ClientDecoder()\n proto_cls = kwargs.pop(\"proto_cls\", None)\n proto = SerialClientFactory(framer, proto_cls).buildProtocol()\n SerialPort.__init__(self, proto, *args, **kwargs)\n\n proto = EventLoopThread(\n \"reactor\",\n reactor.run, # pylint: disable=no-member\n reactor.stop, # pylint: disable=no-member\n installSignalHandlers=0,\n )\n ser_client = SerialModbusClient(framer, port, reactor, **kwargs)\n\n return proto, ser_client\n\n\ndef async_io_factory(port=None, framer=None, **kwargs):\n \"\"\"Create asyncio based asynchronous serial clients.\n\n :param port: Serial port\n :param framer: Modbus Framer\n :param kwargs: Serial port options\n :return: asyncio event loop and serial client\n \"\"\"\n try:\n loop = kwargs.pop(\"loop\", None) or asyncio.get_running_loop()\n except RuntimeError:\n loop = asyncio.new_event_loop()\n\n proto_cls = kwargs.get(\"proto_cls\") or ModbusClientProtocol\n\n client = AsyncioModbusSerialClient(port, proto_cls, framer, loop, **kwargs)\n coro = client.connect\n if not loop.is_running():\n loop.run_until_complete(coro())\n else: # loop is not asyncio.get_event_loop():\n future = asyncio.run_coroutine_threadsafe(coro(), loop=loop)\n future.result()\n\n return loop, client\n\n\ndef get_factory(scheduler):\n \"\"\"Get protocol factory based on the backend scheduler being used.\n\n :param scheduler: REACTOR/ASYNC_IO\n :return:\n :raises Exception: Failure\n \"\"\"\n if scheduler == schedulers.REACTOR:\n return reactor_factory\n if scheduler == schedulers.ASYNC_IO:\n return async_io_factory\n\n txt = f\"Allowed Schedulers: {schedulers.REACTOR}, {schedulers.ASYNC_IO}\"\n _logger.warning(txt)\n txt = f'Invalid Scheduler \"{scheduler}\"'\n raise Exception(txt)\n", "path": "pymodbus/client/asynchronous/factory/serial.py"}]} | 1,967 | 145 |
gh_patches_debug_3124 | rasdani/github-patches | git_diff | spack__spack-36099 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
LLNL Cardioid homepage no longer exists
https://github.com/spack/spack/blob/fe5865da0d4ee4480a85ee1257cb310c036b0c88/var/spack/repos/builtin/packages/cardioid/package.py#L12
@rblake-llnl it looks like Cardioid's [home page](https://baasic.llnl.gov/comp-bio/cardioid-code.php) redirects to a 404 these days. You're listed as the maintainer. Has the home of cardioid moved?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `var/spack/repos/builtin/packages/cardioid/package.py`
Content:
```
1 # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
2 # Spack Project Developers. See the top-level COPYRIGHT file for details.
3 #
4 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
5
6 from spack.package import *
7
8
9 class Cardioid(CMakePackage):
10 """Cardiac simulation suite."""
11
12 homepage = "https://baasic.llnl.gov/comp-bio/cardioid-code.php"
13 git = "https://github.com/LLNL/cardioid.git"
14 maintainers("rblake-llnl")
15
16 version("develop", branch="master")
17 version("elecfem", branch="elec-fem")
18
19 variant("cuda", default=False, description="Build with cuda support")
20 variant("mfem", default=False, description="Build with mfem support")
21
22 depends_on("blas")
23 depends_on("lapack")
24 depends_on("mpi")
25 depends_on("cuda", when="+cuda")
26 depends_on("mfem+mpi+superlu-dist+lapack", when="+mfem")
27 depends_on("hypre+cuda", when="+mfem+cuda")
28 depends_on("[email protected]:", type="build")
29 depends_on("perl", type="build")
30
31 def cmake_args(self):
32 spec = self.spec
33 args = [
34 "-DLAPACK_LIB:PATH=" + ";".join(spec["lapack"].libs.libraries),
35 "-DBLAS_LIB:PATH=" + ";".join(spec["blas"].libs.libraries),
36 "-DENABLE_OPENMP:BOOL=ON",
37 "-DENABLE_MPI:BOOL=ON",
38 "-DENABLE_FIND_MPI:BOOL=OFF",
39 "-DMPI_C_COMPILER:STRING=" + spec["mpi"].mpicc,
40 "-DMPI_CXX_COMPILER:STRING=" + spec["mpi"].mpicxx,
41 "-DCMAKE_C_COMPILER:STRING=" + spec["mpi"].mpicc,
42 "-DCMAKE_CXX_COMPILER:STRING=" + spec["mpi"].mpicxx,
43 ]
44
45 if "+cuda" in self.spec:
46 args.append("-DENABLE_CUDA:BOOL=ON")
47 args.append("-DCUDA_TOOLKIT_ROOT:PATH=" + spec["cuda"].prefix)
48 else:
49 args.append("-DENABLE_CUDA:BOOL=OFF")
50
51 if "+mfem" in self.spec:
52 args.append("-DMFEM_DIR:PATH=" + spec["mfem"].prefix)
53 return args
54
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/var/spack/repos/builtin/packages/cardioid/package.py b/var/spack/repos/builtin/packages/cardioid/package.py
--- a/var/spack/repos/builtin/packages/cardioid/package.py
+++ b/var/spack/repos/builtin/packages/cardioid/package.py
@@ -9,7 +9,7 @@
class Cardioid(CMakePackage):
"""Cardiac simulation suite."""
- homepage = "https://baasic.llnl.gov/comp-bio/cardioid-code.php"
+ homepage = "https://baasic.llnl.gov/comp-bio/cardioid-code"
git = "https://github.com/LLNL/cardioid.git"
maintainers("rblake-llnl")
| {"golden_diff": "diff --git a/var/spack/repos/builtin/packages/cardioid/package.py b/var/spack/repos/builtin/packages/cardioid/package.py\n--- a/var/spack/repos/builtin/packages/cardioid/package.py\n+++ b/var/spack/repos/builtin/packages/cardioid/package.py\n@@ -9,7 +9,7 @@\n class Cardioid(CMakePackage):\n \"\"\"Cardiac simulation suite.\"\"\"\n \n- homepage = \"https://baasic.llnl.gov/comp-bio/cardioid-code.php\"\n+ homepage = \"https://baasic.llnl.gov/comp-bio/cardioid-code\"\n git = \"https://github.com/LLNL/cardioid.git\"\n maintainers(\"rblake-llnl\")\n", "issue": "LLNL Cardioid homepage no longer exists\nhttps://github.com/spack/spack/blob/fe5865da0d4ee4480a85ee1257cb310c036b0c88/var/spack/repos/builtin/packages/cardioid/package.py#L12\r\n\r\n@rblake-llnl it looks like Cardioid's [home page](https://baasic.llnl.gov/comp-bio/cardioid-code.php) redirects to a 404 these days. You're listed as the maintainer. Has the home of cardioid moved?\n", "before_files": [{"content": "# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nfrom spack.package import *\n\n\nclass Cardioid(CMakePackage):\n \"\"\"Cardiac simulation suite.\"\"\"\n\n homepage = \"https://baasic.llnl.gov/comp-bio/cardioid-code.php\"\n git = \"https://github.com/LLNL/cardioid.git\"\n maintainers(\"rblake-llnl\")\n\n version(\"develop\", branch=\"master\")\n version(\"elecfem\", branch=\"elec-fem\")\n\n variant(\"cuda\", default=False, description=\"Build with cuda support\")\n variant(\"mfem\", default=False, description=\"Build with mfem support\")\n\n depends_on(\"blas\")\n depends_on(\"lapack\")\n depends_on(\"mpi\")\n depends_on(\"cuda\", when=\"+cuda\")\n depends_on(\"mfem+mpi+superlu-dist+lapack\", when=\"+mfem\")\n depends_on(\"hypre+cuda\", when=\"+mfem+cuda\")\n depends_on(\"[email protected]:\", type=\"build\")\n depends_on(\"perl\", type=\"build\")\n\n def cmake_args(self):\n spec = self.spec\n args = [\n \"-DLAPACK_LIB:PATH=\" + \";\".join(spec[\"lapack\"].libs.libraries),\n \"-DBLAS_LIB:PATH=\" + \";\".join(spec[\"blas\"].libs.libraries),\n \"-DENABLE_OPENMP:BOOL=ON\",\n \"-DENABLE_MPI:BOOL=ON\",\n \"-DENABLE_FIND_MPI:BOOL=OFF\",\n \"-DMPI_C_COMPILER:STRING=\" + spec[\"mpi\"].mpicc,\n \"-DMPI_CXX_COMPILER:STRING=\" + spec[\"mpi\"].mpicxx,\n \"-DCMAKE_C_COMPILER:STRING=\" + spec[\"mpi\"].mpicc,\n \"-DCMAKE_CXX_COMPILER:STRING=\" + spec[\"mpi\"].mpicxx,\n ]\n\n if \"+cuda\" in self.spec:\n args.append(\"-DENABLE_CUDA:BOOL=ON\")\n args.append(\"-DCUDA_TOOLKIT_ROOT:PATH=\" + spec[\"cuda\"].prefix)\n else:\n args.append(\"-DENABLE_CUDA:BOOL=OFF\")\n\n if \"+mfem\" in self.spec:\n args.append(\"-DMFEM_DIR:PATH=\" + spec[\"mfem\"].prefix)\n return args\n", "path": "var/spack/repos/builtin/packages/cardioid/package.py"}], "after_files": [{"content": "# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. 
See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nfrom spack.package import *\n\n\nclass Cardioid(CMakePackage):\n \"\"\"Cardiac simulation suite.\"\"\"\n\n homepage = \"https://baasic.llnl.gov/comp-bio/cardioid-code\"\n git = \"https://github.com/LLNL/cardioid.git\"\n maintainers(\"rblake-llnl\")\n\n version(\"develop\", branch=\"master\")\n version(\"elecfem\", branch=\"elec-fem\")\n\n variant(\"cuda\", default=False, description=\"Build with cuda support\")\n variant(\"mfem\", default=False, description=\"Build with mfem support\")\n\n depends_on(\"blas\")\n depends_on(\"lapack\")\n depends_on(\"mpi\")\n depends_on(\"cuda\", when=\"+cuda\")\n depends_on(\"mfem+mpi+superlu-dist+lapack\", when=\"+mfem\")\n depends_on(\"hypre+cuda\", when=\"+mfem+cuda\")\n depends_on(\"[email protected]:\", type=\"build\")\n depends_on(\"perl\", type=\"build\")\n\n def cmake_args(self):\n spec = self.spec\n args = [\n \"-DLAPACK_LIB:PATH=\" + \";\".join(spec[\"lapack\"].libs.libraries),\n \"-DBLAS_LIB:PATH=\" + \";\".join(spec[\"blas\"].libs.libraries),\n \"-DENABLE_OPENMP:BOOL=ON\",\n \"-DENABLE_MPI:BOOL=ON\",\n \"-DENABLE_FIND_MPI:BOOL=OFF\",\n \"-DMPI_C_COMPILER:STRING=\" + spec[\"mpi\"].mpicc,\n \"-DMPI_CXX_COMPILER:STRING=\" + spec[\"mpi\"].mpicxx,\n \"-DCMAKE_C_COMPILER:STRING=\" + spec[\"mpi\"].mpicc,\n \"-DCMAKE_CXX_COMPILER:STRING=\" + spec[\"mpi\"].mpicxx,\n ]\n\n if \"+cuda\" in self.spec:\n args.append(\"-DENABLE_CUDA:BOOL=ON\")\n args.append(\"-DCUDA_TOOLKIT_ROOT:PATH=\" + spec[\"cuda\"].prefix)\n else:\n args.append(\"-DENABLE_CUDA:BOOL=OFF\")\n\n if \"+mfem\" in self.spec:\n args.append(\"-DMFEM_DIR:PATH=\" + spec[\"mfem\"].prefix)\n return args\n", "path": "var/spack/repos/builtin/packages/cardioid/package.py"}]} | 1,027 | 156 |
gh_patches_debug_37491 | rasdani/github-patches | git_diff | mitmproxy__mitmproxy-3404 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
query string arrays are not fully displayed
##### Steps to reproduce the problem:
1. visit through mitmproxy/mitmdump e.g. http://example.com/?first=value&arr[]=foo+bar&arr[]=baz
2. Check the query parameters in the request
3. Notice that they contain more data than mitmproxy/mitmdump shows
##### Any other comments? What have you tried so far?
The following script shows all the data:
```
#!/usr/bin/env python3
from urllib.parse import urlparse, parse_qs
url = "http://example.com/?first=value&arr[]=foo+bar&arr[]=baz"
parts = urlparse(url)
print(parse_qs(parts.query))
```
Output:
`{'first': ['value'], 'arr[]': ['foo bar', 'baz']}`
But mitmproxy/mitmdump only shows:
```
first: value
arr[]: foo bar
```
##### System information
<!-- Paste the output of "mitmproxy --version" here. -->
Mitmproxy: 3.0.4
Python: 3.5.2
OpenSSL: OpenSSL 1.0.2g 1 Mar 2016
Platform: Linux-4.10.0-42-generic-x86_64-with-Ubuntu-16.04-xenial
<!-- Please use the mitmproxy forums (https://discourse.mitmproxy.org/) for support/how-to questions. Thanks! :) -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mitmproxy/contentviews/base.py`
Content:
```
1 # Default view cutoff *in lines*
2 import typing
3
4 KEY_MAX = 30
5
6 TTextType = typing.Union[str, bytes] # FIXME: This should be either bytes or str ultimately.
7 TViewLine = typing.List[typing.Tuple[str, TTextType]]
8 TViewResult = typing.Tuple[str, typing.Iterator[TViewLine]]
9
10
11 class View:
12 name: str = None
13 content_types: typing.List[str] = []
14
15 def __call__(self, data: bytes, **metadata) -> TViewResult:
16 """
17 Transform raw data into human-readable output.
18
19 Args:
20 data: the data to decode/format.
21 metadata: optional keyword-only arguments for metadata. Implementations must not
22 rely on a given argument being present.
23
24 Returns:
25 A (description, content generator) tuple.
26
27 The content generator yields lists of (style, text) tuples, where each list represents
28 a single line. ``text`` is a unfiltered byte string which may need to be escaped,
29 depending on the used output.
30
31 Caveats:
32 The content generator must not yield tuples of tuples,
33 because urwid cannot process that. You have to yield a *list* of tuples per line.
34 """
35 raise NotImplementedError() # pragma: no cover
36
37
38 def format_dict(
39 d: typing.Mapping[TTextType, TTextType]
40 ) -> typing.Iterator[TViewLine]:
41 """
42 Helper function that transforms the given dictionary into a list of
43 [
44 ("key", key )
45 ("value", value)
46 ]
47 entries, where key is padded to a uniform width.
48 """
49
50 max_key_len = max((len(k) for k in d.keys()), default=0)
51 max_key_len = min((max_key_len, KEY_MAX), default=0)
52 for key, value in d.items():
53 if isinstance(key, bytes):
54 key += b":"
55 else:
56 key += ":"
57 key = key.ljust(max_key_len + 2)
58 yield [
59 ("header", key),
60 ("text", value)
61 ]
62
63
64 def format_text(text: TTextType) -> typing.Iterator[TViewLine]:
65 """
66 Helper function that transforms bytes into the view output format.
67 """
68 for line in text.splitlines():
69 yield [("text", line)]
70
```
Path: `mitmproxy/contentviews/urlencoded.py`
Content:
```
1 from mitmproxy.net.http import url
2 from mitmproxy.coretypes import multidict
3 from . import base
4
5
6 class ViewURLEncoded(base.View):
7 name = "URL-encoded"
8 content_types = ["application/x-www-form-urlencoded"]
9
10 def __call__(self, data, **metadata):
11 try:
12 data = data.decode("ascii", "strict")
13 except ValueError:
14 return None
15 d = url.decode(data)
16 return "URLEncoded form", base.format_dict(multidict.MultiDict(d))
17
```
Path: `mitmproxy/contentviews/query.py`
Content:
```
1 from typing import List # noqa
2
3 from . import base
4
5
6 class ViewQuery(base.View):
7 name = "Query"
8
9 def __call__(self, data, **metadata):
10 query = metadata.get("query")
11 if query:
12 return "Query", base.format_dict(query)
13 else:
14 return "Query", base.format_text("")
15
```
--- END FILES ---
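For context, `format_dict` in `base.py` above pads each key to a uniform width (capped at `KEY_MAX`) before yielding one styled line per entry. A rough standalone sketch of that padding, outside the view machinery:

```python
# Rough standalone sketch of the key padding performed by format_dict above
# (simplified: plain print instead of yielding ("header", ...)/("text", ...) tuples).
KEY_MAX = 30

d = {"first": "value", "arr[]": "foo bar"}

max_key_len = min(max(len(k) for k in d), KEY_MAX)
for key, value in d.items():
    print((key + ":").ljust(max_key_len + 2), value)
```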
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mitmproxy/contentviews/base.py b/mitmproxy/contentviews/base.py
--- a/mitmproxy/contentviews/base.py
+++ b/mitmproxy/contentviews/base.py
@@ -35,32 +35,52 @@
raise NotImplementedError() # pragma: no cover
-def format_dict(
- d: typing.Mapping[TTextType, TTextType]
-) -> typing.Iterator[TViewLine]:
+def format_pairs(
+ items: typing.Iterable[typing.Tuple[TTextType, TTextType]]
+)-> typing.Iterator[TViewLine]:
+
"""
- Helper function that transforms the given dictionary into a list of
+ Helper function that accepts a list of (k,v) pairs into a list of
[
- ("key", key )
+ ("key", key )
("value", value)
]
- entries, where key is padded to a uniform width.
+ where key is padded to a uniform width
"""
- max_key_len = max((len(k) for k in d.keys()), default=0)
+ max_key_len = max((len(k[0]) for k in items), default=0)
max_key_len = min((max_key_len, KEY_MAX), default=0)
- for key, value in d.items():
+
+ for key, value in items:
if isinstance(key, bytes):
+
key += b":"
else:
key += ":"
+
key = key.ljust(max_key_len + 2)
+
yield [
("header", key),
("text", value)
]
+def format_dict(
+ d: typing.Mapping[TTextType, TTextType]
+) -> typing.Iterator[TViewLine]:
+ """
+ Helper function that transforms the given dictionary into a list of
+ [
+ ("key", key )
+ ("value", value)
+ ]
+ entries, where key is padded to a uniform width.
+ """
+
+ return format_pairs(d.items())
+
+
def format_text(text: TTextType) -> typing.Iterator[TViewLine]:
"""
Helper function that transforms bytes into the view output format.
diff --git a/mitmproxy/contentviews/query.py b/mitmproxy/contentviews/query.py
--- a/mitmproxy/contentviews/query.py
+++ b/mitmproxy/contentviews/query.py
@@ -9,6 +9,6 @@
def __call__(self, data, **metadata):
query = metadata.get("query")
if query:
- return "Query", base.format_dict(query)
+ return "Query", base.format_pairs(query.items(multi=True))
else:
return "Query", base.format_text("")
diff --git a/mitmproxy/contentviews/urlencoded.py b/mitmproxy/contentviews/urlencoded.py
--- a/mitmproxy/contentviews/urlencoded.py
+++ b/mitmproxy/contentviews/urlencoded.py
@@ -1,5 +1,4 @@
from mitmproxy.net.http import url
-from mitmproxy.coretypes import multidict
from . import base
@@ -13,4 +12,4 @@
except ValueError:
return None
d = url.decode(data)
- return "URLEncoded form", base.format_dict(multidict.MultiDict(d))
+ return "URLEncoded form", base.format_pairs(d)
| {"golden_diff": "diff --git a/mitmproxy/contentviews/base.py b/mitmproxy/contentviews/base.py\n--- a/mitmproxy/contentviews/base.py\n+++ b/mitmproxy/contentviews/base.py\n@@ -35,32 +35,52 @@\n raise NotImplementedError() # pragma: no cover\n \n \n-def format_dict(\n- d: typing.Mapping[TTextType, TTextType]\n-) -> typing.Iterator[TViewLine]:\n+def format_pairs(\n+ items: typing.Iterable[typing.Tuple[TTextType, TTextType]]\n+)-> typing.Iterator[TViewLine]:\n+\n \"\"\"\n- Helper function that transforms the given dictionary into a list of\n+ Helper function that accepts a list of (k,v) pairs into a list of\n [\n- (\"key\", key )\n+ (\"key\", key )\n (\"value\", value)\n ]\n- entries, where key is padded to a uniform width.\n+ where key is padded to a uniform width\n \"\"\"\n \n- max_key_len = max((len(k) for k in d.keys()), default=0)\n+ max_key_len = max((len(k[0]) for k in items), default=0)\n max_key_len = min((max_key_len, KEY_MAX), default=0)\n- for key, value in d.items():\n+\n+ for key, value in items:\n if isinstance(key, bytes):\n+\n key += b\":\"\n else:\n key += \":\"\n+\n key = key.ljust(max_key_len + 2)\n+\n yield [\n (\"header\", key),\n (\"text\", value)\n ]\n \n \n+def format_dict(\n+ d: typing.Mapping[TTextType, TTextType]\n+) -> typing.Iterator[TViewLine]:\n+ \"\"\"\n+ Helper function that transforms the given dictionary into a list of\n+ [\n+ (\"key\", key )\n+ (\"value\", value)\n+ ]\n+ entries, where key is padded to a uniform width.\n+ \"\"\"\n+\n+ return format_pairs(d.items())\n+\n+\n def format_text(text: TTextType) -> typing.Iterator[TViewLine]:\n \"\"\"\n Helper function that transforms bytes into the view output format.\ndiff --git a/mitmproxy/contentviews/query.py b/mitmproxy/contentviews/query.py\n--- a/mitmproxy/contentviews/query.py\n+++ b/mitmproxy/contentviews/query.py\n@@ -9,6 +9,6 @@\n def __call__(self, data, **metadata):\n query = metadata.get(\"query\")\n if query:\n- return \"Query\", base.format_dict(query)\n+ return \"Query\", base.format_pairs(query.items(multi=True))\n else:\n return \"Query\", base.format_text(\"\")\ndiff --git a/mitmproxy/contentviews/urlencoded.py b/mitmproxy/contentviews/urlencoded.py\n--- a/mitmproxy/contentviews/urlencoded.py\n+++ b/mitmproxy/contentviews/urlencoded.py\n@@ -1,5 +1,4 @@\n from mitmproxy.net.http import url\n-from mitmproxy.coretypes import multidict\n from . import base\n \n \n@@ -13,4 +12,4 @@\n except ValueError:\n return None\n d = url.decode(data)\n- return \"URLEncoded form\", base.format_dict(multidict.MultiDict(d))\n+ return \"URLEncoded form\", base.format_pairs(d)\n", "issue": "query string arrays are not fully displayed\n##### Steps to reproduce the problem:\r\n\r\n1. visit through mitmproxy/mitmdump e.g. http://example.com/?first=value&arr[]=foo+bar&arr[]=baz\r\n2. Check the query parameters in the request\r\n3. Notice that they contain more data than mitmproxy/mitmdump shows\r\n\r\n##### Any other comments? What have you tried so far?\r\n\r\nThe following script shows all the data:\r\n\r\n```\r\n#!/usr/bin/env python3\r\n\r\nfrom urllib.parse import urlparse, parse_qs\r\n\r\nurl = \"http://example.com/?first=value&arr[]=foo+bar&arr[]=baz\"\r\nparts = urlparse(url)\r\nprint(parse_qs(parts.query))\r\n```\r\n\r\nOutput:\r\n`{'first': ['value'], 'arr[]': ['foo bar', 'baz']}`\r\n\r\nBut mitmproxy/mitmdump only shows:\r\n```\r\n first: value\r\n arr[]: foo bar\r\n```\r\n\r\n##### System information\r\n\r\n<!-- Paste the output of \"mitmproxy --version\" here. 
-->\r\n\r\nMitmproxy: 3.0.4\r\nPython: 3.5.2\r\nOpenSSL: OpenSSL 1.0.2g 1 Mar 2016\r\nPlatform: Linux-4.10.0-42-generic-x86_64-with-Ubuntu-16.04-xenial\r\n\r\n<!-- Please use the mitmproxy forums (https://discourse.mitmproxy.org/) for support/how-to questions. Thanks! :) -->\r\n\n", "before_files": [{"content": "# Default view cutoff *in lines*\nimport typing\n\nKEY_MAX = 30\n\nTTextType = typing.Union[str, bytes] # FIXME: This should be either bytes or str ultimately.\nTViewLine = typing.List[typing.Tuple[str, TTextType]]\nTViewResult = typing.Tuple[str, typing.Iterator[TViewLine]]\n\n\nclass View:\n name: str = None\n content_types: typing.List[str] = []\n\n def __call__(self, data: bytes, **metadata) -> TViewResult:\n \"\"\"\n Transform raw data into human-readable output.\n\n Args:\n data: the data to decode/format.\n metadata: optional keyword-only arguments for metadata. Implementations must not\n rely on a given argument being present.\n\n Returns:\n A (description, content generator) tuple.\n\n The content generator yields lists of (style, text) tuples, where each list represents\n a single line. ``text`` is a unfiltered byte string which may need to be escaped,\n depending on the used output.\n\n Caveats:\n The content generator must not yield tuples of tuples,\n because urwid cannot process that. You have to yield a *list* of tuples per line.\n \"\"\"\n raise NotImplementedError() # pragma: no cover\n\n\ndef format_dict(\n d: typing.Mapping[TTextType, TTextType]\n) -> typing.Iterator[TViewLine]:\n \"\"\"\n Helper function that transforms the given dictionary into a list of\n [\n (\"key\", key )\n (\"value\", value)\n ]\n entries, where key is padded to a uniform width.\n \"\"\"\n\n max_key_len = max((len(k) for k in d.keys()), default=0)\n max_key_len = min((max_key_len, KEY_MAX), default=0)\n for key, value in d.items():\n if isinstance(key, bytes):\n key += b\":\"\n else:\n key += \":\"\n key = key.ljust(max_key_len + 2)\n yield [\n (\"header\", key),\n (\"text\", value)\n ]\n\n\ndef format_text(text: TTextType) -> typing.Iterator[TViewLine]:\n \"\"\"\n Helper function that transforms bytes into the view output format.\n \"\"\"\n for line in text.splitlines():\n yield [(\"text\", line)]\n", "path": "mitmproxy/contentviews/base.py"}, {"content": "from mitmproxy.net.http import url\nfrom mitmproxy.coretypes import multidict\nfrom . import base\n\n\nclass ViewURLEncoded(base.View):\n name = \"URL-encoded\"\n content_types = [\"application/x-www-form-urlencoded\"]\n\n def __call__(self, data, **metadata):\n try:\n data = data.decode(\"ascii\", \"strict\")\n except ValueError:\n return None\n d = url.decode(data)\n return \"URLEncoded form\", base.format_dict(multidict.MultiDict(d))\n", "path": "mitmproxy/contentviews/urlencoded.py"}, {"content": "from typing import List # noqa\n\nfrom . 
import base\n\n\nclass ViewQuery(base.View):\n name = \"Query\"\n\n def __call__(self, data, **metadata):\n query = metadata.get(\"query\")\n if query:\n return \"Query\", base.format_dict(query)\n else:\n return \"Query\", base.format_text(\"\")\n", "path": "mitmproxy/contentviews/query.py"}], "after_files": [{"content": "# Default view cutoff *in lines*\nimport typing\n\nKEY_MAX = 30\n\nTTextType = typing.Union[str, bytes] # FIXME: This should be either bytes or str ultimately.\nTViewLine = typing.List[typing.Tuple[str, TTextType]]\nTViewResult = typing.Tuple[str, typing.Iterator[TViewLine]]\n\n\nclass View:\n name: str = None\n content_types: typing.List[str] = []\n\n def __call__(self, data: bytes, **metadata) -> TViewResult:\n \"\"\"\n Transform raw data into human-readable output.\n\n Args:\n data: the data to decode/format.\n metadata: optional keyword-only arguments for metadata. Implementations must not\n rely on a given argument being present.\n\n Returns:\n A (description, content generator) tuple.\n\n The content generator yields lists of (style, text) tuples, where each list represents\n a single line. ``text`` is a unfiltered byte string which may need to be escaped,\n depending on the used output.\n\n Caveats:\n The content generator must not yield tuples of tuples,\n because urwid cannot process that. You have to yield a *list* of tuples per line.\n \"\"\"\n raise NotImplementedError() # pragma: no cover\n\n\ndef format_pairs(\n items: typing.Iterable[typing.Tuple[TTextType, TTextType]]\n)-> typing.Iterator[TViewLine]:\n\n \"\"\"\n Helper function that accepts a list of (k,v) pairs into a list of\n [\n (\"key\", key )\n (\"value\", value)\n ]\n where key is padded to a uniform width\n \"\"\"\n\n max_key_len = max((len(k[0]) for k in items), default=0)\n max_key_len = min((max_key_len, KEY_MAX), default=0)\n\n for key, value in items:\n if isinstance(key, bytes):\n\n key += b\":\"\n else:\n key += \":\"\n\n key = key.ljust(max_key_len + 2)\n\n yield [\n (\"header\", key),\n (\"text\", value)\n ]\n\n\ndef format_dict(\n d: typing.Mapping[TTextType, TTextType]\n) -> typing.Iterator[TViewLine]:\n \"\"\"\n Helper function that transforms the given dictionary into a list of\n [\n (\"key\", key )\n (\"value\", value)\n ]\n entries, where key is padded to a uniform width.\n \"\"\"\n\n return format_pairs(d.items())\n\n\ndef format_text(text: TTextType) -> typing.Iterator[TViewLine]:\n \"\"\"\n Helper function that transforms bytes into the view output format.\n \"\"\"\n for line in text.splitlines():\n yield [(\"text\", line)]\n", "path": "mitmproxy/contentviews/base.py"}, {"content": "from mitmproxy.net.http import url\nfrom . import base\n\n\nclass ViewURLEncoded(base.View):\n name = \"URL-encoded\"\n content_types = [\"application/x-www-form-urlencoded\"]\n\n def __call__(self, data, **metadata):\n try:\n data = data.decode(\"ascii\", \"strict\")\n except ValueError:\n return None\n d = url.decode(data)\n return \"URLEncoded form\", base.format_pairs(d)\n", "path": "mitmproxy/contentviews/urlencoded.py"}, {"content": "from typing import List # noqa\n\nfrom . import base\n\n\nclass ViewQuery(base.View):\n name = \"Query\"\n\n def __call__(self, data, **metadata):\n query = metadata.get(\"query\")\n if query:\n return \"Query\", base.format_pairs(query.items(multi=True))\n else:\n return \"Query\", base.format_text(\"\")\n", "path": "mitmproxy/contentviews/query.py"}]} | 1,485 | 727 |
gh_patches_debug_18641 | rasdani/github-patches | git_diff | litestar-org__litestar-1009 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: Structlog example no longer works
Me again. Sorry 🙈
**Describe the bug**
Running the structlog example [here](https://starlite-api.github.io/starlite/1.48/usage/0-the-starlite-app/?h=structlog#using-structlog) results in an internal server error as of v1.45 (I think)
```
{"status_code":500,"detail":"TypeError(\"encode_json() got an unexpected keyword argument 'default'\")"}
```
The default encoder was changed [here](https://github.com/starlite-api/starlite/pull/891/files#diff-6b2294023eb60948cd9f742e4930255a72254daf74f9e3157df8d479a685b123R213)
Which doesn't accept the `default` argument given [here](https://github.com/hynek/structlog/blob/main/src/structlog/processors.py#L318)
I'm not sure if it's a structlog problem or a starlite problem.
Maybe the solution is to rename `enc_hook` to `default` so that it mirrors the signature of `json.dumps`? I'm not sure, to be honest.
**To Reproduce**
Run the structlog example in the documentation:
```python
from starlite import Starlite, StructLoggingConfig, Request, get
@get("/")
def my_router_handler(request: Request) -> None:
request.logger.info("inside a request")
return None
logging_config = StructLoggingConfig()
app = Starlite(route_handlers=[my_router_handler], logging_config=logging_config)
```
**Additional context**
Add any other context about the problem here.
--- END ISSUE ---
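For reference, a minimal reproduction of the signature mismatch, using simplified stand-ins rather than the real starlite/msgspec encoders: structlog's `JSONRenderer` invokes its serializer with a `default=` keyword (mirroring `json.dumps`), so a serializer whose hook parameter is named `enc_hook` cannot be passed in directly.

```python
import json

def encode_json_old(obj, enc_hook=None):
    """Stand-in for the pre-fix encoder: the hook parameter is named `enc_hook`."""
    return json.dumps(obj, default=enc_hook).encode()

def encode_json_new(obj, default=None):
    """Stand-in for the patched encoder: the hook parameter is named `default`."""
    return json.dumps(obj, default=default).encode()

def json_renderer(event_dict, serializer):
    # structlog's JSONRenderer calls serializer(event_dict, default=<hook>)
    return serializer(event_dict, default=repr)

print(json_renderer({"event": "inside a request"}, encode_json_new))
# json_renderer({"event": "inside a request"}, encode_json_old)
#   -> TypeError: encode_json_old() got an unexpected keyword argument 'default'
```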
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `starlite/utils/serialization.py`
Content:
```
1 from pathlib import PurePosixPath
2 from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union
3
4 import msgspec
5 from pydantic import (
6 AnyUrl,
7 BaseModel,
8 ByteSize,
9 ConstrainedBytes,
10 ConstrainedDate,
11 ConstrainedDecimal,
12 ConstrainedFloat,
13 ConstrainedFrozenSet,
14 ConstrainedInt,
15 ConstrainedList,
16 ConstrainedSet,
17 ConstrainedStr,
18 EmailStr,
19 NameEmail,
20 PaymentCardNumber,
21 SecretField,
22 StrictBool,
23 )
24 from pydantic.color import Color
25
26 if TYPE_CHECKING:
27 from starlite.types import TypeEncodersMap
28
29 DEFAULT_TYPE_ENCODERS: "TypeEncodersMap" = {
30 PurePosixPath: str,
31 # pydantic specific types
32 BaseModel: lambda m: m.dict(),
33 ByteSize: lambda b: b.real,
34 EmailStr: str,
35 NameEmail: str,
36 Color: str,
37 AnyUrl: str,
38 SecretField: str,
39 ConstrainedInt: int,
40 ConstrainedFloat: float,
41 ConstrainedStr: str,
42 ConstrainedBytes: lambda b: b.decode("utf-8"),
43 ConstrainedList: list,
44 ConstrainedSet: set,
45 ConstrainedFrozenSet: frozenset,
46 ConstrainedDecimal: float,
47 ConstrainedDate: lambda d: d.isoformat(),
48 PaymentCardNumber: str,
49 StrictBool: int, # pydantic compatibility
50 }
51
52
53 def default_serializer(value: Any, type_encoders: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any:
54 """Transform values non-natively supported by `msgspec`
55
56 Args:
57 value: A value to serialize#
58 type_encoders: Mapping of types to callables to transforming types
59 Returns:
60 A serialized value
61 Raises:
62 TypeError: if value is not supported
63 """
64 if type_encoders is None:
65 type_encoders = DEFAULT_TYPE_ENCODERS
66 for base in value.__class__.__mro__[:-1]:
67 try:
68 encoder = type_encoders[base]
69 except KeyError:
70 continue
71 return encoder(value)
72 raise TypeError(f"Unsupported type: {type(value)!r}")
73
74
75 def dec_hook(type_: Any, value: Any) -> Any: # pragma: no cover
76 """Transform values non-natively supported by `msgspec`
77
78 Args:
79 type_: Encountered type
80 value: Value to coerce
81
82 Returns:
83 A `msgspec`-supported type
84 """
85 if issubclass(type_, BaseModel):
86 return type_(**value)
87 raise TypeError(f"Unsupported type: {type(value)!r}")
88
89
90 _msgspec_json_encoder = msgspec.json.Encoder(enc_hook=default_serializer)
91 _msgspec_json_decoder = msgspec.json.Decoder(dec_hook=dec_hook)
92 _msgspec_msgpack_encoder = msgspec.msgpack.Encoder(enc_hook=default_serializer)
93 _msgspec_msgpack_decoder = msgspec.msgpack.Decoder(dec_hook=dec_hook)
94
95
96 def encode_json(obj: Any, enc_hook: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:
97 """Encode a value into JSON.
98
99 Args:
100 obj: Value to encode
101 enc_hook: Optional callable to support non-natively supported types
102
103 Returns:
104 JSON as bytes
105 """
106 if enc_hook is None or enc_hook is default_serializer:
107 return _msgspec_json_encoder.encode(obj)
108 return msgspec.json.encode(obj, enc_hook=enc_hook)
109
110
111 def decode_json(raw: Union[str, bytes]) -> Any:
112 """Decode a JSON string/bytes into an object.
113
114 Args:
115 raw: Value to decode
116
117 Returns:
118 An object
119 """
120 return _msgspec_json_decoder.decode(raw)
121
122
123 def encode_msgpack(obj: Any, enc_hook: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:
124 """Encode a value into MessagePack.
125
126 Args:
127 obj: Value to encode
128 enc_hook: Optional callable to support non-natively supported types
129
130 Returns:
131 MessagePack as bytes
132 """
133 if enc_hook is None or enc_hook is default_serializer:
134 return _msgspec_msgpack_encoder.encode(obj)
135 return msgspec.msgpack.encode(obj, enc_hook=enc_hook)
136
137
138 def decode_msgpack(raw: bytes) -> Any:
139 """Decode a MessagePack string/bytes into an object.
140
141 Args:
142 raw: Value to decode
143
144 Returns:
145 An object
146 """
147 return _msgspec_msgpack_decoder.decode(raw)
148
```
--- END FILES ---
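For context, `default_serializer` above resolves an encoder by walking the value's MRO until a registered base class is found. A rough standalone sketch of that lookup:

```python
from pathlib import PurePosixPath

# Rough standalone sketch of the MRO-based encoder lookup in default_serializer above.
type_encoders = {PurePosixPath: str}

def serialize(value):
    for base in type(value).__mro__[:-1]:
        if base in type_encoders:
            return type_encoders[base](value)
    raise TypeError(f"Unsupported type: {type(value)!r}")

print(serialize(PurePosixPath("/tmp/demo")))   # /tmp/demo
# serialize(object())                          # would raise TypeError
```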
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/starlite/utils/serialization.py b/starlite/utils/serialization.py
--- a/starlite/utils/serialization.py
+++ b/starlite/utils/serialization.py
@@ -93,19 +93,19 @@
_msgspec_msgpack_decoder = msgspec.msgpack.Decoder(dec_hook=dec_hook)
-def encode_json(obj: Any, enc_hook: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:
+def encode_json(obj: Any, default: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:
"""Encode a value into JSON.
Args:
obj: Value to encode
- enc_hook: Optional callable to support non-natively supported types
+ default: Optional callable to support non-natively supported types.
Returns:
JSON as bytes
"""
- if enc_hook is None or enc_hook is default_serializer:
+ if default is None or default is default_serializer:
return _msgspec_json_encoder.encode(obj)
- return msgspec.json.encode(obj, enc_hook=enc_hook)
+ return msgspec.json.encode(obj, enc_hook=default)
def decode_json(raw: Union[str, bytes]) -> Any:
| {"golden_diff": "diff --git a/starlite/utils/serialization.py b/starlite/utils/serialization.py\n--- a/starlite/utils/serialization.py\n+++ b/starlite/utils/serialization.py\n@@ -93,19 +93,19 @@\n _msgspec_msgpack_decoder = msgspec.msgpack.Decoder(dec_hook=dec_hook)\n \n \n-def encode_json(obj: Any, enc_hook: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:\n+def encode_json(obj: Any, default: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:\n \"\"\"Encode a value into JSON.\n \n Args:\n obj: Value to encode\n- enc_hook: Optional callable to support non-natively supported types\n+ default: Optional callable to support non-natively supported types.\n \n Returns:\n JSON as bytes\n \"\"\"\n- if enc_hook is None or enc_hook is default_serializer:\n+ if default is None or default is default_serializer:\n return _msgspec_json_encoder.encode(obj)\n- return msgspec.json.encode(obj, enc_hook=enc_hook)\n+ return msgspec.json.encode(obj, enc_hook=default)\n \n \n def decode_json(raw: Union[str, bytes]) -> Any:\n", "issue": "Bug: Structlog example no longer works\nMe again. Sorry \ud83d\ude48 \r\n\r\n**Describe the bug**\r\nRunning the structlog example [here](https://starlite-api.github.io/starlite/1.48/usage/0-the-starlite-app/?h=structlog#using-structlog) results in an internal server error as of v1.45 (I think)\r\n\r\n```\r\n{\"status_code\":500,\"detail\":\"TypeError(\\\"encode_json() got an unexpected keyword argument 'default'\\\")\"}\r\n```\r\n\r\nThe default encoder was changed [here](https://github.com/starlite-api/starlite/pull/891/files#diff-6b2294023eb60948cd9f742e4930255a72254daf74f9e3157df8d479a685b123R213)\r\nWhich doesn't accept the `default` argument given [here](https://github.com/hynek/structlog/blob/main/src/structlog/processors.py#L318)\r\n\r\nI'm not sure if it's a structlog problem or a starlite problem.\r\n\r\nMaybe the solution is to rename `enc_hook` to `default` then it mirrors the signature of `json.dumps`? 
I'm not sure, to be honest.\r\n\r\n\r\n**To Reproduce**\r\nRun the structlog example in the documentation:\r\n```python\r\nfrom starlite import Starlite, StructLoggingConfig, Request, get\r\n\r\n\r\n@get(\"/\")\r\ndef my_router_handler(request: Request) -> None:\r\n request.logger.info(\"inside a request\")\r\n return None\r\n\r\n\r\nlogging_config = StructLoggingConfig()\r\n\r\napp = Starlite(route_handlers=[my_router_handler], logging_config=logging_config)\r\n```\r\n\r\n**Additional context**\r\nAdd any other context about the problem here.\r\n\n", "before_files": [{"content": "from pathlib import PurePosixPath\nfrom typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union\n\nimport msgspec\nfrom pydantic import (\n AnyUrl,\n BaseModel,\n ByteSize,\n ConstrainedBytes,\n ConstrainedDate,\n ConstrainedDecimal,\n ConstrainedFloat,\n ConstrainedFrozenSet,\n ConstrainedInt,\n ConstrainedList,\n ConstrainedSet,\n ConstrainedStr,\n EmailStr,\n NameEmail,\n PaymentCardNumber,\n SecretField,\n StrictBool,\n)\nfrom pydantic.color import Color\n\nif TYPE_CHECKING:\n from starlite.types import TypeEncodersMap\n\nDEFAULT_TYPE_ENCODERS: \"TypeEncodersMap\" = {\n PurePosixPath: str,\n # pydantic specific types\n BaseModel: lambda m: m.dict(),\n ByteSize: lambda b: b.real,\n EmailStr: str,\n NameEmail: str,\n Color: str,\n AnyUrl: str,\n SecretField: str,\n ConstrainedInt: int,\n ConstrainedFloat: float,\n ConstrainedStr: str,\n ConstrainedBytes: lambda b: b.decode(\"utf-8\"),\n ConstrainedList: list,\n ConstrainedSet: set,\n ConstrainedFrozenSet: frozenset,\n ConstrainedDecimal: float,\n ConstrainedDate: lambda d: d.isoformat(),\n PaymentCardNumber: str,\n StrictBool: int, # pydantic compatibility\n}\n\n\ndef default_serializer(value: Any, type_encoders: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any:\n \"\"\"Transform values non-natively supported by `msgspec`\n\n Args:\n value: A value to serialize#\n type_encoders: Mapping of types to callables to transforming types\n Returns:\n A serialized value\n Raises:\n TypeError: if value is not supported\n \"\"\"\n if type_encoders is None:\n type_encoders = DEFAULT_TYPE_ENCODERS\n for base in value.__class__.__mro__[:-1]:\n try:\n encoder = type_encoders[base]\n except KeyError:\n continue\n return encoder(value)\n raise TypeError(f\"Unsupported type: {type(value)!r}\")\n\n\ndef dec_hook(type_: Any, value: Any) -> Any: # pragma: no cover\n \"\"\"Transform values non-natively supported by `msgspec`\n\n Args:\n type_: Encountered type\n value: Value to coerce\n\n Returns:\n A `msgspec`-supported type\n \"\"\"\n if issubclass(type_, BaseModel):\n return type_(**value)\n raise TypeError(f\"Unsupported type: {type(value)!r}\")\n\n\n_msgspec_json_encoder = msgspec.json.Encoder(enc_hook=default_serializer)\n_msgspec_json_decoder = msgspec.json.Decoder(dec_hook=dec_hook)\n_msgspec_msgpack_encoder = msgspec.msgpack.Encoder(enc_hook=default_serializer)\n_msgspec_msgpack_decoder = msgspec.msgpack.Decoder(dec_hook=dec_hook)\n\n\ndef encode_json(obj: Any, enc_hook: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:\n \"\"\"Encode a value into JSON.\n\n Args:\n obj: Value to encode\n enc_hook: Optional callable to support non-natively supported types\n\n Returns:\n JSON as bytes\n \"\"\"\n if enc_hook is None or enc_hook is default_serializer:\n return _msgspec_json_encoder.encode(obj)\n return msgspec.json.encode(obj, enc_hook=enc_hook)\n\n\ndef decode_json(raw: Union[str, bytes]) -> Any:\n \"\"\"Decode a JSON string/bytes into an 
object.\n\n Args:\n raw: Value to decode\n\n Returns:\n An object\n \"\"\"\n return _msgspec_json_decoder.decode(raw)\n\n\ndef encode_msgpack(obj: Any, enc_hook: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:\n \"\"\"Encode a value into MessagePack.\n\n Args:\n obj: Value to encode\n enc_hook: Optional callable to support non-natively supported types\n\n Returns:\n MessagePack as bytes\n \"\"\"\n if enc_hook is None or enc_hook is default_serializer:\n return _msgspec_msgpack_encoder.encode(obj)\n return msgspec.msgpack.encode(obj, enc_hook=enc_hook)\n\n\ndef decode_msgpack(raw: bytes) -> Any:\n \"\"\"Decode a MessagePack string/bytes into an object.\n\n Args:\n raw: Value to decode\n\n Returns:\n An object\n \"\"\"\n return _msgspec_msgpack_decoder.decode(raw)\n", "path": "starlite/utils/serialization.py"}], "after_files": [{"content": "from pathlib import PurePosixPath\nfrom typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union\n\nimport msgspec\nfrom pydantic import (\n AnyUrl,\n BaseModel,\n ByteSize,\n ConstrainedBytes,\n ConstrainedDate,\n ConstrainedDecimal,\n ConstrainedFloat,\n ConstrainedFrozenSet,\n ConstrainedInt,\n ConstrainedList,\n ConstrainedSet,\n ConstrainedStr,\n EmailStr,\n NameEmail,\n PaymentCardNumber,\n SecretField,\n StrictBool,\n)\nfrom pydantic.color import Color\n\nif TYPE_CHECKING:\n from starlite.types import TypeEncodersMap\n\nDEFAULT_TYPE_ENCODERS: \"TypeEncodersMap\" = {\n PurePosixPath: str,\n # pydantic specific types\n BaseModel: lambda m: m.dict(),\n ByteSize: lambda b: b.real,\n EmailStr: str,\n NameEmail: str,\n Color: str,\n AnyUrl: str,\n SecretField: str,\n ConstrainedInt: int,\n ConstrainedFloat: float,\n ConstrainedStr: str,\n ConstrainedBytes: lambda b: b.decode(\"utf-8\"),\n ConstrainedList: list,\n ConstrainedSet: set,\n ConstrainedFrozenSet: frozenset,\n ConstrainedDecimal: float,\n ConstrainedDate: lambda d: d.isoformat(),\n PaymentCardNumber: str,\n StrictBool: int, # pydantic compatibility\n}\n\n\ndef default_serializer(value: Any, type_encoders: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any:\n \"\"\"Transform values non-natively supported by `msgspec`\n\n Args:\n value: A value to serialize#\n type_encoders: Mapping of types to callables to transforming types\n Returns:\n A serialized value\n Raises:\n TypeError: if value is not supported\n \"\"\"\n if type_encoders is None:\n type_encoders = DEFAULT_TYPE_ENCODERS\n for base in value.__class__.__mro__[:-1]:\n try:\n encoder = type_encoders[base]\n except KeyError:\n continue\n return encoder(value)\n raise TypeError(f\"Unsupported type: {type(value)!r}\")\n\n\ndef dec_hook(type_: Any, value: Any) -> Any: # pragma: no cover\n \"\"\"Transform values non-natively supported by `msgspec`\n\n Args:\n type_: Encountered type\n value: Value to coerce\n\n Returns:\n A `msgspec`-supported type\n \"\"\"\n if issubclass(type_, BaseModel):\n return type_(**value)\n raise TypeError(f\"Unsupported type: {type(value)!r}\")\n\n\n_msgspec_json_encoder = msgspec.json.Encoder(enc_hook=default_serializer)\n_msgspec_json_decoder = msgspec.json.Decoder(dec_hook=dec_hook)\n_msgspec_msgpack_encoder = msgspec.msgpack.Encoder(enc_hook=default_serializer)\n_msgspec_msgpack_decoder = msgspec.msgpack.Decoder(dec_hook=dec_hook)\n\n\ndef encode_json(obj: Any, default: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:\n \"\"\"Encode a value into JSON.\n\n Args:\n obj: Value to encode\n default: Optional callable to support non-natively supported types.\n\n Returns:\n 
JSON as bytes\n \"\"\"\n if default is None or default is default_serializer:\n return _msgspec_json_encoder.encode(obj)\n return msgspec.json.encode(obj, enc_hook=default)\n\n\ndef decode_json(raw: Union[str, bytes]) -> Any:\n \"\"\"Decode a JSON string/bytes into an object.\n\n Args:\n raw: Value to decode\n\n Returns:\n An object\n \"\"\"\n return _msgspec_json_decoder.decode(raw)\n\n\ndef encode_msgpack(obj: Any, enc_hook: Optional[Callable[[Any], Any]] = default_serializer) -> bytes:\n \"\"\"Encode a value into MessagePack.\n\n Args:\n obj: Value to encode\n enc_hook: Optional callable to support non-natively supported types\n\n Returns:\n MessagePack as bytes\n \"\"\"\n if enc_hook is None or enc_hook is default_serializer:\n return _msgspec_msgpack_encoder.encode(obj)\n return msgspec.msgpack.encode(obj, enc_hook=enc_hook)\n\n\ndef decode_msgpack(raw: bytes) -> Any:\n \"\"\"Decode a MessagePack string/bytes into an object.\n\n Args:\n raw: Value to decode\n\n Returns:\n An object\n \"\"\"\n return _msgspec_msgpack_decoder.decode(raw)\n", "path": "starlite/utils/serialization.py"}]} | 1,977 | 257 |
gh_patches_debug_427 | rasdani/github-patches | git_diff | python__python-docs-es-1787 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Translate 'using/unix.po'
This needs to reach 100% translated.
The rendered version of this file will be available at https://docs.python.org/es/3.10/using/unix.html once translated.
Meanwhile, the English version is shown.
Current stats for `using/unix.po`:
* Fuzzy: 1
* Percent translated: 88.9%
* Entries: 40 / 45
* Untranslated: 5
Please, comment here if you want this file to be assigned to you and a member will assign it to you as soon as possible, so you can start working on it.
Remember to follow the steps in our [Contributing Guide](https://python-docs-es.readthedocs.io/page/CONTRIBUTING.html).
--- END ISSUE ---
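For reference, the per-file numbers quoted above can be reproduced locally with `polib` (assuming a checkout that contains `using/unix.po`); a small sketch:

```python
import polib

po = polib.pofile("using/unix.po")

total = len([e for e in po if not e.obsolete])
print(f"Percent translated: {po.percent_translated()}%")
print(f"Entries: {len(po.translated_entries())} / {total}")
print(f"Fuzzy: {len(po.fuzzy_entries())}")
print(f"Untranslated: {len(po.untranslated_entries())}")
```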
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/translate.py`
Content:
```
1 import os
2 import re
3 import sys
4 from typing import Dict, Tuple
5
6 import polib
7
8 VERBOSE = False
9 DEBUG = False
10 SKIP_TRANSLATED_ENTRIES = True
11
12 try:
13 from deep_translator import GoogleTranslator
14 except ImportError:
15 print("Error: This util script needs `deep_translator` to be installed")
16 sys.exit(1)
17
18 _patterns = [
19 ":c:func:`[^`]+`",
20 ":c:type:`[^`]+`",
21 ":c:macro:`[^`]+`",
22 ":c:member:`[^`]+`",
23 ":c:data:`[^`]+`",
24 ":py:data:`[^`]+`",
25 ":py:mod:`[^`]+`",
26 ":func:`[^`]+`",
27 ":mod:`[^`]+`",
28 ":ref:`[^`]+`",
29 ":class:`[^`]+`",
30 ":pep:`[^`]+`",
31 ":data:`[^`]+`",
32 ":exc:`[^`]+`",
33 ":term:`[^`]+`",
34 ":meth:`[^`]+`",
35 ":envvar:`[^`]+`",
36 ":file:`[^`]+`",
37 ":attr:`[^`]+`",
38 ":const:`[^`]+`",
39 ":issue:`[^`]+`",
40 ":opcode:`[^`]+`",
41 ":option:`[^`]+`",
42 ":program:`[^`]+`",
43 ":keyword:`[^`]+`",
44 ":RFC:`[^`]+`",
45 ":rfc:`[^`]+`",
46 ":doc:`[^`]+`",
47 ":manpage:`[^`]+`",
48 ":sup:`[^`]+`",
49 "``[^`]+``",
50 "`[^`]+`__",
51 "`[^`]+`_",
52 "\*\*[^\*]+\*\*", # bold text between **
53 "\*[^\*]+\*", # italic text between *
54 ]
55
56 _exps = [re.compile(e) for e in _patterns]
57
58 def protect_sphinx_directives(s: str) -> Tuple[dict, str]:
59 """
60 Parameters:
61 string containing the text to translate
62
63 Returns:
64 dictionary containing all the placeholder text as keys
65 and the correct value.
66 """
67
68 i = 0
69 d: Dict[str, str] = {}
70 for exp in _exps:
71 matches = exp.findall(s)
72 if DEBUG:
73 print(exp, matches)
74 for match in matches:
75 ph = f"XASDF{str(i).zfill(2)}"
76 s = s.replace(match, ph)
77 if ph in d and VERBOSE:
78 print(f"Error: {ph} is already in the dictionary")
79 print("new", match)
80 print("old", d[ph])
81 d[ph] = match
82 i += 1
83 return d, s
84
85
86 def undo_sphinx_directives_protection(placeholders: dict, translated_text: str) -> str:
87 for ph, value in placeholders.items():
88 translated_text = translated_text.replace(ph, value)
89 if DEBUG:
90 print(ph, value)
91 print(translated_text)
92 return translated_text
93
94
95 if __name__ == "__main__":
96 filename = sys.argv[1]
97 if not os.path.isfile(filename):
98 print(f"File not found: '{filename}'")
99 sys.exit(-1)
100
101 po = polib.pofile(filename)
102 translator = GoogleTranslator(source="en", target="es")
103
104 for entry in po:
105 # If the entry has already a translation, skip.
106 if SKIP_TRANSLATED_ENTRIES and entry.msgstr:
107 continue
108
109 print("\nEN|", entry.msgid)
110 placeholders, temp_text = protect_sphinx_directives(entry.msgid)
111 if VERBOSE:
112 print(temp_text)
113 print(placeholders)
114
115 # Translate the temporary text without sphinx statements
116 translated_text = translator.translate(temp_text)
117
118 # Recover sphinx statements
119 real_text = undo_sphinx_directives_protection(placeholders, translated_text)
120 print("ES|", real_text)
121
122 # Replace the po file translated entry
123 entry.msgstr = real_text
124
125 # Save the file after all the entries are translated
126 po.save()
127
```
--- END FILES ---
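For context, the script above shields Sphinx roles from machine translation by swapping them for placeholders and restoring them afterwards. A condensed standalone stand-in for `protect_sphinx_directives` / `undo_sphinx_directives_protection`, restricted to two role patterns for brevity:

```python
import re

patterns = [re.compile(p) for p in (r":doc:`[^`]+`", r":source:`[^`]+`")]

def protect(text):
    mapping, i = {}, 0
    for exp in patterns:
        for match in exp.findall(text):
            placeholder = f"XASDF{str(i).zfill(2)}"
            text = text.replace(match, placeholder)
            mapping[placeholder] = match
            i += 1
    return mapping, text

def restore(mapping, text):
    for placeholder, original in mapping.items():
        text = text.replace(placeholder, original)
    return text

mapping, shielded = protect("See :doc:`using/unix` and :source:`Misc/NEWS` for details.")
print(shielded)                    # roles replaced by XASDF00 / XASDF01 placeholders
print(restore(mapping, shielded))  # original roles restored unchanged
```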
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scripts/translate.py b/scripts/translate.py
--- a/scripts/translate.py
+++ b/scripts/translate.py
@@ -44,6 +44,7 @@
":RFC:`[^`]+`",
":rfc:`[^`]+`",
":doc:`[^`]+`",
+ ":source:`[^`]+`",
":manpage:`[^`]+`",
":sup:`[^`]+`",
"``[^`]+``",
| {"golden_diff": "diff --git a/scripts/translate.py b/scripts/translate.py\n--- a/scripts/translate.py\n+++ b/scripts/translate.py\n@@ -44,6 +44,7 @@\n \":RFC:`[^`]+`\",\n \":rfc:`[^`]+`\",\n \":doc:`[^`]+`\",\n+ \":source:`[^`]+`\",\n \":manpage:`[^`]+`\",\n \":sup:`[^`]+`\",\n \"``[^`]+``\",\n", "issue": "Translate 'using/unix.po'\nThis needs to reach 100% translated.\n\nThe rendered version of this file will be available at https://docs.python.org/es/3.10/using/unix.html once translated.\nMeanwhile, the English version is shown.\n\nCurrent stats for `using/unix.po`:\n\n* Fuzzy: 1\n* Percent translated: 88.9%\n* Entries: 40 / 45\n* Untranslated: 5\n\nPlease, comment here if you want this file to be assigned to you and an member will assign it to you as soon as possible, so you can start working on it.\n\nRemember to follow the steps in our [Contributing Guide](https://python-docs-es.readthedocs.io/page/CONTRIBUTING.html).\n", "before_files": [{"content": "import os\nimport re\nimport sys\nfrom typing import Dict, Tuple\n\nimport polib\n\nVERBOSE = False\nDEBUG = False\nSKIP_TRANSLATED_ENTRIES = True\n\ntry:\n from deep_translator import GoogleTranslator\nexcept ImportError:\n print(\"Error: This util script needs `deep_translator` to be installed\")\n sys.exit(1)\n\n_patterns = [\n \":c:func:`[^`]+`\",\n \":c:type:`[^`]+`\",\n \":c:macro:`[^`]+`\",\n \":c:member:`[^`]+`\",\n \":c:data:`[^`]+`\",\n \":py:data:`[^`]+`\",\n \":py:mod:`[^`]+`\",\n \":func:`[^`]+`\",\n \":mod:`[^`]+`\",\n \":ref:`[^`]+`\",\n \":class:`[^`]+`\",\n \":pep:`[^`]+`\",\n \":data:`[^`]+`\",\n \":exc:`[^`]+`\",\n \":term:`[^`]+`\",\n \":meth:`[^`]+`\",\n \":envvar:`[^`]+`\",\n \":file:`[^`]+`\",\n \":attr:`[^`]+`\",\n \":const:`[^`]+`\",\n \":issue:`[^`]+`\",\n \":opcode:`[^`]+`\",\n \":option:`[^`]+`\",\n \":program:`[^`]+`\",\n \":keyword:`[^`]+`\",\n \":RFC:`[^`]+`\",\n \":rfc:`[^`]+`\",\n \":doc:`[^`]+`\",\n \":manpage:`[^`]+`\",\n \":sup:`[^`]+`\",\n \"``[^`]+``\",\n \"`[^`]+`__\",\n \"`[^`]+`_\",\n \"\\*\\*[^\\*]+\\*\\*\", # bold text between **\n \"\\*[^\\*]+\\*\", # italic text between *\n]\n\n_exps = [re.compile(e) for e in _patterns]\n\ndef protect_sphinx_directives(s: str) -> Tuple[dict, str]:\n \"\"\"\n Parameters:\n string containing the text to translate\n\n Returns:\n dictionary containing all the placeholder text as keys\n and the correct value.\n \"\"\"\n\n i = 0\n d: Dict[str, str] = {}\n for exp in _exps:\n matches = exp.findall(s)\n if DEBUG:\n print(exp, matches)\n for match in matches:\n ph = f\"XASDF{str(i).zfill(2)}\"\n s = s.replace(match, ph)\n if ph in d and VERBOSE:\n print(f\"Error: {ph} is already in the dictionary\")\n print(\"new\", match)\n print(\"old\", d[ph])\n d[ph] = match\n i += 1\n return d, s\n\n\ndef undo_sphinx_directives_protection(placeholders: dict, translated_text: str) -> str:\n for ph, value in placeholders.items():\n translated_text = translated_text.replace(ph, value)\n if DEBUG:\n print(ph, value)\n print(translated_text)\n return translated_text\n\n\nif __name__ == \"__main__\":\n filename = sys.argv[1]\n if not os.path.isfile(filename):\n print(f\"File not found: '{filename}'\")\n sys.exit(-1)\n\n po = polib.pofile(filename)\n translator = GoogleTranslator(source=\"en\", target=\"es\")\n\n for entry in po:\n # If the entry has already a translation, skip.\n if SKIP_TRANSLATED_ENTRIES and entry.msgstr:\n continue\n\n print(\"\\nEN|\", entry.msgid)\n placeholders, temp_text = protect_sphinx_directives(entry.msgid)\n if VERBOSE:\n print(temp_text)\n 
print(placeholders)\n\n # Translate the temporary text without sphinx statements\n translated_text = translator.translate(temp_text)\n\n # Recover sphinx statements\n real_text = undo_sphinx_directives_protection(placeholders, translated_text)\n print(\"ES|\", real_text)\n\n # Replace the po file translated entry\n entry.msgstr = real_text\n\n # Save the file after all the entries are translated\n po.save()\n", "path": "scripts/translate.py"}], "after_files": [{"content": "import os\nimport re\nimport sys\nfrom typing import Dict, Tuple\n\nimport polib\n\nVERBOSE = False\nDEBUG = False\nSKIP_TRANSLATED_ENTRIES = True\n\ntry:\n from deep_translator import GoogleTranslator\nexcept ImportError:\n print(\"Error: This util script needs `deep_translator` to be installed\")\n sys.exit(1)\n\n_patterns = [\n \":c:func:`[^`]+`\",\n \":c:type:`[^`]+`\",\n \":c:macro:`[^`]+`\",\n \":c:member:`[^`]+`\",\n \":c:data:`[^`]+`\",\n \":py:data:`[^`]+`\",\n \":py:mod:`[^`]+`\",\n \":func:`[^`]+`\",\n \":mod:`[^`]+`\",\n \":ref:`[^`]+`\",\n \":class:`[^`]+`\",\n \":pep:`[^`]+`\",\n \":data:`[^`]+`\",\n \":exc:`[^`]+`\",\n \":term:`[^`]+`\",\n \":meth:`[^`]+`\",\n \":envvar:`[^`]+`\",\n \":file:`[^`]+`\",\n \":attr:`[^`]+`\",\n \":const:`[^`]+`\",\n \":issue:`[^`]+`\",\n \":opcode:`[^`]+`\",\n \":option:`[^`]+`\",\n \":program:`[^`]+`\",\n \":keyword:`[^`]+`\",\n \":RFC:`[^`]+`\",\n \":rfc:`[^`]+`\",\n \":doc:`[^`]+`\",\n \":source:`[^`]+`\",\n \":manpage:`[^`]+`\",\n \":sup:`[^`]+`\",\n \"``[^`]+``\",\n \"`[^`]+`__\",\n \"`[^`]+`_\",\n \"\\*\\*[^\\*]+\\*\\*\", # bold text between **\n \"\\*[^\\*]+\\*\", # italic text between *\n]\n\n_exps = [re.compile(e) for e in _patterns]\n\ndef protect_sphinx_directives(s: str) -> Tuple[dict, str]:\n \"\"\"\n Parameters:\n string containing the text to translate\n\n Returns:\n dictionary containing all the placeholder text as keys\n and the correct value.\n \"\"\"\n\n i = 0\n d: Dict[str, str] = {}\n for exp in _exps:\n matches = exp.findall(s)\n if DEBUG:\n print(exp, matches)\n for match in matches:\n ph = f\"XASDF{str(i).zfill(2)}\"\n s = s.replace(match, ph)\n if ph in d and VERBOSE:\n print(f\"Error: {ph} is already in the dictionary\")\n print(\"new\", match)\n print(\"old\", d[ph])\n d[ph] = match\n i += 1\n return d, s\n\n\ndef undo_sphinx_directives_protection(placeholders: dict, translated_text: str) -> str:\n for ph, value in placeholders.items():\n translated_text = translated_text.replace(ph, value)\n if DEBUG:\n print(ph, value)\n print(translated_text)\n return translated_text\n\n\nif __name__ == \"__main__\":\n filename = sys.argv[1]\n if not os.path.isfile(filename):\n print(f\"File not found: '{filename}'\")\n sys.exit(-1)\n\n po = polib.pofile(filename)\n translator = GoogleTranslator(source=\"en\", target=\"es\")\n\n for entry in po:\n # If the entry has already a translation, skip.\n if SKIP_TRANSLATED_ENTRIES and entry.msgstr:\n continue\n\n print(\"\\nEN|\", entry.msgid)\n placeholders, temp_text = protect_sphinx_directives(entry.msgid)\n if VERBOSE:\n print(temp_text)\n print(placeholders)\n\n # Translate the temporary text without sphinx statements\n translated_text = translator.translate(temp_text)\n\n # Recover sphinx statements\n real_text = undo_sphinx_directives_protection(placeholders, translated_text)\n print(\"ES|\", real_text)\n\n # Replace the po file translated entry\n entry.msgstr = real_text\n\n # Save the file after all the entries are translated\n po.save()\n", "path": "scripts/translate.py"}]} | 1,612 | 105 |
gh_patches_debug_32832 | rasdani/github-patches | git_diff | cisagov__manage.get.gov-426 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Don't display redundant form field labels
- Don't display redundant field labels
- Retain field labels for screenreaders
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/registrar/templatetags/field_helpers.py`
Content:
```
1 """Custom field helpers for our inputs."""
2 import re
3
4 from django import template
5
6 register = template.Library()
7
8
9 @register.inclusion_tag("includes/input_with_errors.html", takes_context=True)
10 def input_with_errors(context, field=None): # noqa: C901
11 """Make an input field along with error handling.
12
13 Args:
14 field: The field instance.
15
16 In addition to the explicit `field` argument, this inclusion_tag takes the
17 following "widget-tweak-esque" parameters from the surrounding context.
18
19 Context args:
20 add_class: append to input element's `class` attribute
21 add_error_class: like `add_class` but only if field.errors is not empty
22 add_required_class: like `add_class` but only if field is required
23 add_label_class: append to input element's label's `class` attribute
24 add_group_class: append to input element's surrounding tag's `class` attribute
25 attr_* - adds or replaces any single html attribute for the input
26 add_error_attr_* - like `attr_*` but only if field.errors is not empty
27
28 Example usage:
29 ```
30 {% for form in forms.0 %}
31 {% with add_class="usa-input--medium" %}
32 {% with attr_required=True attr_disabled=False %}
33 {% input_with_errors form.street_address1 %}
34 {% endwith %}
35 {% endwith %}
36 {% endfor }
37
38 There are a few edge cases to keep in mind:
39 - a "maxlength" attribute will cause the input to use USWDS Character counter
40 - the field's `use_fieldset` controls whether the output is label/field or
41 fieldset/legend/field
42 - checkbox label styling is different (this is handled, don't worry about it)
43 """
44 context = context.flatten()
45 context["field"] = field
46
47 # get any attributes specified in the field's definition
48 attrs = dict(field.field.widget.attrs)
49
50 # these will be converted to CSS strings
51 classes = []
52 label_classes = []
53 group_classes = []
54
55 # this will be converted to an attribute string
56 described_by = []
57
58 if "class" in attrs:
59 classes.append(attrs.pop("class"))
60
61 # parse context for field attributes and classes
62 # ---
63 # here we loop through all items in the context dictionary
64 # (this is the context which was being used to render the
65 # outer template in which this {% input_with_errors %} appeared!)
66 # and look for "magic" keys -- these are used to modify the
67 # appearance and behavior of the final HTML
68 for key, value in context.items():
69 if key.startswith("attr_"):
70 attr_name = re.sub("_", "-", key[5:])
71 attrs[attr_name] = value
72 elif key.startswith("add_error_attr_") and field.errors:
73 attr_name = re.sub("_", "-", key[15:])
74 attrs[attr_name] = value
75
76 elif key == "add_class":
77 classes.append(value)
78 elif key == "add_required_class" and field.required:
79 classes.append(value)
80 elif key == "add_error_class" and field.errors:
81 classes.append(value)
82
83 elif key == "add_label_class":
84 label_classes.append(value)
85
86 elif key == "add_group_class":
87 group_classes.append(value)
88
89 attrs["id"] = field.auto_id
90
91 # do some work for various edge cases
92
93 if "maxlength" in attrs:
94 # associate the field programmatically with its hint text
95 described_by.append(f"{attrs['id']}__message")
96
97 if field.use_fieldset:
98 context["label_tag"] = "legend"
99 else:
100 context["label_tag"] = "label"
101
102 if field.use_fieldset:
103 label_classes.append("usa-legend")
104
105 if field.widget_type == "checkbox":
106 label_classes.append("usa-checkbox__label")
107 elif not field.use_fieldset:
108 label_classes.append("usa-label")
109
110 if field.errors:
111 # associate the field programmatically with its error message
112 message_div_id = f"{attrs['id']}__error-message"
113 described_by.append(message_div_id)
114
115 # set the field invalid
116 # due to weirdness, this must be a string, not a boolean
117 attrs["aria-invalid"] = "true"
118
119 # style the invalid field
120 classes.append("usa-input--error")
121 label_classes.append("usa-label--error")
122 group_classes.append("usa-form-group--error")
123
124 # convert lists into strings
125
126 if classes:
127 context["classes"] = " ".join(classes)
128
129 if label_classes:
130 context["label_classes"] = " ".join(label_classes)
131
132 if group_classes:
133 context["group_classes"] = " ".join(group_classes)
134
135 if described_by:
136 # ensure we don't overwrite existing attribute value
137 if "aria-describedby" in attrs:
138 described_by.append(attrs["aria-describedby"])
139 attrs["aria-describedby"] = " ".join(described_by)
140
141 # ask Django to give us the widget dict
142 # see Widget.get_context() on
143 # https://docs.djangoproject.com/en/4.1/ref/forms/widgets
144 widget = field.field.widget.get_context(
145 field.html_name, field.value() or field.initial, field.build_widget_attrs(attrs)
146 ) # -> {"widget": {"name": ...}}
147
148 context["widget"] = widget["widget"]
149
150 return context
151
```
--- END FILES ---
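For context, `input_with_errors` above scans the surrounding template context for "magic" keys (`attr_*`, `add_*_class`, ...) and turns them into HTML attributes and CSS classes. A simplified standalone sketch of that scan (attributes and classes only; label, legend, and error handling omitted):

```python
import re

def parse_magic_keys(context, has_errors=False, required=False):
    attrs, classes = {}, []
    for key, value in context.items():
        if key.startswith("attr_"):
            attrs[re.sub("_", "-", key[5:])] = value
        elif key.startswith("add_error_attr_") and has_errors:
            attrs[re.sub("_", "-", key[15:])] = value
        elif key == "add_class":
            classes.append(value)
        elif key == "add_required_class" and required:
            classes.append(value)
        elif key == "add_error_class" and has_errors:
            classes.append(value)
    return attrs, classes

print(parse_magic_keys({"attr_aria_label": "Street", "add_class": "usa-input--medium"}))
# ({'aria-label': 'Street'}, ['usa-input--medium'])
```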
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/registrar/templatetags/field_helpers.py b/src/registrar/templatetags/field_helpers.py
--- a/src/registrar/templatetags/field_helpers.py
+++ b/src/registrar/templatetags/field_helpers.py
@@ -21,6 +21,7 @@
add_error_class: like `add_class` but only if field.errors is not empty
add_required_class: like `add_class` but only if field is required
add_label_class: append to input element's label's `class` attribute
+ add_legend_class: append to input element's legend's `class` attribute
add_group_class: append to input element's surrounding tag's `class` attribute
attr_* - adds or replaces any single html attribute for the input
add_error_attr_* - like `attr_*` but only if field.errors is not empty
@@ -50,6 +51,7 @@
# these will be converted to CSS strings
classes = []
label_classes = []
+ legend_classes = []
group_classes = []
# this will be converted to an attribute string
@@ -82,6 +84,8 @@
elif key == "add_label_class":
label_classes.append(value)
+ elif key == "add_legend_class":
+ legend_classes.append(value)
elif key == "add_group_class":
group_classes.append(value)
@@ -129,6 +133,9 @@
if label_classes:
context["label_classes"] = " ".join(label_classes)
+ if legend_classes:
+ context["legend_classes"] = " ".join(legend_classes)
+
if group_classes:
context["group_classes"] = " ".join(group_classes)
| {"golden_diff": "diff --git a/src/registrar/templatetags/field_helpers.py b/src/registrar/templatetags/field_helpers.py\n--- a/src/registrar/templatetags/field_helpers.py\n+++ b/src/registrar/templatetags/field_helpers.py\n@@ -21,6 +21,7 @@\n add_error_class: like `add_class` but only if field.errors is not empty\n add_required_class: like `add_class` but only if field is required\n add_label_class: append to input element's label's `class` attribute\n+ add_legend_class: append to input element's legend's `class` attribute\n add_group_class: append to input element's surrounding tag's `class` attribute\n attr_* - adds or replaces any single html attribute for the input\n add_error_attr_* - like `attr_*` but only if field.errors is not empty\n@@ -50,6 +51,7 @@\n # these will be converted to CSS strings\n classes = []\n label_classes = []\n+ legend_classes = []\n group_classes = []\n \n # this will be converted to an attribute string\n@@ -82,6 +84,8 @@\n \n elif key == \"add_label_class\":\n label_classes.append(value)\n+ elif key == \"add_legend_class\":\n+ legend_classes.append(value)\n \n elif key == \"add_group_class\":\n group_classes.append(value)\n@@ -129,6 +133,9 @@\n if label_classes:\n context[\"label_classes\"] = \" \".join(label_classes)\n \n+ if legend_classes:\n+ context[\"legend_classes\"] = \" \".join(legend_classes)\n+\n if group_classes:\n context[\"group_classes\"] = \" \".join(group_classes)\n", "issue": "Don't display redundant form field labels\n- Don't display redundant field labels \n- Retain field labels for screenreaders\n", "before_files": [{"content": "\"\"\"Custom field helpers for our inputs.\"\"\"\nimport re\n\nfrom django import template\n\nregister = template.Library()\n\n\[email protected]_tag(\"includes/input_with_errors.html\", takes_context=True)\ndef input_with_errors(context, field=None): # noqa: C901\n \"\"\"Make an input field along with error handling.\n\n Args:\n field: The field instance.\n\n In addition to the explicit `field` argument, this inclusion_tag takes the\n following \"widget-tweak-esque\" parameters from the surrounding context.\n\n Context args:\n add_class: append to input element's `class` attribute\n add_error_class: like `add_class` but only if field.errors is not empty\n add_required_class: like `add_class` but only if field is required\n add_label_class: append to input element's label's `class` attribute\n add_group_class: append to input element's surrounding tag's `class` attribute\n attr_* - adds or replaces any single html attribute for the input\n add_error_attr_* - like `attr_*` but only if field.errors is not empty\n\n Example usage:\n ```\n {% for form in forms.0 %}\n {% with add_class=\"usa-input--medium\" %}\n {% with attr_required=True attr_disabled=False %}\n {% input_with_errors form.street_address1 %}\n {% endwith %}\n {% endwith %}\n {% endfor }\n\n There are a few edge cases to keep in mind:\n - a \"maxlength\" attribute will cause the input to use USWDS Character counter\n - the field's `use_fieldset` controls whether the output is label/field or\n fieldset/legend/field\n - checkbox label styling is different (this is handled, don't worry about it)\n \"\"\"\n context = context.flatten()\n context[\"field\"] = field\n\n # get any attributes specified in the field's definition\n attrs = dict(field.field.widget.attrs)\n\n # these will be converted to CSS strings\n classes = []\n label_classes = []\n group_classes = []\n\n # this will be converted to an attribute string\n described_by = []\n\n if \"class\" in 
attrs:\n classes.append(attrs.pop(\"class\"))\n\n # parse context for field attributes and classes\n # ---\n # here we loop through all items in the context dictionary\n # (this is the context which was being used to render the\n # outer template in which this {% input_with_errors %} appeared!)\n # and look for \"magic\" keys -- these are used to modify the\n # appearance and behavior of the final HTML\n for key, value in context.items():\n if key.startswith(\"attr_\"):\n attr_name = re.sub(\"_\", \"-\", key[5:])\n attrs[attr_name] = value\n elif key.startswith(\"add_error_attr_\") and field.errors:\n attr_name = re.sub(\"_\", \"-\", key[15:])\n attrs[attr_name] = value\n\n elif key == \"add_class\":\n classes.append(value)\n elif key == \"add_required_class\" and field.required:\n classes.append(value)\n elif key == \"add_error_class\" and field.errors:\n classes.append(value)\n\n elif key == \"add_label_class\":\n label_classes.append(value)\n\n elif key == \"add_group_class\":\n group_classes.append(value)\n\n attrs[\"id\"] = field.auto_id\n\n # do some work for various edge cases\n\n if \"maxlength\" in attrs:\n # associate the field programmatically with its hint text\n described_by.append(f\"{attrs['id']}__message\")\n\n if field.use_fieldset:\n context[\"label_tag\"] = \"legend\"\n else:\n context[\"label_tag\"] = \"label\"\n\n if field.use_fieldset:\n label_classes.append(\"usa-legend\")\n\n if field.widget_type == \"checkbox\":\n label_classes.append(\"usa-checkbox__label\")\n elif not field.use_fieldset:\n label_classes.append(\"usa-label\")\n\n if field.errors:\n # associate the field programmatically with its error message\n message_div_id = f\"{attrs['id']}__error-message\"\n described_by.append(message_div_id)\n\n # set the field invalid\n # due to weirdness, this must be a string, not a boolean\n attrs[\"aria-invalid\"] = \"true\"\n\n # style the invalid field\n classes.append(\"usa-input--error\")\n label_classes.append(\"usa-label--error\")\n group_classes.append(\"usa-form-group--error\")\n\n # convert lists into strings\n\n if classes:\n context[\"classes\"] = \" \".join(classes)\n\n if label_classes:\n context[\"label_classes\"] = \" \".join(label_classes)\n\n if group_classes:\n context[\"group_classes\"] = \" \".join(group_classes)\n\n if described_by:\n # ensure we don't overwrite existing attribute value\n if \"aria-describedby\" in attrs:\n described_by.append(attrs[\"aria-describedby\"])\n attrs[\"aria-describedby\"] = \" \".join(described_by)\n\n # ask Django to give us the widget dict\n # see Widget.get_context() on\n # https://docs.djangoproject.com/en/4.1/ref/forms/widgets\n widget = field.field.widget.get_context(\n field.html_name, field.value() or field.initial, field.build_widget_attrs(attrs)\n ) # -> {\"widget\": {\"name\": ...}}\n\n context[\"widget\"] = widget[\"widget\"]\n\n return context\n", "path": "src/registrar/templatetags/field_helpers.py"}], "after_files": [{"content": "\"\"\"Custom field helpers for our inputs.\"\"\"\nimport re\n\nfrom django import template\n\nregister = template.Library()\n\n\[email protected]_tag(\"includes/input_with_errors.html\", takes_context=True)\ndef input_with_errors(context, field=None): # noqa: C901\n \"\"\"Make an input field along with error handling.\n\n Args:\n field: The field instance.\n\n In addition to the explicit `field` argument, this inclusion_tag takes the\n following \"widget-tweak-esque\" parameters from the surrounding context.\n\n Context args:\n add_class: append to input element's `class` 
attribute\n add_error_class: like `add_class` but only if field.errors is not empty\n add_required_class: like `add_class` but only if field is required\n add_label_class: append to input element's label's `class` attribute\n add_legend_class: append to input element's legend's `class` attribute\n add_group_class: append to input element's surrounding tag's `class` attribute\n attr_* - adds or replaces any single html attribute for the input\n add_error_attr_* - like `attr_*` but only if field.errors is not empty\n\n Example usage:\n ```\n {% for form in forms.0 %}\n {% with add_class=\"usa-input--medium\" %}\n {% with attr_required=True attr_disabled=False %}\n {% input_with_errors form.street_address1 %}\n {% endwith %}\n {% endwith %}\n {% endfor }\n\n There are a few edge cases to keep in mind:\n - a \"maxlength\" attribute will cause the input to use USWDS Character counter\n - the field's `use_fieldset` controls whether the output is label/field or\n fieldset/legend/field\n - checkbox label styling is different (this is handled, don't worry about it)\n \"\"\"\n context = context.flatten()\n context[\"field\"] = field\n\n # get any attributes specified in the field's definition\n attrs = dict(field.field.widget.attrs)\n\n # these will be converted to CSS strings\n classes = []\n label_classes = []\n legend_classes = []\n group_classes = []\n\n # this will be converted to an attribute string\n described_by = []\n\n if \"class\" in attrs:\n classes.append(attrs.pop(\"class\"))\n\n # parse context for field attributes and classes\n # ---\n # here we loop through all items in the context dictionary\n # (this is the context which was being used to render the\n # outer template in which this {% input_with_errors %} appeared!)\n # and look for \"magic\" keys -- these are used to modify the\n # appearance and behavior of the final HTML\n for key, value in context.items():\n if key.startswith(\"attr_\"):\n attr_name = re.sub(\"_\", \"-\", key[5:])\n attrs[attr_name] = value\n elif key.startswith(\"add_error_attr_\") and field.errors:\n attr_name = re.sub(\"_\", \"-\", key[15:])\n attrs[attr_name] = value\n\n elif key == \"add_class\":\n classes.append(value)\n elif key == \"add_required_class\" and field.required:\n classes.append(value)\n elif key == \"add_error_class\" and field.errors:\n classes.append(value)\n\n elif key == \"add_label_class\":\n label_classes.append(value)\n elif key == \"add_legend_class\":\n legend_classes.append(value)\n\n elif key == \"add_group_class\":\n group_classes.append(value)\n\n attrs[\"id\"] = field.auto_id\n\n # do some work for various edge cases\n\n if \"maxlength\" in attrs:\n # associate the field programmatically with its hint text\n described_by.append(f\"{attrs['id']}__message\")\n\n if field.use_fieldset:\n context[\"label_tag\"] = \"legend\"\n else:\n context[\"label_tag\"] = \"label\"\n\n if field.use_fieldset:\n label_classes.append(\"usa-legend\")\n\n if field.widget_type == \"checkbox\":\n label_classes.append(\"usa-checkbox__label\")\n elif not field.use_fieldset:\n label_classes.append(\"usa-label\")\n\n if field.errors:\n # associate the field programmatically with its error message\n message_div_id = f\"{attrs['id']}__error-message\"\n described_by.append(message_div_id)\n\n # set the field invalid\n # due to weirdness, this must be a string, not a boolean\n attrs[\"aria-invalid\"] = \"true\"\n\n # style the invalid field\n classes.append(\"usa-input--error\")\n label_classes.append(\"usa-label--error\")\n 
group_classes.append(\"usa-form-group--error\")\n\n # convert lists into strings\n\n if classes:\n context[\"classes\"] = \" \".join(classes)\n\n if label_classes:\n context[\"label_classes\"] = \" \".join(label_classes)\n\n if legend_classes:\n context[\"legend_classes\"] = \" \".join(legend_classes)\n\n if group_classes:\n context[\"group_classes\"] = \" \".join(group_classes)\n\n if described_by:\n # ensure we don't overwrite existing attribute value\n if \"aria-describedby\" in attrs:\n described_by.append(attrs[\"aria-describedby\"])\n attrs[\"aria-describedby\"] = \" \".join(described_by)\n\n # ask Django to give us the widget dict\n # see Widget.get_context() on\n # https://docs.djangoproject.com/en/4.1/ref/forms/widgets\n widget = field.field.widget.get_context(\n field.html_name, field.value() or field.initial, field.build_widget_attrs(attrs)\n ) # -> {\"widget\": {\"name\": ...}}\n\n context[\"widget\"] = widget[\"widget\"]\n\n return context\n", "path": "src/registrar/templatetags/field_helpers.py"}]} | 1,812 | 384 |
gh_patches_debug_56800 | rasdani/github-patches | git_diff | wright-group__WrightTools-522 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
hide fit functionality
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `WrightTools/__init__.py`
Content:
```
1 """WrightTools init."""
2 # flake8: noqa
3
4
5 # --- import --------------------------------------------------------------------------------------
6
7
8 import sys as _sys
9
10 from .__version__ import *
11 from . import artists
12 from . import collection
13 from . import data
14 from . import diagrams
15 from . import fit
16 from . import kit
17 from . import units
18 from . import exceptions
19
20 from ._open import *
21 from .collection._collection import *
22 from .data._data import *
23
24
25 # --- rcparams ------------------------------------------------------------------------------------
26
27
28 if int(_sys.version.split('.')[0]) > 2:
29 artists.apply_rcparams('fast')
30
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/WrightTools/__init__.py b/WrightTools/__init__.py
--- a/WrightTools/__init__.py
+++ b/WrightTools/__init__.py
@@ -12,7 +12,6 @@
from . import collection
from . import data
from . import diagrams
-from . import fit
from . import kit
from . import units
from . import exceptions
| {"golden_diff": "diff --git a/WrightTools/__init__.py b/WrightTools/__init__.py\n--- a/WrightTools/__init__.py\n+++ b/WrightTools/__init__.py\n@@ -12,7 +12,6 @@\n from . import collection\n from . import data\n from . import diagrams\n-from . import fit\n from . import kit\n from . import units\n from . import exceptions\n", "issue": "hide fit functionality\n\n", "before_files": [{"content": "\"\"\"WrightTools init.\"\"\"\n# flake8: noqa\n\n\n# --- import --------------------------------------------------------------------------------------\n\n\nimport sys as _sys\n\nfrom .__version__ import *\nfrom . import artists\nfrom . import collection\nfrom . import data\nfrom . import diagrams\nfrom . import fit\nfrom . import kit\nfrom . import units\nfrom . import exceptions\n\nfrom ._open import *\nfrom .collection._collection import *\nfrom .data._data import *\n\n\n# --- rcparams ------------------------------------------------------------------------------------\n\n\nif int(_sys.version.split('.')[0]) > 2:\n artists.apply_rcparams('fast')\n", "path": "WrightTools/__init__.py"}], "after_files": [{"content": "\"\"\"WrightTools init.\"\"\"\n# flake8: noqa\n\n\n# --- import --------------------------------------------------------------------------------------\n\n\nimport sys as _sys\n\nfrom .__version__ import *\nfrom . import artists\nfrom . import collection\nfrom . import data\nfrom . import diagrams\nfrom . import kit\nfrom . import units\nfrom . import exceptions\n\nfrom ._open import *\nfrom .collection._collection import *\nfrom .data._data import *\n\n\n# --- rcparams ------------------------------------------------------------------------------------\n\n\nif int(_sys.version.split('.')[0]) > 2:\n artists.apply_rcparams('fast')\n", "path": "WrightTools/__init__.py"}]} | 443 | 87 |
gh_patches_debug_7839 | rasdani/github-patches | git_diff | getsentry__sentry-25 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Limitations on SENTRY_KEY not documented
I assumed that SENTRY_KEY was just any random string that should be unique to a deployment and kept secret (i.e., serving a similar purpose to Django's SECRET_KEY), so I generated a random string to use there.
It turns out that it's used in a URL for the feeds:
```
url(r'^feeds/%s/messages.xml$' % KEY, MessageFeed(), name='sentry-feed-messages'),
```
(in sentry.urls)
Mine happened to have an '[' in it which caused a very confusing error about an unterminated regex.
It should be documented that SENTRY_KEY will make it into a URL (and further that django will try to then parse it as a regexp when trying to do reverse lookups) and thus can't have most punctuation.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sentry/urls.py`
Content:
```
1 import os
2
3 from django.conf.urls.defaults import *
4
5 from sentry.conf import KEY
6 from sentry import views
7 from sentry.feeds import MessageFeed, SummaryFeed
8
9 SENTRY_ROOT = os.path.dirname(__file__)
10
11 urlpatterns = patterns('',
12 url(r'^_media/(?P<path>.*)$', 'django.views.static.serve',
13 {'document_root': os.path.join(SENTRY_ROOT, 'media')}, name='sentry-media'),
14
15 # Feeds
16
17 url(r'^feeds/%s/messages.xml$' % KEY, MessageFeed(), name='sentry-feed-messages'),
18 url(r'^feeds/%s/summaries.xml$' % KEY, SummaryFeed(), name='sentry-feed-summaries'),
19
20 # JS and API
21
22 url(r'^jsapi/$', views.ajax_handler, name='sentry-ajax'),
23 url(r'^store/$', views.store, name='sentry-store'),
24
25 # Normal views
26
27 url(r'^login$', views.login, name='sentry-login'),
28 url(r'^logout$', views.logout, name='sentry-logout'),
29 url(r'^group/(\d+)$', views.group, name='sentry-group'),
30 url(r'^group/(\d+)/messages$', views.group_message_list, name='sentry-group-messages'),
31 url(r'^group/(\d+)/messages/(\d+)$', views.group_message_details, name='sentry-group-message'),
32 url(r'^group/(\d+)/actions/([\w_-]+)', views.group_plugin_action, name='sentry-group-plugin-action'),
33
34 url(r'^$', views.index, name='sentry'),
35 )
36
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sentry/urls.py b/sentry/urls.py
--- a/sentry/urls.py
+++ b/sentry/urls.py
@@ -1,4 +1,5 @@
import os
+import re
from django.conf.urls.defaults import *
@@ -14,8 +15,8 @@
# Feeds
- url(r'^feeds/%s/messages.xml$' % KEY, MessageFeed(), name='sentry-feed-messages'),
- url(r'^feeds/%s/summaries.xml$' % KEY, SummaryFeed(), name='sentry-feed-summaries'),
+ url(r'^feeds/%s/messages.xml$' % re.escape(KEY), MessageFeed(), name='sentry-feed-messages'),
+ url(r'^feeds/%s/summaries.xml$' % re.escape(KEY), SummaryFeed(), name='sentry-feed-summaries'),
# JS and API
| {"golden_diff": "diff --git a/sentry/urls.py b/sentry/urls.py\n--- a/sentry/urls.py\n+++ b/sentry/urls.py\n@@ -1,4 +1,5 @@\n import os\n+import re\n \n from django.conf.urls.defaults import *\n \n@@ -14,8 +15,8 @@\n \n # Feeds\n \n- url(r'^feeds/%s/messages.xml$' % KEY, MessageFeed(), name='sentry-feed-messages'),\n- url(r'^feeds/%s/summaries.xml$' % KEY, SummaryFeed(), name='sentry-feed-summaries'),\n+ url(r'^feeds/%s/messages.xml$' % re.escape(KEY), MessageFeed(), name='sentry-feed-messages'),\n+ url(r'^feeds/%s/summaries.xml$' % re.escape(KEY), SummaryFeed(), name='sentry-feed-summaries'),\n \n # JS and API\n", "issue": "Limitations on SENTRY_KEY not documented\nI assumed that SENTRY_KEY was just any random string that should be unique to a deployment and kept secret (ie, serving a similar purpose to django's SECRET_KEY) so I generated a random string to use there. \n\nIt turns out that it's used in a URL for the feeds:\n\n```\nurl(r'^feeds/%s/messages.xml$' % KEY, MessageFeed(), name='sentry-feed-messages'),\n```\n\n(in sentry.urls)\n\nMine happened to have an '[' in it which caused a very confusing error about an unterminated regex. \n\nIt should be documented that SENTRY_KEY will make it into a URL (and further that django will try to then parse it as a regexp when trying to do reverse lookups) and thus can't have most punctuation. \n\n", "before_files": [{"content": "import os\n\nfrom django.conf.urls.defaults import *\n\nfrom sentry.conf import KEY\nfrom sentry import views\nfrom sentry.feeds import MessageFeed, SummaryFeed\n\nSENTRY_ROOT = os.path.dirname(__file__) \n\nurlpatterns = patterns('',\n url(r'^_media/(?P<path>.*)$', 'django.views.static.serve',\n {'document_root': os.path.join(SENTRY_ROOT, 'media')}, name='sentry-media'),\n\n # Feeds\n\n url(r'^feeds/%s/messages.xml$' % KEY, MessageFeed(), name='sentry-feed-messages'),\n url(r'^feeds/%s/summaries.xml$' % KEY, SummaryFeed(), name='sentry-feed-summaries'),\n\n # JS and API\n\n url(r'^jsapi/$', views.ajax_handler, name='sentry-ajax'),\n url(r'^store/$', views.store, name='sentry-store'),\n \n # Normal views\n\n url(r'^login$', views.login, name='sentry-login'),\n url(r'^logout$', views.logout, name='sentry-logout'),\n url(r'^group/(\\d+)$', views.group, name='sentry-group'),\n url(r'^group/(\\d+)/messages$', views.group_message_list, name='sentry-group-messages'),\n url(r'^group/(\\d+)/messages/(\\d+)$', views.group_message_details, name='sentry-group-message'),\n url(r'^group/(\\d+)/actions/([\\w_-]+)', views.group_plugin_action, name='sentry-group-plugin-action'),\n\n url(r'^$', views.index, name='sentry'),\n)\n", "path": "sentry/urls.py"}], "after_files": [{"content": "import os\nimport re\n\nfrom django.conf.urls.defaults import *\n\nfrom sentry.conf import KEY\nfrom sentry import views\nfrom sentry.feeds import MessageFeed, SummaryFeed\n\nSENTRY_ROOT = os.path.dirname(__file__) \n\nurlpatterns = patterns('',\n url(r'^_media/(?P<path>.*)$', 'django.views.static.serve',\n {'document_root': os.path.join(SENTRY_ROOT, 'media')}, name='sentry-media'),\n\n # Feeds\n\n url(r'^feeds/%s/messages.xml$' % re.escape(KEY), MessageFeed(), name='sentry-feed-messages'),\n url(r'^feeds/%s/summaries.xml$' % re.escape(KEY), SummaryFeed(), name='sentry-feed-summaries'),\n\n # JS and API\n\n url(r'^jsapi/$', views.ajax_handler, name='sentry-ajax'),\n url(r'^store/$', views.store, name='sentry-store'),\n \n # Normal views\n\n url(r'^login$', views.login, name='sentry-login'),\n url(r'^logout$', views.logout, name='sentry-logout'),\n 
url(r'^group/(\\d+)$', views.group, name='sentry-group'),\n url(r'^group/(\\d+)/messages$', views.group_message_list, name='sentry-group-messages'),\n url(r'^group/(\\d+)/messages/(\\d+)$', views.group_message_details, name='sentry-group-message'),\n url(r'^group/(\\d+)/urls$', views.group_urls, name='sentry-group-urls'),\n url(r'^group/(\\d+)/servers$', views.group_servers, name='sentry-group-servers'),\n url(r'^group/(\\d+)/actions/([\\w_-]+)', views.group_plugin_action, name='sentry-group-plugin-action'),\n\n url(r'^$', views.index, name='sentry'),\n)\n", "path": "sentry/urls.py"}]} | 836 | 194 |
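The root cause is that the key is interpolated into a URL regex, so any regex metacharacter (such as the reporter's `[`) breaks compilation. A short standalone illustration of why the patch wraps `KEY` in `re.escape` (the key value here is hypothetical):

```python
import re

KEY = "abc[123"  # hypothetical key containing a regex metacharacter

try:
    re.compile(r"^feeds/%s/messages.xml$" % KEY)            # what the old urls.py effectively did
except re.error as exc:
    print("unescaped key fails to compile:", exc)           # e.g. "unterminated character set"

re.compile(r"^feeds/%s/messages.xml$" % re.escape(KEY))     # what the patched urls.py does
print("escaped key compiles fine")
```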
gh_patches_debug_14750 | rasdani/github-patches | git_diff | Qiskit__qiskit-4721 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
circuit -> schedule raises exception
<!-- ⚠️ If you do not respect this template, your issue will be closed -->
<!-- ⚠️ Make sure to browse the opened and closed issues -->
### Information
- **Qiskit Terra version**: master
- **Python version**:
- **Operating system**:
### What is the current behavior?
```python
ghz = QuantumCircuit(5, 5)
ghz.h(0)
ghz.cx(range(4), range(1,5))
ghz.barrier()
ghz.measure(range(5), range(5))
sch = schedule(ghz, backend)
```
gives:
AttributeError: 'NoneType' object has no attribute 'instruction_schedule_map'
This works on older versions.
### Steps to reproduce the problem
### What is the expected behavior?
### Suggested solutions
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `qiskit/compiler/schedule.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # This code is part of Qiskit.
4 #
5 # (C) Copyright IBM 2019.
6 #
7 # This code is licensed under the Apache License, Version 2.0. You may
8 # obtain a copy of this license in the LICENSE.txt file in the root directory
9 # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
10 #
11 # Any modifications or derivative works of this code must retain this
12 # copyright notice, and modified files need to carry a notice indicating
13 # that they have been altered from the originals.
14
15 """
16 Convenience entry point into pulse scheduling, requiring only a circuit and a backend. For more
17 control over pulse scheduling, look at `qiskit.scheduler.schedule_circuit`.
18 """
19 import logging
20
21 from time import time
22 from typing import List, Optional, Union
23
24 from qiskit.circuit.quantumcircuit import QuantumCircuit
25 from qiskit.exceptions import QiskitError
26 from qiskit.pulse import InstructionScheduleMap, Schedule
27 from qiskit.providers import BaseBackend
28 from qiskit.scheduler import ScheduleConfig
29 from qiskit.scheduler.schedule_circuit import schedule_circuit
30
31 LOG = logging.getLogger(__name__)
32
33
34 def _log_schedule_time(start_time, end_time):
35 log_msg = "Total Scheduling Time - %.5f (ms)" % ((end_time - start_time) * 1000)
36 LOG.info(log_msg)
37
38
39 def schedule(circuits: Union[QuantumCircuit, List[QuantumCircuit]],
40 backend: Optional[BaseBackend] = None,
41 inst_map: Optional[InstructionScheduleMap] = None,
42 meas_map: Optional[List[List[int]]] = None,
43 method: Optional[Union[str, List[str]]] = None) -> Union[Schedule, List[Schedule]]:
44 """
45 Schedule a circuit to a pulse ``Schedule``, using the backend, according to any specified
46 methods. Supported methods are documented in :py:mod:`qiskit.scheduler.schedule_circuit`.
47
48 Args:
49 circuits: The quantum circuit or circuits to translate
50 backend: A backend instance, which contains hardware-specific data required for scheduling
51 inst_map: Mapping of circuit operations to pulse schedules. If ``None``, defaults to the
52 ``backend``\'s ``instruction_schedule_map``
53 meas_map: List of sets of qubits that must be measured together. If ``None``, defaults to
54 the ``backend``\'s ``meas_map``
55 method: Optionally specify a particular scheduling method
56
57 Returns:
58 A pulse ``Schedule`` that implements the input circuit
59
60 Raises:
61 QiskitError: If ``inst_map`` and ``meas_map`` are not passed and ``backend`` is not passed
62 """
63 start_time = time()
64 if inst_map is None:
65 if backend is None:
66 raise QiskitError("Must supply either a backend or InstructionScheduleMap for "
67 "scheduling passes.")
68 inst_map = backend.defaults().instruction_schedule_map
69 if meas_map is None:
70 if backend is None:
71 raise QiskitError("Must supply either a backend or a meas_map for scheduling passes.")
72 meas_map = backend.configuration().meas_map
73
74 schedule_config = ScheduleConfig(inst_map=inst_map, meas_map=meas_map)
75 circuits = circuits if isinstance(circuits, list) else [circuits]
76 schedules = [schedule_circuit(circuit, schedule_config, method) for circuit in circuits]
77 end_time = time()
78 _log_schedule_time(start_time, end_time)
79 return schedules[0] if len(schedules) == 1 else schedules
80
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/qiskit/compiler/schedule.py b/qiskit/compiler/schedule.py
--- a/qiskit/compiler/schedule.py
+++ b/qiskit/compiler/schedule.py
@@ -65,7 +65,11 @@
if backend is None:
raise QiskitError("Must supply either a backend or InstructionScheduleMap for "
"scheduling passes.")
- inst_map = backend.defaults().instruction_schedule_map
+ defaults = backend.defaults()
+ if defaults is None:
+ raise QiskitError("The backend defaults are unavailable. The backend may not "
+ "support pulse.")
+ inst_map = defaults.instruction_schedule_map
if meas_map is None:
if backend is None:
raise QiskitError("Must supply either a backend or a meas_map for scheduling passes.")
| {"golden_diff": "diff --git a/qiskit/compiler/schedule.py b/qiskit/compiler/schedule.py\n--- a/qiskit/compiler/schedule.py\n+++ b/qiskit/compiler/schedule.py\n@@ -65,7 +65,11 @@\n if backend is None:\n raise QiskitError(\"Must supply either a backend or InstructionScheduleMap for \"\n \"scheduling passes.\")\n- inst_map = backend.defaults().instruction_schedule_map\n+ defaults = backend.defaults()\n+ if defaults is None:\n+ raise QiskitError(\"The backend defaults are unavailable. The backend may not \"\n+ \"support pulse.\")\n+ inst_map = defaults.instruction_schedule_map\n if meas_map is None:\n if backend is None:\n raise QiskitError(\"Must supply either a backend or a meas_map for scheduling passes.\")\n", "issue": "circuit -> schedule raises exception\n<!-- \u26a0\ufe0f If you do not respect this template, your issue will be closed -->\r\n<!-- \u26a0\ufe0f Make sure to browse the opened and closed issues -->\r\n\r\n### Information\r\n\r\n- **Qiskit Terra version**: master\r\n- **Python version**:\r\n- **Operating system**:\r\n\r\n### What is the current behavior?\r\n```python\r\nghz = QuantumCircuit(5, 5)\r\nghz.h(0)\r\nghz.cx(range(4), range(1,5))\r\nghz.barrier()\r\nghz.measure(range(5), range(5))\r\n\r\nsch = schedule(ghz, backend)\r\n```\r\n\r\ngives:\r\n\r\nAttributeError: 'NoneType' object has no attribute 'instruction_schedule_map'\r\n\r\nThis works on older versions.\r\n\r\n\r\n### Steps to reproduce the problem\r\n\r\n\r\n\r\n### What is the expected behavior?\r\n\r\n\r\n\r\n### Suggested solutions\r\n\r\n\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# This code is part of Qiskit.\n#\n# (C) Copyright IBM 2019.\n#\n# This code is licensed under the Apache License, Version 2.0. You may\n# obtain a copy of this license in the LICENSE.txt file in the root directory\n# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.\n#\n# Any modifications or derivative works of this code must retain this\n# copyright notice, and modified files need to carry a notice indicating\n# that they have been altered from the originals.\n\n\"\"\"\nConvenience entry point into pulse scheduling, requiring only a circuit and a backend. For more\ncontrol over pulse scheduling, look at `qiskit.scheduler.schedule_circuit`.\n\"\"\"\nimport logging\n\nfrom time import time\nfrom typing import List, Optional, Union\n\nfrom qiskit.circuit.quantumcircuit import QuantumCircuit\nfrom qiskit.exceptions import QiskitError\nfrom qiskit.pulse import InstructionScheduleMap, Schedule\nfrom qiskit.providers import BaseBackend\nfrom qiskit.scheduler import ScheduleConfig\nfrom qiskit.scheduler.schedule_circuit import schedule_circuit\n\nLOG = logging.getLogger(__name__)\n\n\ndef _log_schedule_time(start_time, end_time):\n log_msg = \"Total Scheduling Time - %.5f (ms)\" % ((end_time - start_time) * 1000)\n LOG.info(log_msg)\n\n\ndef schedule(circuits: Union[QuantumCircuit, List[QuantumCircuit]],\n backend: Optional[BaseBackend] = None,\n inst_map: Optional[InstructionScheduleMap] = None,\n meas_map: Optional[List[List[int]]] = None,\n method: Optional[Union[str, List[str]]] = None) -> Union[Schedule, List[Schedule]]:\n \"\"\"\n Schedule a circuit to a pulse ``Schedule``, using the backend, according to any specified\n methods. 
Supported methods are documented in :py:mod:`qiskit.scheduler.schedule_circuit`.\n\n Args:\n circuits: The quantum circuit or circuits to translate\n backend: A backend instance, which contains hardware-specific data required for scheduling\n inst_map: Mapping of circuit operations to pulse schedules. If ``None``, defaults to the\n ``backend``\\'s ``instruction_schedule_map``\n meas_map: List of sets of qubits that must be measured together. If ``None``, defaults to\n the ``backend``\\'s ``meas_map``\n method: Optionally specify a particular scheduling method\n\n Returns:\n A pulse ``Schedule`` that implements the input circuit\n\n Raises:\n QiskitError: If ``inst_map`` and ``meas_map`` are not passed and ``backend`` is not passed\n \"\"\"\n start_time = time()\n if inst_map is None:\n if backend is None:\n raise QiskitError(\"Must supply either a backend or InstructionScheduleMap for \"\n \"scheduling passes.\")\n inst_map = backend.defaults().instruction_schedule_map\n if meas_map is None:\n if backend is None:\n raise QiskitError(\"Must supply either a backend or a meas_map for scheduling passes.\")\n meas_map = backend.configuration().meas_map\n\n schedule_config = ScheduleConfig(inst_map=inst_map, meas_map=meas_map)\n circuits = circuits if isinstance(circuits, list) else [circuits]\n schedules = [schedule_circuit(circuit, schedule_config, method) for circuit in circuits]\n end_time = time()\n _log_schedule_time(start_time, end_time)\n return schedules[0] if len(schedules) == 1 else schedules\n", "path": "qiskit/compiler/schedule.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# This code is part of Qiskit.\n#\n# (C) Copyright IBM 2019.\n#\n# This code is licensed under the Apache License, Version 2.0. You may\n# obtain a copy of this license in the LICENSE.txt file in the root directory\n# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.\n#\n# Any modifications or derivative works of this code must retain this\n# copyright notice, and modified files need to carry a notice indicating\n# that they have been altered from the originals.\n\n\"\"\"\nConvenience entry point into pulse scheduling, requiring only a circuit and a backend. For more\ncontrol over pulse scheduling, look at `qiskit.scheduler.schedule_circuit`.\n\"\"\"\nimport logging\n\nfrom time import time\nfrom typing import List, Optional, Union\n\nfrom qiskit.circuit.quantumcircuit import QuantumCircuit\nfrom qiskit.exceptions import QiskitError\nfrom qiskit.pulse import InstructionScheduleMap, Schedule\nfrom qiskit.providers import BaseBackend\nfrom qiskit.scheduler import ScheduleConfig\nfrom qiskit.scheduler.schedule_circuit import schedule_circuit\n\nLOG = logging.getLogger(__name__)\n\n\ndef _log_schedule_time(start_time, end_time):\n log_msg = \"Total Scheduling Time - %.5f (ms)\" % ((end_time - start_time) * 1000)\n LOG.info(log_msg)\n\n\ndef schedule(circuits: Union[QuantumCircuit, List[QuantumCircuit]],\n backend: Optional[BaseBackend] = None,\n inst_map: Optional[InstructionScheduleMap] = None,\n meas_map: Optional[List[List[int]]] = None,\n method: Optional[Union[str, List[str]]] = None) -> Union[Schedule, List[Schedule]]:\n \"\"\"\n Schedule a circuit to a pulse ``Schedule``, using the backend, according to any specified\n methods. 
Supported methods are documented in :py:mod:`qiskit.scheduler.schedule_circuit`.\n\n Args:\n circuits: The quantum circuit or circuits to translate\n backend: A backend instance, which contains hardware-specific data required for scheduling\n inst_map: Mapping of circuit operations to pulse schedules. If ``None``, defaults to the\n ``backend``\\'s ``instruction_schedule_map``\n meas_map: List of sets of qubits that must be measured together. If ``None``, defaults to\n the ``backend``\\'s ``meas_map``\n method: Optionally specify a particular scheduling method\n\n Returns:\n A pulse ``Schedule`` that implements the input circuit\n\n Raises:\n QiskitError: If ``inst_map`` and ``meas_map`` are not passed and ``backend`` is not passed\n \"\"\"\n start_time = time()\n if inst_map is None:\n if backend is None:\n raise QiskitError(\"Must supply either a backend or InstructionScheduleMap for \"\n \"scheduling passes.\")\n defaults = backend.defaults()\n if defaults is None:\n raise QiskitError(\"The backend defaults are unavailable. The backend may not \"\n \"support pulse.\")\n inst_map = defaults.instruction_schedule_map\n if meas_map is None:\n if backend is None:\n raise QiskitError(\"Must supply either a backend or a meas_map for scheduling passes.\")\n meas_map = backend.configuration().meas_map\n\n schedule_config = ScheduleConfig(inst_map=inst_map, meas_map=meas_map)\n circuits = circuits if isinstance(circuits, list) else [circuits]\n schedules = [schedule_circuit(circuit, schedule_config, method) for circuit in circuits]\n end_time = time()\n _log_schedule_time(start_time, end_time)\n return schedules[0] if len(schedules) == 1 else schedules\n", "path": "qiskit/compiler/schedule.py"}]} | 1,375 | 178 |
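The guard added by the patch matters for backends that expose no pulse defaults, where `backend.defaults()` returns `None`. A minimal stand-in (not a real Qiskit backend) showing the condition the patch turns into a clear `QiskitError` instead of the reported `AttributeError`:

```python
class NoPulseBackend:
    """Stand-in for a backend without pulse support; defaults() returns None."""

    def defaults(self):
        return None


backend = NoPulseBackend()
defaults = backend.defaults()
if defaults is None:
    # Previously this fell through and crashed with
    # AttributeError: 'NoneType' object has no attribute 'instruction_schedule_map'
    print("backend has no pulse defaults; cannot schedule this circuit")
else:
    inst_map = defaults.instruction_schedule_map
```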
gh_patches_debug_6245 | rasdani/github-patches | git_diff | ansible__ansible-lint-303 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
False Positive ANSIBLE0014 does not allow command:args:stdin
# Issue Type
- Bug report
# Ansible and Ansible Lint details
```
ansible --version
ansible 2.4.0.0
config file = /etc/ansible/ansible.cfg
configured module search path = [u'$HOME/.ansible/plugins/modules', u'/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python2.7/site-packages/ansible
executable location = /usr/bin/ansible
python version = 2.7.5 (default, Aug 2 2017, 11:05:32) [GCC 4.8.5 20150623 (Red Hat 4.8.5-16)]
ansible-lint --version
ansible-lint 3.4.17
```
- ansible installation method: OS package
- ansible-lint installation method: pip
# Desired Behaviour
The `stdin` argument to the `command` module should not trigger the "Environment variables don't work as part of command" error.
# Actual Behaviour (Bug report only)
The EnvVarsInCommandRule (ANSIBLE0014) linter rule rejects the following playbook:
```
- hosts: localhost
tasks:
- command: /bin/cat
args:
stdin: "Hello, world!"
```
due to the presence of the `stdin` attribute which was added in Ansible 2.4. This appears to be because `stdin` is missing from `expected_args`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/ansiblelint/rules/EnvVarsInCommandRule.py`
Content:
```
1 # Copyright (c) 2016 Will Thames <[email protected]>
2 #
3 # Permission is hereby granted, free of charge, to any person obtaining a copy
4 # of this software and associated documentation files (the "Software"), to deal
5 # in the Software without restriction, including without limitation the rights
6 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 # copies of the Software, and to permit persons to whom the Software is
8 # furnished to do so, subject to the following conditions:
9 #
10 # The above copyright notice and this permission notice shall be included in
11 # all copies or substantial portions of the Software.
12 #
13 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 # THE SOFTWARE.
20
21 from ansiblelint import AnsibleLintRule
22 from ansiblelint.utils import LINE_NUMBER_KEY, FILENAME_KEY
23
24
25 class EnvVarsInCommandRule(AnsibleLintRule):
26 id = 'ANSIBLE0014'
27 shortdesc = "Environment variables don't work as part of command"
28 description = 'Environment variables should be passed to shell or ' \
29 'command through environment argument'
30 tags = ['bug']
31
32 expected_args = ['chdir', 'creates', 'executable', 'removes', 'warn',
33 'cmd', '__ansible_module__', '__ansible_arguments__',
34 LINE_NUMBER_KEY, FILENAME_KEY]
35
36 def matchtask(self, file, task):
37 if task["action"]["__ansible_module__"] in ['shell', 'command']:
38 if 'cmd' in task['action']:
39 first_cmd_arg = task['action']['cmd'].split()[0]
40 else:
41 first_cmd_arg = task['action']['__ansible_arguments__'][0]
42 return any([arg not in self.expected_args for arg in task['action']] +
43 ["=" in first_cmd_arg])
44
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lib/ansiblelint/rules/EnvVarsInCommandRule.py b/lib/ansiblelint/rules/EnvVarsInCommandRule.py
--- a/lib/ansiblelint/rules/EnvVarsInCommandRule.py
+++ b/lib/ansiblelint/rules/EnvVarsInCommandRule.py
@@ -29,7 +29,7 @@
'command through environment argument'
tags = ['bug']
- expected_args = ['chdir', 'creates', 'executable', 'removes', 'warn',
+ expected_args = ['chdir', 'creates', 'executable', 'removes', 'stdin', 'warn',
'cmd', '__ansible_module__', '__ansible_arguments__',
LINE_NUMBER_KEY, FILENAME_KEY]
| {"golden_diff": "diff --git a/lib/ansiblelint/rules/EnvVarsInCommandRule.py b/lib/ansiblelint/rules/EnvVarsInCommandRule.py\n--- a/lib/ansiblelint/rules/EnvVarsInCommandRule.py\n+++ b/lib/ansiblelint/rules/EnvVarsInCommandRule.py\n@@ -29,7 +29,7 @@\n 'command through environment argument'\n tags = ['bug']\n \n- expected_args = ['chdir', 'creates', 'executable', 'removes', 'warn',\n+ expected_args = ['chdir', 'creates', 'executable', 'removes', 'stdin', 'warn',\n 'cmd', '__ansible_module__', '__ansible_arguments__',\n LINE_NUMBER_KEY, FILENAME_KEY]\n", "issue": "False Positive ANSIBLE0014 does not allow command:args:stdin\n# Issue Type\r\n- Bug report\r\n\r\n# Ansible and Ansible Lint details\r\n\r\n```\r\nansible --version\r\nansible 2.4.0.0\r\n config file = /etc/ansible/ansible.cfg\r\n configured module search path = [u'$HOME/.ansible/plugins/modules', u'/usr/share/ansible/plugins/modules']\r\n ansible python module location = /usr/lib/python2.7/site-packages/ansible\r\n executable location = /usr/bin/ansible\r\n python version = 2.7.5 (default, Aug 2 2017, 11:05:32) [GCC 4.8.5 20150623 (Red Hat 4.8.5-16)]\r\n\r\nansible-lint --version\r\nansible-lint 3.4.17\r\n```\r\n\r\n- ansible installation method: OS package\r\n- ansible-lint installation method: pip\r\n\r\n# Desired Behaviour\r\n\r\nThe `stdin` argument to the `command` module should not trigger the \"Environment variables don't work as part of command\" error.\r\n\r\n# Actual Behaviour (Bug report only)\r\n\r\nThe EnvVarsInCommandRule (ANSIBLE0014) linter rule rejects the following playbook:\r\n\r\n```\r\n- hosts: localhost\r\n tasks:\r\n - command: /bin/cat\r\n args:\r\n stdin: \"Hello, world!\"\r\n```\r\n\r\ndue to the presence of the `stdin` attribute which was added in Ansible 2.4. This appears to be because `stdin` is missing from `expected_args`.\n", "before_files": [{"content": "# Copyright (c) 2016 Will Thames <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n\nfrom ansiblelint import AnsibleLintRule\nfrom ansiblelint.utils import LINE_NUMBER_KEY, FILENAME_KEY\n\n\nclass EnvVarsInCommandRule(AnsibleLintRule):\n id = 'ANSIBLE0014'\n shortdesc = \"Environment variables don't work as part of command\"\n description = 'Environment variables should be passed to shell or ' \\\n 'command through environment argument'\n tags = ['bug']\n\n expected_args = ['chdir', 'creates', 'executable', 'removes', 'warn',\n 'cmd', '__ansible_module__', '__ansible_arguments__',\n LINE_NUMBER_KEY, FILENAME_KEY]\n\n def matchtask(self, file, task):\n if task[\"action\"][\"__ansible_module__\"] in ['shell', 'command']:\n if 'cmd' in task['action']:\n first_cmd_arg = task['action']['cmd'].split()[0]\n else:\n first_cmd_arg = task['action']['__ansible_arguments__'][0]\n return any([arg not in self.expected_args for arg in task['action']] +\n [\"=\" in first_cmd_arg])\n", "path": "lib/ansiblelint/rules/EnvVarsInCommandRule.py"}], "after_files": [{"content": "# Copyright (c) 2016 Will Thames <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n\nfrom ansiblelint import AnsibleLintRule\nfrom ansiblelint.utils import LINE_NUMBER_KEY, FILENAME_KEY\n\n\nclass EnvVarsInCommandRule(AnsibleLintRule):\n id = 'ANSIBLE0014'\n shortdesc = \"Environment variables don't work as part of command\"\n description = 'Environment variables should be passed to shell or ' \\\n 'command through environment argument'\n tags = ['bug']\n\n expected_args = ['chdir', 'creates', 'executable', 'removes', 'stdin', 'warn',\n 'cmd', '__ansible_module__', '__ansible_arguments__',\n LINE_NUMBER_KEY, FILENAME_KEY]\n\n def matchtask(self, file, task):\n if task[\"action\"][\"__ansible_module__\"] in ['shell', 'command']:\n if 'cmd' in task['action']:\n first_cmd_arg = task['action']['cmd'].split()[0]\n else:\n first_cmd_arg = task['action']['__ansible_arguments__'][0]\n return any([arg not in self.expected_args for arg in task['action']] +\n [\"=\" in first_cmd_arg])\n", "path": "lib/ansiblelint/rules/EnvVarsInCommandRule.py"}]} | 1,164 | 155 |
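The false positive comes from `stdin` being absent from `expected_args`. A toy reproduction of the rule's membership check against the reporter's task, before and after the patch (the linter's internal line-number and filename keys are omitted for brevity):

```python
old_args = ['chdir', 'creates', 'executable', 'removes', 'warn',
            'cmd', '__ansible_module__', '__ansible_arguments__']
new_args = old_args + ['stdin']

task_action = {'__ansible_module__': 'command',
               '__ansible_arguments__': ['/bin/cat'],
               'stdin': 'Hello, world!'}

def rule_matches(expected_args):
    first_cmd_arg = task_action['__ansible_arguments__'][0]
    return any(arg not in expected_args for arg in task_action) or "=" in first_cmd_arg

print(rule_matches(old_args))  # True  -> spurious ANSIBLE0014
print(rule_matches(new_args))  # False -> stdin accepted after the patch
```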
gh_patches_debug_7106 | rasdani/github-patches | git_diff | CTFd__CTFd-1485 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Theme settings
There needs to be some way to change settings in themes themselves. People complain about a lot of nonsensical things that they should fix in their forks and that do not need to be a concern in master.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `CTFd/constants/config.py`
Content:
```
1 from CTFd.utils import get_config
2 from CTFd.utils.helpers import markup
3
4
5 class _ConfigsWrapper:
6 def __getattr__(self, attr):
7 return get_config(attr)
8
9 @property
10 def ctf_name(self):
11 return get_config("theme_header", default="CTFd")
12
13 @property
14 def theme_header(self):
15 return markup(get_config("theme_header", default=""))
16
17 @property
18 def theme_footer(self):
19 return markup(get_config("theme_footer", default=""))
20
21
22 Configs = _ConfigsWrapper()
23
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/CTFd/constants/config.py b/CTFd/constants/config.py
--- a/CTFd/constants/config.py
+++ b/CTFd/constants/config.py
@@ -1,3 +1,5 @@
+import json
+
from CTFd.utils import get_config
from CTFd.utils.helpers import markup
@@ -18,5 +20,9 @@
def theme_footer(self):
return markup(get_config("theme_footer", default=""))
+ @property
+ def theme_settings(self):
+ return json.loads(get_config("theme_settings", default="null"))
+
Configs = _ConfigsWrapper()
| {"golden_diff": "diff --git a/CTFd/constants/config.py b/CTFd/constants/config.py\n--- a/CTFd/constants/config.py\n+++ b/CTFd/constants/config.py\n@@ -1,3 +1,5 @@\n+import json\n+\n from CTFd.utils import get_config\n from CTFd.utils.helpers import markup\n \n@@ -18,5 +20,9 @@\n def theme_footer(self):\n return markup(get_config(\"theme_footer\", default=\"\"))\n \n+ @property\n+ def theme_settings(self):\n+ return json.loads(get_config(\"theme_settings\", default=\"null\"))\n+\n \n Configs = _ConfigsWrapper()\n", "issue": "Theme settings\nThere needs to be some way to change settings in themes themselves. People complain about a lot of nonsensical things that they should fix in their forks and not need to be a concern in master. \n", "before_files": [{"content": "from CTFd.utils import get_config\nfrom CTFd.utils.helpers import markup\n\n\nclass _ConfigsWrapper:\n def __getattr__(self, attr):\n return get_config(attr)\n\n @property\n def ctf_name(self):\n return get_config(\"theme_header\", default=\"CTFd\")\n\n @property\n def theme_header(self):\n return markup(get_config(\"theme_header\", default=\"\"))\n\n @property\n def theme_footer(self):\n return markup(get_config(\"theme_footer\", default=\"\"))\n\n\nConfigs = _ConfigsWrapper()\n", "path": "CTFd/constants/config.py"}], "after_files": [{"content": "import json\n\nfrom CTFd.utils import get_config\nfrom CTFd.utils.helpers import markup\n\n\nclass _ConfigsWrapper:\n def __getattr__(self, attr):\n return get_config(attr)\n\n @property\n def ctf_name(self):\n return get_config(\"theme_header\", default=\"CTFd\")\n\n @property\n def theme_header(self):\n return markup(get_config(\"theme_header\", default=\"\"))\n\n @property\n def theme_footer(self):\n return markup(get_config(\"theme_footer\", default=\"\"))\n\n @property\n def theme_settings(self):\n return json.loads(get_config(\"theme_settings\", default=\"null\"))\n\n\nConfigs = _ConfigsWrapper()\n", "path": "CTFd/constants/config.py"}]} | 460 | 136 |
gh_patches_debug_7342 | rasdani/github-patches | git_diff | Zeroto521__my-data-toolkit-765 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
MAINT: Add `sparse_output` for `OneHotEncoder` to compat with sklearn1.2
<!--
Thanks for contributing a pull request!
Please follow these standard acronyms to start the commit message:
- ENH: enhancement
- BUG: bug fix
- DOC: documentation
- TYP: type annotations
- TST: addition or modification of tests
- MAINT: maintenance commit (refactoring, typos, etc.)
- BLD: change related to building
- REL: related to releasing
- API: an (incompatible) API change
- DEP: deprecate something, or remove a deprecated object
- DEV: development tool or utility
- REV: revert an earlier commit
- PERF: performance improvement
- BOT: always commit via a bot
- CI: related to CI or CD
- CLN: Code cleanup
-->
- [x] closes https://github.com/Zeroto521/my-data-toolkit/actions/runs/3156312323/jobs/5135884971#step:5:960
- [x] whatsnew entry
In the latest (>= 1.1.2) sklearn versions, `sparse` is replaced by `sparse_output`, and `sparse` will be removed in 1.4.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dtoolkit/transformer/sklearn/OneHotEncoder.py`
Content:
```
1 from __future__ import annotations
2
3 from textwrap import dedent
4 from typing import Literal
5 from typing import TYPE_CHECKING
6
7 import numpy as np
8 import pandas as pd
9 from pandas.util._decorators import doc
10 from sklearn.preprocessing import OneHotEncoder as SKOneHotEncoder
11
12 from dtoolkit._typing import TwoDimArray
13 from dtoolkit.accessor.dataframe import cols # noqa: F401
14 from dtoolkit.accessor.series import cols # noqa: F401, F811
15 from dtoolkit.transformer._compat import SKLEARN_GE_12
16
17
18 if TYPE_CHECKING:
19 from scipy.sparse import csr_matrix
20
21
22 class OneHotEncoder(SKOneHotEncoder):
23 """
24 Encode categorical features as a one-hot numeric array.
25
26 Parameters
27 ----------
28 categories_with_parent : bool, default False
29 Returned column would hook parent labels if ``True`` else
30 would be ``categories``.
31
32 sparse : bool, default False
33 Will return sparse matrix if ``True`` else will return an array.
34
35 Other parameters
36 See :obj:`sklearn.preprocessing.OneHotEncoder`.
37
38 Notes
39 -----
40 Different to :obj:`sklearn.preprocessing.OneHotEncoder`.
41 The result would return a :obj:`~pandas.DataFrame` which uses categories
42 as columns.
43
44 Examples
45 --------
46 Given a dataset with two features, we let the encoder find the unique
47 values per feature and transform the data to a binary one-hot encoding.
48
49 :obj:`~pandas.DataFrame` in, :obj:`~pandas.DataFrame` out with categories
50 as columns.
51
52 >>> from dtoolkit.transformer import OneHotEncoder
53 >>> import pandas as pd
54 >>> X = [['Male', 1], ['Female', 3], ['Female', 2]]
55 >>> df = pd.DataFrame(X, columns=['gender', 'number'])
56 >>> df
57 gender number
58 0 Male 1
59 1 Female 3
60 2 Female 2
61 >>> enc = OneHotEncoder()
62 >>> enc.fit_transform(df)
63 Female Male 1 2 3
64 0 0.0 1.0 1.0 0.0 0.0
65 1 1.0 0.0 0.0 0.0 1.0
66 2 1.0 0.0 0.0 1.0 0.0
67
68 The encoded data also could hook parent labels.
69
70 >>> enc = OneHotEncoder(categories_with_parent=True)
71 >>> enc.fit_transform(df)
72 gender_Female gender_Male number_1 number_2 number_3
73 0 0.0 1.0 1.0 0.0 0.0
74 1 1.0 0.0 0.0 0.0 1.0
75 2 1.0 0.0 0.0 1.0 0.0
76 """
77
78 @doc(SKOneHotEncoder.__init__)
79 def __init__(
80 self,
81 *,
82 sparse: bool = False,
83 sparse_output: bool = False,
84 categories_with_parent: bool = False,
85 categories="auto",
86 drop=None,
87 dtype=np.float64,
88 handle_unknown: Literal["error", "ignore", "infrequent_if_exist"] = "error",
89 min_frequency: int | float = None,
90 max_categories: int = None,
91 ):
92 # TODO: Remove `sparse` in sklearn 1.4.
93 # In the latest (>= 1.1.2) sklearn version, `sparse` is deprecated.
94 super().__init__(
95 categories=categories,
96 drop=drop,
97 dtype=dtype,
98 handle_unknown=handle_unknown,
99 min_frequency=min_frequency,
100 max_categories=max_categories,
101 **(
102 dict(sparse_output=sparse_output)
103 if SKLEARN_GE_12
104 else dict(sparse=sparse)
105 ),
106 )
107 self.categories_with_parent = categories_with_parent
108
109 # compat with sklearn lower version
110 # `_parameter_constraints` comes out at sklearn 1.2
111 # TODO: delete this condition when required sklearn version is >= 1.2
112 if hasattr(self, "_parameter_constraints"):
113 self._parameter_constraints["categories_with_parent"] = ["boolean"]
114
115 @doc(
116 SKOneHotEncoder.transform,
117 dedent(
118 """
119 Notes
120 -----
121 This would let :obj:`~pandas.DataFrame` out.
122 """,
123 ),
124 )
125 def transform(self, X: TwoDimArray) -> TwoDimArray | csr_matrix:
126 from itertools import chain
127
128 Xt = super().transform(X)
129
130 if self.sparse is False and isinstance(X, (pd.Series, pd.DataFrame)):
131 # NOTE: `get_feature_names_out` requires sklearn >= 1.0
132 categories = (
133 self.get_feature_names_out(X.cols(to_list=True))
134 if self.categories_with_parent
135 else chain.from_iterable(self.categories_)
136 )
137 return pd.DataFrame(Xt, columns=categories, index=X.index)
138
139 return Xt
140
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dtoolkit/transformer/sklearn/OneHotEncoder.py b/dtoolkit/transformer/sklearn/OneHotEncoder.py
--- a/dtoolkit/transformer/sklearn/OneHotEncoder.py
+++ b/dtoolkit/transformer/sklearn/OneHotEncoder.py
@@ -106,6 +106,9 @@
)
self.categories_with_parent = categories_with_parent
+ # TODO: Remove the following line in sklearn 1.2.
+ self.sparse_output = sparse_output
+
# compat with sklearn lower version
# `_parameter_constraints` comes out at sklearn 1.2
# TODO: delete this condition when required sklearn version is >= 1.2
| {"golden_diff": "diff --git a/dtoolkit/transformer/sklearn/OneHotEncoder.py b/dtoolkit/transformer/sklearn/OneHotEncoder.py\n--- a/dtoolkit/transformer/sklearn/OneHotEncoder.py\n+++ b/dtoolkit/transformer/sklearn/OneHotEncoder.py\n@@ -106,6 +106,9 @@\n )\n self.categories_with_parent = categories_with_parent\n \n+ # TODO: Remove the following line in sklearn 1.2.\n+ self.sparse_output = sparse_output\n+\n # compat with sklearn lower version\n # `_parameter_constraints` comes out at sklearn 1.2\n # TODO: delete this condition when required sklearn version is >= 1.2\n", "issue": "MAINT: Add `sparse_output` for `OneHotEncoder` to compat with sklearn1.2\n<!--\r\nThanks for contributing a pull request!\r\n\r\nPlease follow these standard acronyms to start the commit message:\r\n\r\n- ENH: enhancement\r\n- BUG: bug fix\r\n- DOC: documentation\r\n- TYP: type annotations\r\n- TST: addition or modification of tests\r\n- MAINT: maintenance commit (refactoring, typos, etc.)\r\n- BLD: change related to building\r\n- REL: related to releasing\r\n- API: an (incompatible) API change\r\n- DEP: deprecate something, or remove a deprecated object\r\n- DEV: development tool or utility\r\n- REV: revert an earlier commit\r\n- PERF: performance improvement\r\n- BOT: always commit via a bot\r\n- CI: related to CI or CD\r\n- CLN: Code cleanup\r\n-->\r\n\r\n- [x] closes https://github.com/Zeroto521/my-data-toolkit/actions/runs/3156312323/jobs/5135884971#step:5:960\r\n- [x] whatsnew entry\r\n\r\nIn the latest (>= 1.1.2) sklearn version, `sparse` is replaced by `sparse_out`.\r\nAnd `sparse` will be removed in 1.4\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom textwrap import dedent\nfrom typing import Literal\nfrom typing import TYPE_CHECKING\n\nimport numpy as np\nimport pandas as pd\nfrom pandas.util._decorators import doc\nfrom sklearn.preprocessing import OneHotEncoder as SKOneHotEncoder\n\nfrom dtoolkit._typing import TwoDimArray\nfrom dtoolkit.accessor.dataframe import cols # noqa: F401\nfrom dtoolkit.accessor.series import cols # noqa: F401, F811\nfrom dtoolkit.transformer._compat import SKLEARN_GE_12\n\n\nif TYPE_CHECKING:\n from scipy.sparse import csr_matrix\n\n\nclass OneHotEncoder(SKOneHotEncoder):\n \"\"\"\n Encode categorical features as a one-hot numeric array.\n\n Parameters\n ----------\n categories_with_parent : bool, default False\n Returned column would hook parent labels if ``True`` else\n would be ``categories``.\n\n sparse : bool, default False\n Will return sparse matrix if ``True`` else will return an array.\n\n Other parameters\n See :obj:`sklearn.preprocessing.OneHotEncoder`.\n\n Notes\n -----\n Different to :obj:`sklearn.preprocessing.OneHotEncoder`.\n The result would return a :obj:`~pandas.DataFrame` which uses categories\n as columns.\n\n Examples\n --------\n Given a dataset with two features, we let the encoder find the unique\n values per feature and transform the data to a binary one-hot encoding.\n\n :obj:`~pandas.DataFrame` in, :obj:`~pandas.DataFrame` out with categories\n as columns.\n\n >>> from dtoolkit.transformer import OneHotEncoder\n >>> import pandas as pd\n >>> X = [['Male', 1], ['Female', 3], ['Female', 2]]\n >>> df = pd.DataFrame(X, columns=['gender', 'number'])\n >>> df\n gender number\n 0 Male 1\n 1 Female 3\n 2 Female 2\n >>> enc = OneHotEncoder()\n >>> enc.fit_transform(df)\n Female Male 1 2 3\n 0 0.0 1.0 1.0 0.0 0.0\n 1 1.0 0.0 0.0 0.0 1.0\n 2 1.0 0.0 0.0 1.0 0.0\n\n The encoded data also could hook parent labels.\n\n >>> enc = 
OneHotEncoder(categories_with_parent=True)\n >>> enc.fit_transform(df)\n gender_Female gender_Male number_1 number_2 number_3\n 0 0.0 1.0 1.0 0.0 0.0\n 1 1.0 0.0 0.0 0.0 1.0\n 2 1.0 0.0 0.0 1.0 0.0\n \"\"\"\n\n @doc(SKOneHotEncoder.__init__)\n def __init__(\n self,\n *,\n sparse: bool = False,\n sparse_output: bool = False,\n categories_with_parent: bool = False,\n categories=\"auto\",\n drop=None,\n dtype=np.float64,\n handle_unknown: Literal[\"error\", \"ignore\", \"infrequent_if_exist\"] = \"error\",\n min_frequency: int | float = None,\n max_categories: int = None,\n ):\n # TODO: Remove `sparse` in sklearn 1.4.\n # In the latest (>= 1.1.2) sklearn version, `sparse` is deprecated.\n super().__init__(\n categories=categories,\n drop=drop,\n dtype=dtype,\n handle_unknown=handle_unknown,\n min_frequency=min_frequency,\n max_categories=max_categories,\n **(\n dict(sparse_output=sparse_output)\n if SKLEARN_GE_12\n else dict(sparse=sparse)\n ),\n )\n self.categories_with_parent = categories_with_parent\n\n # compat with sklearn lower version\n # `_parameter_constraints` comes out at sklearn 1.2\n # TODO: delete this condition when required sklearn version is >= 1.2\n if hasattr(self, \"_parameter_constraints\"):\n self._parameter_constraints[\"categories_with_parent\"] = [\"boolean\"]\n\n @doc(\n SKOneHotEncoder.transform,\n dedent(\n \"\"\"\n Notes\n -----\n This would let :obj:`~pandas.DataFrame` out.\n \"\"\",\n ),\n )\n def transform(self, X: TwoDimArray) -> TwoDimArray | csr_matrix:\n from itertools import chain\n\n Xt = super().transform(X)\n\n if self.sparse is False and isinstance(X, (pd.Series, pd.DataFrame)):\n # NOTE: `get_feature_names_out` requires sklearn >= 1.0\n categories = (\n self.get_feature_names_out(X.cols(to_list=True))\n if self.categories_with_parent\n else chain.from_iterable(self.categories_)\n )\n return pd.DataFrame(Xt, columns=categories, index=X.index)\n\n return Xt\n", "path": "dtoolkit/transformer/sklearn/OneHotEncoder.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom textwrap import dedent\nfrom typing import Literal\nfrom typing import TYPE_CHECKING\n\nimport numpy as np\nimport pandas as pd\nfrom pandas.util._decorators import doc\nfrom sklearn.preprocessing import OneHotEncoder as SKOneHotEncoder\n\nfrom dtoolkit._typing import TwoDimArray\nfrom dtoolkit.accessor.dataframe import cols # noqa: F401\nfrom dtoolkit.accessor.series import cols # noqa: F401, F811\nfrom dtoolkit.transformer._compat import SKLEARN_GE_12\n\n\nif TYPE_CHECKING:\n from scipy.sparse import csr_matrix\n\n\nclass OneHotEncoder(SKOneHotEncoder):\n \"\"\"\n Encode categorical features as a one-hot numeric array.\n\n Parameters\n ----------\n categories_with_parent : bool, default False\n Returned column would hook parent labels if ``True`` else\n would be ``categories``.\n\n sparse : bool, default False\n Will return sparse matrix if ``True`` else will return an array.\n\n Other parameters\n See :obj:`sklearn.preprocessing.OneHotEncoder`.\n\n Notes\n -----\n Different to :obj:`sklearn.preprocessing.OneHotEncoder`.\n The result would return a :obj:`~pandas.DataFrame` which uses categories\n as columns.\n\n Examples\n --------\n Given a dataset with two features, we let the encoder find the unique\n values per feature and transform the data to a binary one-hot encoding.\n\n :obj:`~pandas.DataFrame` in, :obj:`~pandas.DataFrame` out with categories\n as columns.\n\n >>> from dtoolkit.transformer import OneHotEncoder\n >>> import pandas as pd\n >>> X = [['Male', 
1], ['Female', 3], ['Female', 2]]\n >>> df = pd.DataFrame(X, columns=['gender', 'number'])\n >>> df\n gender number\n 0 Male 1\n 1 Female 3\n 2 Female 2\n >>> enc = OneHotEncoder()\n >>> enc.fit_transform(df)\n Female Male 1 2 3\n 0 0.0 1.0 1.0 0.0 0.0\n 1 1.0 0.0 0.0 0.0 1.0\n 2 1.0 0.0 0.0 1.0 0.0\n\n The encoded data also could hook parent labels.\n\n >>> enc = OneHotEncoder(categories_with_parent=True)\n >>> enc.fit_transform(df)\n gender_Female gender_Male number_1 number_2 number_3\n 0 0.0 1.0 1.0 0.0 0.0\n 1 1.0 0.0 0.0 0.0 1.0\n 2 1.0 0.0 0.0 1.0 0.0\n \"\"\"\n\n @doc(SKOneHotEncoder.__init__)\n def __init__(\n self,\n *,\n sparse: bool = False,\n sparse_output: bool = False,\n categories_with_parent: bool = False,\n categories=\"auto\",\n drop=None,\n dtype=np.float64,\n handle_unknown: Literal[\"error\", \"ignore\", \"infrequent_if_exist\"] = \"error\",\n min_frequency: int | float = None,\n max_categories: int = None,\n ):\n # TODO: Remove `sparse` in sklearn 1.4.\n # In the latest (>= 1.1.2) sklearn version, `sparse` is deprecated.\n super().__init__(\n categories=categories,\n drop=drop,\n dtype=dtype,\n handle_unknown=handle_unknown,\n min_frequency=min_frequency,\n max_categories=max_categories,\n **(\n dict(sparse_output=sparse_output)\n if SKLEARN_GE_12\n else dict(sparse=sparse)\n ),\n )\n self.categories_with_parent = categories_with_parent\n\n # TODO: Remove the following line in sklearn 1.2.\n self.sparse_output = sparse_output\n\n # compat with sklearn lower version\n # `_parameter_constraints` comes out at sklearn 1.2\n # TODO: delete this condition when required sklearn version is >= 1.2\n if hasattr(self, \"_parameter_constraints\"):\n self._parameter_constraints[\"categories_with_parent\"] = [\"boolean\"]\n\n @doc(\n SKOneHotEncoder.transform,\n dedent(\n \"\"\"\n Notes\n -----\n This would let :obj:`~pandas.DataFrame` out.\n \"\"\",\n ),\n )\n def transform(self, X: TwoDimArray) -> TwoDimArray | csr_matrix:\n from itertools import chain\n\n Xt = super().transform(X)\n\n if self.sparse is False and isinstance(X, (pd.Series, pd.DataFrame)):\n # NOTE: `get_feature_names_out` requires sklearn >= 1.0\n categories = (\n self.get_feature_names_out(X.cols(to_list=True))\n if self.categories_with_parent\n else chain.from_iterable(self.categories_)\n )\n return pd.DataFrame(Xt, columns=categories, index=X.index)\n\n return Xt\n", "path": "dtoolkit/transformer/sklearn/OneHotEncoder.py"}]} | 2,023 | 161 |
gh_patches_debug_3 | rasdani/github-patches | git_diff | plotly__dash-2553 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] Flask 2.2.3 dependency has HIGH security vulnerability (fixed in 2.2.5)
Issue #2538 pinned the upper bound of the Flask dependency to 2.2.3. However Flask 2.2.3 is affected by a HIGH security vulnerability that is fixed in Flask 2.2.5. See https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-30861
Debian 11, Python 3.11 (from Python official 3.11 Docker image)
```
# pip install dash
Collecting dash
Downloading dash-2.10.1-py3-none-any.whl (10.3 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 10.3/10.3 MB 14.1 MB/s eta 0:00:00
Collecting Flask<=2.2.3,>=1.0.4 (from dash)
Downloading Flask-2.2.3-py3-none-any.whl (101 kB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 101.8/101.8 kB 17.0 MB/s eta 0:00:00
```
```
dash 2.10.1
dash-core-components 2.0.0
dash-html-components 2.0.0
dash-table 5.0.0
```
**Describe the bug**
Dash installs a vulnerable version of Flask and dependency scans flag the vulnerability.
**Expected behavior**
No known and fixed security vulnerabilities added. Perhaps pin to 2.2.* instead of the specific 2.2.3 version, where future pins will find new security issues.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dash/version.py`
Content:
```
1 __version__ = "2.10.1"
2
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dash/version.py b/dash/version.py
--- a/dash/version.py
+++ b/dash/version.py
@@ -1 +1 @@
-__version__ = "2.10.1"
+__version__ = "2.10.2"
| {"golden_diff": "diff --git a/dash/version.py b/dash/version.py\n--- a/dash/version.py\n+++ b/dash/version.py\n@@ -1 +1 @@\n-__version__ = \"2.10.1\"\n+__version__ = \"2.10.2\"\n", "issue": "[BUG] Flask 2.2.3 dependency has HIGH security vulnerability (fixed in 2.2.5)\nIssue #2538 pinned the upper bound of the Flask dependency to 2.2.3. However Flask 2.2.3 is affected by a HIGH security vulnerability that is fixed in Flask 2.2.5. See https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-30861\r\n\r\nDebian 11, Python 3.11 (from Python official 3.11 Docker image)\r\n```\r\n# pip install dash\r\nCollecting dash\r\n Downloading dash-2.10.1-py3-none-any.whl (10.3 MB)\r\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 10.3/10.3 MB 14.1 MB/s eta 0:00:00\r\nCollecting Flask<=2.2.3,>=1.0.4 (from dash)\r\n Downloading Flask-2.2.3-py3-none-any.whl (101 kB)\r\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 101.8/101.8 kB 17.0 MB/s eta 0:00:00\r\n```\r\n\r\n```\r\ndash 2.10.1\r\ndash-core-components 2.0.0\r\ndash-html-components 2.0.0\r\ndash-table 5.0.0\r\n```\r\n\r\n**Describe the bug**\r\n\r\nDash installs a vulnerable version of Flask and dependency scans flag the vulnerability.\r\n\r\n**Expected behavior**\r\n\r\nNo known and fixed security vulnerabilities added. Perhaps Pin to 2.2.* instead of specific 2.2.3 version where future pins will find new security issues.\r\n\r\n\n", "before_files": [{"content": "__version__ = \"2.10.1\"\n", "path": "dash/version.py"}], "after_files": [{"content": "__version__ = \"2.10.2\"\n", "path": "dash/version.py"}]} | 692 | 60 |
gh_patches_debug_41606 | rasdani/github-patches | git_diff | canonical__snapcraft-4622 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support project hooks for core24 snaps
### What needs to get done
The `PackageService` for core24 snaps should support project hooks. The behavior should be the same as core22.
The failures can be found by running `spread google:ubuntu-24.04-64:tests/spread/general/hooks/`. See failing logs [here](https://paste.ubuntu.com/p/CjBwVKcwyR/).
### Why it needs to get done
To support building core24 snaps with craft-application
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `snapcraft/services/package.py`
Content:
```
1 # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2 #
3 # Copyright 2023 Canonical Ltd.
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU General Public License version 3 as
7 # published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 """Snapcraft Package service."""
18
19 from __future__ import annotations
20
21 import os
22 import pathlib
23 import shutil
24 from typing import TYPE_CHECKING, cast
25
26 from craft_application import AppMetadata, PackageService
27 from overrides import override
28
29 from snapcraft import errors, linters, models, pack, utils
30 from snapcraft.linters import LinterStatus
31 from snapcraft.meta import snap_yaml
32 from snapcraft.services import Lifecycle
33 from snapcraft.utils import process_version
34
35 if TYPE_CHECKING:
36 from snapcraft.services import SnapcraftServiceFactory
37
38
39 class Package(PackageService):
40 """Package service subclass for Snapcraft."""
41
42 _project: models.Project
43
44 def __init__( # noqa: PLR0913 (Too many arguments)
45 self,
46 app: AppMetadata,
47 services: SnapcraftServiceFactory,
48 *,
49 project: models.Project,
50 snapcraft_yaml_path: pathlib.Path,
51 platform: str | None,
52 build_for: str,
53 ) -> None:
54 super().__init__(app, services, project=project)
55 self._platform = platform
56 self._build_for = build_for
57 self._snapcraft_yaml_path = snapcraft_yaml_path
58
59 @override
60 def pack(self, prime_dir: pathlib.Path, dest: pathlib.Path) -> list[pathlib.Path]:
61 """Create one or more packages as appropriate.
62
63 :param prime_dir: Path to the directory to pack.
64 :param dest: Directory into which to write the package(s).
65 :returns: A list of paths to created packages.
66 """
67 issues = linters.run_linters(prime_dir, lint=self._project.lint)
68 status = linters.report(issues, intermediate=True)
69
70 # In case of linter errors, stop execution and return the error code.
71 if status in (LinterStatus.ERRORS, LinterStatus.FATAL):
72 raise errors.LinterError("Linter errors found", exit_code=status)
73
74 return [
75 pathlib.Path(
76 pack.pack_snap(
77 prime_dir,
78 output=str(dest),
79 compression=self._project.compression,
80 name=self._project.name,
81 version=process_version(self._project.version),
82 target_arch=self._build_for,
83 )
84 )
85 ]
86
87 @override
88 def write_metadata(self, path: pathlib.Path) -> None:
89 """Write the project metadata to metadata.yaml in the given directory.
90
91 :param path: The path to the prime directory.
92 """
93 meta_dir = path / "meta"
94 meta_dir.mkdir(parents=True, exist_ok=True)
95 self.metadata.to_yaml_file(meta_dir / "snap.yaml")
96
97 enable_manifest = utils.strtobool(os.getenv("SNAPCRAFT_BUILD_INFO", "n"))
98
99 if enable_manifest:
100 snap_dir = path / "snap"
101 snap_dir.mkdir(parents=True, exist_ok=True)
102 lifecycle = cast(Lifecycle, self._services.lifecycle)
103 manifest = lifecycle.generate_manifest()
104 manifest.to_yaml_file(snap_dir / "manifest.yaml")
105
106 shutil.copy(self._snapcraft_yaml_path, snap_dir)
107
108 @property
109 def metadata(self) -> snap_yaml.SnapMetadata:
110 """Get the metadata model for this project."""
111 return snap_yaml.get_metadata_from_project(
112 self._project, self._services.lifecycle.prime_dir, arch=self._build_for
113 )
114
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/snapcraft/services/package.py b/snapcraft/services/package.py
--- a/snapcraft/services/package.py
+++ b/snapcraft/services/package.py
@@ -29,6 +29,7 @@
from snapcraft import errors, linters, models, pack, utils
from snapcraft.linters import LinterStatus
from snapcraft.meta import snap_yaml
+from snapcraft.parts.setup_assets import setup_assets
from snapcraft.services import Lifecycle
from snapcraft.utils import process_version
@@ -84,6 +85,23 @@
)
]
+ def _get_assets_dir(self) -> pathlib.Path:
+ """Return a snapcraft assets directory.
+
+ Asset directories can exist in:
+
+ - <PROJECT_ROOT>/snap
+ - <PROJECT_ROOT>/build-aux/snap
+ """
+ project_dir = self._services.lifecycle.project_info.project_dir
+ for asset_reldir in ("snap", "build-aux/snap"):
+ asset_dir = project_dir / asset_reldir
+ if asset_dir.exists():
+ return asset_dir
+
+ # This is for backwards compatibility with setup_assets(...)
+ return project_dir / "snap"
+
@override
def write_metadata(self, path: pathlib.Path) -> None:
"""Write the project metadata to metadata.yaml in the given directory.
@@ -105,9 +123,79 @@
shutil.copy(self._snapcraft_yaml_path, snap_dir)
+ assets_dir = self._get_assets_dir()
+ setup_assets(
+ self._project,
+ assets_dir=assets_dir,
+ project_dir=self._services.lifecycle.project_info.project_dir,
+ prime_dir=path,
+ meta_directory_handler=meta_directory_handler,
+ )
+
@property
def metadata(self) -> snap_yaml.SnapMetadata:
"""Get the metadata model for this project."""
return snap_yaml.get_metadata_from_project(
self._project, self._services.lifecycle.prime_dir, arch=self._build_for
)
+
+
+def _hardlink_or_copy(source: pathlib.Path, destination: pathlib.Path) -> bool:
+ """Try to hardlink and fallback to copy if it fails.
+
+ :param source: the source path.
+ :param destination: the destination path.
+ :returns: True if a hardlink was done or False for copy.
+ """
+ # Unlink the destination to avoid link failures
+ destination.unlink(missing_ok=True)
+
+ try:
+ destination.hardlink_to(source)
+ except OSError as os_error:
+ # Cross device link
+ if os_error.errno != 18:
+ raise
+ shutil.copy(source, destination)
+ return False
+
+ return True
+
+
+def meta_directory_handler(assets_dir: pathlib.Path, path: pathlib.Path):
+ """Handle hooks and gui assets from Snapcraft.
+
+ :param assets_dir: directory with project assets.
+ :param path: directory to write assets to.
+ """
+ meta_dir = path / "meta"
+ built_snap_hooks = path / "snap" / "hooks"
+ hooks_project_dir = assets_dir / "hooks"
+
+ hooks_meta_dir = meta_dir / "hooks"
+
+ if built_snap_hooks.is_dir():
+ hooks_meta_dir.mkdir(parents=True, exist_ok=True)
+ for hook in built_snap_hooks.iterdir():
+ meta_dir_hook = hooks_meta_dir / hook.name
+ # Remove to always refresh to the latest
+ meta_dir_hook.unlink(missing_ok=True)
+ meta_dir_hook.hardlink_to(hook)
+
+ # Overwrite any built hooks with project level ones
+ if hooks_project_dir.is_dir():
+ hooks_meta_dir.mkdir(parents=True, exist_ok=True)
+ for hook in hooks_project_dir.iterdir():
+ meta_dir_hook = hooks_meta_dir / hook.name
+
+ _hardlink_or_copy(hook, meta_dir_hook)
+
+ # Write any gui assets
+ gui_project_dir = assets_dir / "gui"
+ gui_meta_dir = meta_dir / "gui"
+ if gui_project_dir.is_dir():
+ gui_meta_dir.mkdir(parents=True, exist_ok=True)
+ for gui in gui_project_dir.iterdir():
+ meta_dir_gui = gui_meta_dir / gui.name
+
+ _hardlink_or_copy(gui, meta_dir_gui)
| {"golden_diff": "diff --git a/snapcraft/services/package.py b/snapcraft/services/package.py\n--- a/snapcraft/services/package.py\n+++ b/snapcraft/services/package.py\n@@ -29,6 +29,7 @@\n from snapcraft import errors, linters, models, pack, utils\n from snapcraft.linters import LinterStatus\n from snapcraft.meta import snap_yaml\n+from snapcraft.parts.setup_assets import setup_assets\n from snapcraft.services import Lifecycle\n from snapcraft.utils import process_version\n \n@@ -84,6 +85,23 @@\n )\n ]\n \n+ def _get_assets_dir(self) -> pathlib.Path:\n+ \"\"\"Return a snapcraft assets directory.\n+\n+ Asset directories can exist in:\n+\n+ - <PROJECT_ROOT>/snap\n+ - <PROJECT_ROOT>/build-aux/snap\n+ \"\"\"\n+ project_dir = self._services.lifecycle.project_info.project_dir\n+ for asset_reldir in (\"snap\", \"build-aux/snap\"):\n+ asset_dir = project_dir / asset_reldir\n+ if asset_dir.exists():\n+ return asset_dir\n+\n+ # This is for backwards compatibility with setup_assets(...)\n+ return project_dir / \"snap\"\n+\n @override\n def write_metadata(self, path: pathlib.Path) -> None:\n \"\"\"Write the project metadata to metadata.yaml in the given directory.\n@@ -105,9 +123,79 @@\n \n shutil.copy(self._snapcraft_yaml_path, snap_dir)\n \n+ assets_dir = self._get_assets_dir()\n+ setup_assets(\n+ self._project,\n+ assets_dir=assets_dir,\n+ project_dir=self._services.lifecycle.project_info.project_dir,\n+ prime_dir=path,\n+ meta_directory_handler=meta_directory_handler,\n+ )\n+\n @property\n def metadata(self) -> snap_yaml.SnapMetadata:\n \"\"\"Get the metadata model for this project.\"\"\"\n return snap_yaml.get_metadata_from_project(\n self._project, self._services.lifecycle.prime_dir, arch=self._build_for\n )\n+\n+\n+def _hardlink_or_copy(source: pathlib.Path, destination: pathlib.Path) -> bool:\n+ \"\"\"Try to hardlink and fallback to copy if it fails.\n+\n+ :param source: the source path.\n+ :param destination: the destination path.\n+ :returns: True if a hardlink was done or False for copy.\n+ \"\"\"\n+ # Unlink the destination to avoid link failures\n+ destination.unlink(missing_ok=True)\n+\n+ try:\n+ destination.hardlink_to(source)\n+ except OSError as os_error:\n+ # Cross device link\n+ if os_error.errno != 18:\n+ raise\n+ shutil.copy(source, destination)\n+ return False\n+\n+ return True\n+\n+\n+def meta_directory_handler(assets_dir: pathlib.Path, path: pathlib.Path):\n+ \"\"\"Handle hooks and gui assets from Snapcraft.\n+\n+ :param assets_dir: directory with project assets.\n+ :param path: directory to write assets to.\n+ \"\"\"\n+ meta_dir = path / \"meta\"\n+ built_snap_hooks = path / \"snap\" / \"hooks\"\n+ hooks_project_dir = assets_dir / \"hooks\"\n+\n+ hooks_meta_dir = meta_dir / \"hooks\"\n+\n+ if built_snap_hooks.is_dir():\n+ hooks_meta_dir.mkdir(parents=True, exist_ok=True)\n+ for hook in built_snap_hooks.iterdir():\n+ meta_dir_hook = hooks_meta_dir / hook.name\n+ # Remove to always refresh to the latest\n+ meta_dir_hook.unlink(missing_ok=True)\n+ meta_dir_hook.hardlink_to(hook)\n+\n+ # Overwrite any built hooks with project level ones\n+ if hooks_project_dir.is_dir():\n+ hooks_meta_dir.mkdir(parents=True, exist_ok=True)\n+ for hook in hooks_project_dir.iterdir():\n+ meta_dir_hook = hooks_meta_dir / hook.name\n+\n+ _hardlink_or_copy(hook, meta_dir_hook)\n+\n+ # Write any gui assets\n+ gui_project_dir = assets_dir / \"gui\"\n+ gui_meta_dir = meta_dir / \"gui\"\n+ if gui_project_dir.is_dir():\n+ gui_meta_dir.mkdir(parents=True, exist_ok=True)\n+ for gui in 
gui_project_dir.iterdir():\n+ meta_dir_gui = gui_meta_dir / gui.name\n+\n+ _hardlink_or_copy(gui, meta_dir_gui)\n", "issue": "Support project hooks for core24 snaps\n### What needs to get done\n\nThe `PackageService` for core24 snaps should support project hooks. The behavior should be the same as core22.\r\n\r\nThe failures can be found by running `spread google:ubuntu-24.04-64:tests/spread/general/hooks/`. See failing logs [here](https://paste.ubuntu.com/p/CjBwVKcwyR/).\n\n### Why it needs to get done\n\nTo support building core24 snaps with craft-application\n", "before_files": [{"content": "# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-\n#\n# Copyright 2023 Canonical Ltd.\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License version 3 as\n# published by the Free Software Foundation.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Snapcraft Package service.\"\"\"\n\nfrom __future__ import annotations\n\nimport os\nimport pathlib\nimport shutil\nfrom typing import TYPE_CHECKING, cast\n\nfrom craft_application import AppMetadata, PackageService\nfrom overrides import override\n\nfrom snapcraft import errors, linters, models, pack, utils\nfrom snapcraft.linters import LinterStatus\nfrom snapcraft.meta import snap_yaml\nfrom snapcraft.services import Lifecycle\nfrom snapcraft.utils import process_version\n\nif TYPE_CHECKING:\n from snapcraft.services import SnapcraftServiceFactory\n\n\nclass Package(PackageService):\n \"\"\"Package service subclass for Snapcraft.\"\"\"\n\n _project: models.Project\n\n def __init__( # noqa: PLR0913 (Too many arguments)\n self,\n app: AppMetadata,\n services: SnapcraftServiceFactory,\n *,\n project: models.Project,\n snapcraft_yaml_path: pathlib.Path,\n platform: str | None,\n build_for: str,\n ) -> None:\n super().__init__(app, services, project=project)\n self._platform = platform\n self._build_for = build_for\n self._snapcraft_yaml_path = snapcraft_yaml_path\n\n @override\n def pack(self, prime_dir: pathlib.Path, dest: pathlib.Path) -> list[pathlib.Path]:\n \"\"\"Create one or more packages as appropriate.\n\n :param prime_dir: Path to the directory to pack.\n :param dest: Directory into which to write the package(s).\n :returns: A list of paths to created packages.\n \"\"\"\n issues = linters.run_linters(prime_dir, lint=self._project.lint)\n status = linters.report(issues, intermediate=True)\n\n # In case of linter errors, stop execution and return the error code.\n if status in (LinterStatus.ERRORS, LinterStatus.FATAL):\n raise errors.LinterError(\"Linter errors found\", exit_code=status)\n\n return [\n pathlib.Path(\n pack.pack_snap(\n prime_dir,\n output=str(dest),\n compression=self._project.compression,\n name=self._project.name,\n version=process_version(self._project.version),\n target_arch=self._build_for,\n )\n )\n ]\n\n @override\n def write_metadata(self, path: pathlib.Path) -> None:\n \"\"\"Write the project metadata to metadata.yaml in the given directory.\n\n :param path: The path to the prime directory.\n \"\"\"\n meta_dir = path / \"meta\"\n meta_dir.mkdir(parents=True, exist_ok=True)\n 
self.metadata.to_yaml_file(meta_dir / \"snap.yaml\")\n\n enable_manifest = utils.strtobool(os.getenv(\"SNAPCRAFT_BUILD_INFO\", \"n\"))\n\n if enable_manifest:\n snap_dir = path / \"snap\"\n snap_dir.mkdir(parents=True, exist_ok=True)\n lifecycle = cast(Lifecycle, self._services.lifecycle)\n manifest = lifecycle.generate_manifest()\n manifest.to_yaml_file(snap_dir / \"manifest.yaml\")\n\n shutil.copy(self._snapcraft_yaml_path, snap_dir)\n\n @property\n def metadata(self) -> snap_yaml.SnapMetadata:\n \"\"\"Get the metadata model for this project.\"\"\"\n return snap_yaml.get_metadata_from_project(\n self._project, self._services.lifecycle.prime_dir, arch=self._build_for\n )\n", "path": "snapcraft/services/package.py"}], "after_files": [{"content": "# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-\n#\n# Copyright 2023 Canonical Ltd.\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License version 3 as\n# published by the Free Software Foundation.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Snapcraft Package service.\"\"\"\n\nfrom __future__ import annotations\n\nimport os\nimport pathlib\nimport shutil\nfrom typing import TYPE_CHECKING, cast\n\nfrom craft_application import AppMetadata, PackageService\nfrom overrides import override\n\nfrom snapcraft import errors, linters, models, pack, utils\nfrom snapcraft.linters import LinterStatus\nfrom snapcraft.meta import snap_yaml\nfrom snapcraft.parts.setup_assets import setup_assets\nfrom snapcraft.services import Lifecycle\nfrom snapcraft.utils import process_version\n\nif TYPE_CHECKING:\n from snapcraft.services import SnapcraftServiceFactory\n\n\nclass Package(PackageService):\n \"\"\"Package service subclass for Snapcraft.\"\"\"\n\n _project: models.Project\n\n def __init__( # noqa: PLR0913 (Too many arguments)\n self,\n app: AppMetadata,\n services: SnapcraftServiceFactory,\n *,\n project: models.Project,\n snapcraft_yaml_path: pathlib.Path,\n platform: str | None,\n build_for: str,\n ) -> None:\n super().__init__(app, services, project=project)\n self._platform = platform\n self._build_for = build_for\n self._snapcraft_yaml_path = snapcraft_yaml_path\n\n @override\n def pack(self, prime_dir: pathlib.Path, dest: pathlib.Path) -> list[pathlib.Path]:\n \"\"\"Create one or more packages as appropriate.\n\n :param prime_dir: Path to the directory to pack.\n :param dest: Directory into which to write the package(s).\n :returns: A list of paths to created packages.\n \"\"\"\n issues = linters.run_linters(prime_dir, lint=self._project.lint)\n status = linters.report(issues, intermediate=True)\n\n # In case of linter errors, stop execution and return the error code.\n if status in (LinterStatus.ERRORS, LinterStatus.FATAL):\n raise errors.LinterError(\"Linter errors found\", exit_code=status)\n\n return [\n pathlib.Path(\n pack.pack_snap(\n prime_dir,\n output=str(dest),\n compression=self._project.compression,\n name=self._project.name,\n version=process_version(self._project.version),\n target_arch=self._build_for,\n )\n )\n ]\n\n def _get_assets_dir(self) -> pathlib.Path:\n \"\"\"Return a snapcraft assets 
directory.\n\n Asset directories can exist in:\n\n - <PROJECT_ROOT>/snap\n - <PROJECT_ROOT>/build-aux/snap\n \"\"\"\n project_dir = self._services.lifecycle.project_info.project_dir\n for asset_reldir in (\"snap\", \"build-aux/snap\"):\n asset_dir = project_dir / asset_reldir\n if asset_dir.exists():\n return asset_dir\n\n # This is for backwards compatibility with setup_assets(...)\n return project_dir / \"snap\"\n\n @override\n def write_metadata(self, path: pathlib.Path) -> None:\n \"\"\"Write the project metadata to metadata.yaml in the given directory.\n\n :param path: The path to the prime directory.\n \"\"\"\n meta_dir = path / \"meta\"\n meta_dir.mkdir(parents=True, exist_ok=True)\n self.metadata.to_yaml_file(meta_dir / \"snap.yaml\")\n\n enable_manifest = utils.strtobool(os.getenv(\"SNAPCRAFT_BUILD_INFO\", \"n\"))\n\n if enable_manifest:\n snap_dir = path / \"snap\"\n snap_dir.mkdir(parents=True, exist_ok=True)\n lifecycle = cast(Lifecycle, self._services.lifecycle)\n manifest = lifecycle.generate_manifest()\n manifest.to_yaml_file(snap_dir / \"manifest.yaml\")\n\n shutil.copy(self._snapcraft_yaml_path, snap_dir)\n\n assets_dir = self._get_assets_dir()\n setup_assets(\n self._project,\n assets_dir=assets_dir,\n project_dir=self._services.lifecycle.project_info.project_dir,\n prime_dir=path,\n meta_directory_handler=meta_directory_handler,\n )\n\n @property\n def metadata(self) -> snap_yaml.SnapMetadata:\n \"\"\"Get the metadata model for this project.\"\"\"\n return snap_yaml.get_metadata_from_project(\n self._project, self._services.lifecycle.prime_dir, arch=self._build_for\n )\n\n\ndef _hardlink_or_copy(source: pathlib.Path, destination: pathlib.Path) -> bool:\n \"\"\"Try to hardlink and fallback to copy if it fails.\n\n :param source: the source path.\n :param destination: the destination path.\n :returns: True if a hardlink was done or False for copy.\n \"\"\"\n # Unlink the destination to avoid link failures\n destination.unlink(missing_ok=True)\n\n try:\n destination.hardlink_to(source)\n except OSError as os_error:\n # Cross device link\n if os_error.errno != 18:\n raise\n shutil.copy(source, destination)\n return False\n\n return True\n\n\ndef meta_directory_handler(assets_dir: pathlib.Path, path: pathlib.Path):\n \"\"\"Handle hooks and gui assets from Snapcraft.\n\n :param assets_dir: directory with project assets.\n :param path: directory to write assets to.\n \"\"\"\n meta_dir = path / \"meta\"\n built_snap_hooks = path / \"snap\" / \"hooks\"\n hooks_project_dir = assets_dir / \"hooks\"\n\n hooks_meta_dir = meta_dir / \"hooks\"\n\n if built_snap_hooks.is_dir():\n hooks_meta_dir.mkdir(parents=True, exist_ok=True)\n for hook in built_snap_hooks.iterdir():\n meta_dir_hook = hooks_meta_dir / hook.name\n # Remove to always refresh to the latest\n meta_dir_hook.unlink(missing_ok=True)\n meta_dir_hook.hardlink_to(hook)\n\n # Overwrite any built hooks with project level ones\n if hooks_project_dir.is_dir():\n hooks_meta_dir.mkdir(parents=True, exist_ok=True)\n for hook in hooks_project_dir.iterdir():\n meta_dir_hook = hooks_meta_dir / hook.name\n\n _hardlink_or_copy(hook, meta_dir_hook)\n\n # Write any gui assets\n gui_project_dir = assets_dir / \"gui\"\n gui_meta_dir = meta_dir / \"gui\"\n if gui_project_dir.is_dir():\n gui_meta_dir.mkdir(parents=True, exist_ok=True)\n for gui in gui_project_dir.iterdir():\n meta_dir_gui = gui_meta_dir / gui.name\n\n _hardlink_or_copy(gui, meta_dir_gui)\n", "path": "snapcraft/services/package.py"}]} | 1,472 | 969 |
gh_patches_debug_9672 | rasdani/github-patches | git_diff | svthalia__concrexit-2712 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Paparazcie committee members cannot edit promo requests
Members of the paparazcie cannot edit the promo requests in the back-end. I can, so it might be an issue with permissions.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/promotion/admin.py`
Content:
```
1 """Registers admin interfaces for the models defined in this module."""
2 from django.contrib import admin
3 from django.contrib.admin import ModelAdmin
4
5 from events.services import is_organiser
6 from promotion.forms import PromotionRequestForm
7
8 from .models import PromotionChannel, PromotionRequest
9
10
11 @admin.register(PromotionRequest)
12 class PromotionRequestAdmin(admin.ModelAdmin):
13 """This manages the admin interface for the model items."""
14
15 list_display = ("event", "publish_date", "channel", "assigned_to", "status")
16 list_filter = (
17 "publish_date",
18 "assigned_to",
19 "status",
20 )
21 date_hierarchy = "publish_date"
22 form = PromotionRequestForm
23 actions = ["mark_not_started", "mark_started", "mark_finished", "mark_published"]
24
25 def has_change_permission(self, request, obj=None):
26 if obj is not None and not is_organiser(request.member, obj.event):
27 return False
28 return super().has_change_permission(request, obj)
29
30 def mark_not_started(self, request, queryset):
31 """Change the status of the event to published."""
32 self._change_published(queryset, PromotionRequest.NOT_STARTED)
33
34 mark_not_started.short_description = "Mark requests as not started"
35
36 def mark_started(self, request, queryset):
37 """Change the status of the event to published."""
38 self._change_published(queryset, PromotionRequest.STARTED)
39
40 mark_started.short_description = "Mark requests as started"
41
42 def mark_finished(self, request, queryset):
43 """Change the status of the event to published."""
44 self._change_published(queryset, PromotionRequest.FINISHED)
45
46 mark_finished.short_description = "Mark requests as finished"
47
48 def mark_published(self, request, queryset):
49 """Change the status of the event to published."""
50 self._change_published(queryset, PromotionRequest.PUBLISHED)
51
52 mark_published.short_description = "Mark requests as published"
53
54 @staticmethod
55 def _change_published(queryset, status):
56 queryset.update(status=status)
57
58
59 @admin.register(PromotionChannel)
60 class PromotionChannelAdmin(ModelAdmin):
61 pass
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/website/promotion/admin.py b/website/promotion/admin.py
--- a/website/promotion/admin.py
+++ b/website/promotion/admin.py
@@ -23,8 +23,8 @@
actions = ["mark_not_started", "mark_started", "mark_finished", "mark_published"]
def has_change_permission(self, request, obj=None):
- if obj is not None and not is_organiser(request.member, obj.event):
- return False
+ if obj is not None and obj.event and is_organiser(request.member, obj.event):
+ return True
return super().has_change_permission(request, obj)
def mark_not_started(self, request, queryset):
| {"golden_diff": "diff --git a/website/promotion/admin.py b/website/promotion/admin.py\n--- a/website/promotion/admin.py\n+++ b/website/promotion/admin.py\n@@ -23,8 +23,8 @@\n actions = [\"mark_not_started\", \"mark_started\", \"mark_finished\", \"mark_published\"]\n \n def has_change_permission(self, request, obj=None):\n- if obj is not None and not is_organiser(request.member, obj.event):\n- return False\n+ if obj is not None and obj.event and is_organiser(request.member, obj.event):\n+ return True\n return super().has_change_permission(request, obj)\n \n def mark_not_started(self, request, queryset):\n", "issue": "Paparazcie committee members cannot edit promo requests\nMembers of the paparazcie cannot edit the promo requests in the back-end. I can, so it might be an issue with permissions. \n", "before_files": [{"content": "\"\"\"Registers admin interfaces for the models defined in this module.\"\"\"\nfrom django.contrib import admin\nfrom django.contrib.admin import ModelAdmin\n\nfrom events.services import is_organiser\nfrom promotion.forms import PromotionRequestForm\n\nfrom .models import PromotionChannel, PromotionRequest\n\n\[email protected](PromotionRequest)\nclass PromotionRequestAdmin(admin.ModelAdmin):\n \"\"\"This manages the admin interface for the model items.\"\"\"\n\n list_display = (\"event\", \"publish_date\", \"channel\", \"assigned_to\", \"status\")\n list_filter = (\n \"publish_date\",\n \"assigned_to\",\n \"status\",\n )\n date_hierarchy = \"publish_date\"\n form = PromotionRequestForm\n actions = [\"mark_not_started\", \"mark_started\", \"mark_finished\", \"mark_published\"]\n\n def has_change_permission(self, request, obj=None):\n if obj is not None and not is_organiser(request.member, obj.event):\n return False\n return super().has_change_permission(request, obj)\n\n def mark_not_started(self, request, queryset):\n \"\"\"Change the status of the event to published.\"\"\"\n self._change_published(queryset, PromotionRequest.NOT_STARTED)\n\n mark_not_started.short_description = \"Mark requests as not started\"\n\n def mark_started(self, request, queryset):\n \"\"\"Change the status of the event to published.\"\"\"\n self._change_published(queryset, PromotionRequest.STARTED)\n\n mark_started.short_description = \"Mark requests as started\"\n\n def mark_finished(self, request, queryset):\n \"\"\"Change the status of the event to published.\"\"\"\n self._change_published(queryset, PromotionRequest.FINISHED)\n\n mark_finished.short_description = \"Mark requests as finished\"\n\n def mark_published(self, request, queryset):\n \"\"\"Change the status of the event to published.\"\"\"\n self._change_published(queryset, PromotionRequest.PUBLISHED)\n\n mark_published.short_description = \"Mark requests as published\"\n\n @staticmethod\n def _change_published(queryset, status):\n queryset.update(status=status)\n\n\[email protected](PromotionChannel)\nclass PromotionChannelAdmin(ModelAdmin):\n pass\n", "path": "website/promotion/admin.py"}], "after_files": [{"content": "\"\"\"Registers admin interfaces for the models defined in this module.\"\"\"\nfrom django.contrib import admin\nfrom django.contrib.admin import ModelAdmin\n\nfrom events.services import is_organiser\nfrom promotion.forms import PromotionRequestForm\n\nfrom .models import PromotionChannel, PromotionRequest\n\n\[email protected](PromotionRequest)\nclass PromotionRequestAdmin(admin.ModelAdmin):\n \"\"\"This manages the admin interface for the model items.\"\"\"\n\n list_display = (\"event\", \"publish_date\", 
\"channel\", \"assigned_to\", \"status\")\n list_filter = (\n \"publish_date\",\n \"assigned_to\",\n \"status\",\n )\n date_hierarchy = \"publish_date\"\n form = PromotionRequestForm\n actions = [\"mark_not_started\", \"mark_started\", \"mark_finished\", \"mark_published\"]\n\n def has_change_permission(self, request, obj=None):\n if obj is not None and obj.event and is_organiser(request.member, obj.event):\n return True\n return super().has_change_permission(request, obj)\n\n def mark_not_started(self, request, queryset):\n \"\"\"Change the status of the event to published.\"\"\"\n self._change_published(queryset, PromotionRequest.NOT_STARTED)\n\n mark_not_started.short_description = \"Mark requests as not started\"\n\n def mark_started(self, request, queryset):\n \"\"\"Change the status of the event to published.\"\"\"\n self._change_published(queryset, PromotionRequest.STARTED)\n\n mark_started.short_description = \"Mark requests as started\"\n\n def mark_finished(self, request, queryset):\n \"\"\"Change the status of the event to published.\"\"\"\n self._change_published(queryset, PromotionRequest.FINISHED)\n\n mark_finished.short_description = \"Mark requests as finished\"\n\n def mark_published(self, request, queryset):\n \"\"\"Change the status of the event to published.\"\"\"\n self._change_published(queryset, PromotionRequest.PUBLISHED)\n\n mark_published.short_description = \"Mark requests as published\"\n\n @staticmethod\n def _change_published(queryset, status):\n queryset.update(status=status)\n\n\[email protected](PromotionChannel)\nclass PromotionChannelAdmin(ModelAdmin):\n pass\n", "path": "website/promotion/admin.py"}]} | 862 | 154 |
gh_patches_debug_35024 | rasdani/github-patches | git_diff | internetarchive__openlibrary-4013 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Sentry should include deployment SHA
Sentry has options for specifying the SHA of the current code, so you can see when an error was introduced. We currently don't take advantage of this.
### Describe the problem that you'd like solved
<!-- A clear and concise description of what you want to happen. -->
### Proposal & Constraints
<!-- What is the proposed solution / implementation? Is there a precedent of this approach succeeding elsewhere? -->
<!-- Which suggestions or requirements should be considered for how feature needs to appear or be implemented? -->
### Additional context
<!-- Add any other context or screenshots about the feature request here. -->
### Stakeholders
@cdrini
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `openlibrary/plugins/openlibrary/sentry.py`
Content:
```
1 import logging
2
3 import sentry_sdk
4
5 import infogami
6 from infogami.utils import delegate
7
8 logger = logging.getLogger("openlibrary.sentry")
9
10
11 def is_enabled():
12 return hasattr(infogami.config, 'sentry') and infogami.config.sentry.enabled
13
14
15 def setup():
16 logger.info("Setting up sentry (enabled={})".format(is_enabled()))
17
18 if not is_enabled():
19 return
20
21 sentry_sdk.init(dsn=infogami.config.sentry.dsn,
22 environment=infogami.config.sentry.environment)
23 delegate.add_exception_hook(lambda: sentry_sdk.capture_exception())
24
```
Path: `openlibrary/plugins/openlibrary/status.py`
Content:
```
1 import web
2
3 import datetime
4 import socket
5 import subprocess
6 import sys
7
8 from infogami import config
9 from infogami.utils import delegate
10 from infogami.utils.view import render_template, public
11 from openlibrary.core import stats
12
13 status_info = {}
14 feature_flags = {}
15
16 class status(delegate.page):
17 def GET(self):
18 template = render_template("status", status_info, feature_flags)
19 template.v2 = True
20 return template
21
22 @public
23 def get_git_revision_short_hash():
24 return (status_info.get('Software version')
25 if status_info and isinstance(status_info, dict)
26 else None)
27
28 def get_software_version():
29 return subprocess.Popen("git rev-parse --short HEAD --".split(), stdout = subprocess.PIPE, stderr = subprocess.STDOUT).stdout.read().strip()
30
31 def get_features_enabled():
32 return config.features
33
34 def setup():
35 "Basic startup status for the server"
36 global status_info, feature_flags
37 version = get_software_version()
38 if bytes != str: # Python 3
39 version = version.decode("utf-8")
40 host = socket.gethostname()
41 status_info = {
42 "Software version": version,
43 "Python version": sys.version.split()[0],
44 "Host": host,
45 "Start time": datetime.datetime.utcnow(),
46 }
47 feature_flags = get_features_enabled()
48
49 # Host is e.g. ol-web4.blah.archive.org ; we just want the first subdomain
50 first_subdomain = host.split('.')[0] or 'unknown'
51 stats.increment('ol.servers.%s.started' % first_subdomain)
52
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/openlibrary/plugins/openlibrary/sentry.py b/openlibrary/plugins/openlibrary/sentry.py
--- a/openlibrary/plugins/openlibrary/sentry.py
+++ b/openlibrary/plugins/openlibrary/sentry.py
@@ -5,6 +5,8 @@
import infogami
from infogami.utils import delegate
+from openlibrary.plugins.openlibrary.status import get_software_version
+
logger = logging.getLogger("openlibrary.sentry")
@@ -19,5 +21,6 @@
return
sentry_sdk.init(dsn=infogami.config.sentry.dsn,
- environment=infogami.config.sentry.environment)
+ environment=infogami.config.sentry.environment,
+ release=get_software_version())
delegate.add_exception_hook(lambda: sentry_sdk.capture_exception())
diff --git a/openlibrary/plugins/openlibrary/status.py b/openlibrary/plugins/openlibrary/status.py
--- a/openlibrary/plugins/openlibrary/status.py
+++ b/openlibrary/plugins/openlibrary/status.py
@@ -2,8 +2,8 @@
import datetime
import socket
-import subprocess
import sys
+from subprocess import PIPE, Popen, STDOUT
from infogami import config
from infogami.utils import delegate
@@ -25,8 +25,10 @@
if status_info and isinstance(status_info, dict)
else None)
-def get_software_version():
- return subprocess.Popen("git rev-parse --short HEAD --".split(), stdout = subprocess.PIPE, stderr = subprocess.STDOUT).stdout.read().strip()
+
+def get_software_version(): # -> str:
+ cmd = "git rev-parse --short HEAD --".split()
+ return str(Popen(cmd, stdout=PIPE, stderr=STDOUT).stdout.read().decode().strip())
def get_features_enabled():
return config.features
@@ -34,12 +36,9 @@
def setup():
"Basic startup status for the server"
global status_info, feature_flags
- version = get_software_version()
- if bytes != str: # Python 3
- version = version.decode("utf-8")
host = socket.gethostname()
status_info = {
- "Software version": version,
+ "Software version": get_software_version(),
"Python version": sys.version.split()[0],
"Host": host,
"Start time": datetime.datetime.utcnow(),
| {"golden_diff": "diff --git a/openlibrary/plugins/openlibrary/sentry.py b/openlibrary/plugins/openlibrary/sentry.py\n--- a/openlibrary/plugins/openlibrary/sentry.py\n+++ b/openlibrary/plugins/openlibrary/sentry.py\n@@ -5,6 +5,8 @@\n import infogami\n from infogami.utils import delegate\n \n+from openlibrary.plugins.openlibrary.status import get_software_version\n+\n logger = logging.getLogger(\"openlibrary.sentry\")\n \n \n@@ -19,5 +21,6 @@\n return\n \n sentry_sdk.init(dsn=infogami.config.sentry.dsn,\n- environment=infogami.config.sentry.environment)\n+ environment=infogami.config.sentry.environment,\n+ release=get_software_version())\n delegate.add_exception_hook(lambda: sentry_sdk.capture_exception())\ndiff --git a/openlibrary/plugins/openlibrary/status.py b/openlibrary/plugins/openlibrary/status.py\n--- a/openlibrary/plugins/openlibrary/status.py\n+++ b/openlibrary/plugins/openlibrary/status.py\n@@ -2,8 +2,8 @@\n \n import datetime\n import socket\n-import subprocess\n import sys\n+from subprocess import PIPE, Popen, STDOUT\n \n from infogami import config\n from infogami.utils import delegate\n@@ -25,8 +25,10 @@\n if status_info and isinstance(status_info, dict) \n else None)\n \n-def get_software_version():\n- return subprocess.Popen(\"git rev-parse --short HEAD --\".split(), stdout = subprocess.PIPE, stderr = subprocess.STDOUT).stdout.read().strip()\n+\n+def get_software_version(): # -> str:\n+ cmd = \"git rev-parse --short HEAD --\".split()\n+ return str(Popen(cmd, stdout=PIPE, stderr=STDOUT).stdout.read().decode().strip())\n \n def get_features_enabled():\n return config.features\n@@ -34,12 +36,9 @@\n def setup():\n \"Basic startup status for the server\"\n global status_info, feature_flags\n- version = get_software_version()\n- if bytes != str: # Python 3\n- version = version.decode(\"utf-8\")\n host = socket.gethostname()\n status_info = {\n- \"Software version\": version,\n+ \"Software version\": get_software_version(),\n \"Python version\": sys.version.split()[0],\n \"Host\": host,\n \"Start time\": datetime.datetime.utcnow(),\n", "issue": "Sentry should include deployment SHA\nSentry has options for specifying the SHA of the current code, so you can see when an error was introduced. We currently don't take advantage of this.\r\n\r\n### Describe the problem that you'd like solved\r\n<!-- A clear and concise description of what you want to happen. -->\r\n\r\n### Proposal & Constraints\r\n<!-- What is the proposed solution / implementation? Is there a precedent of this approach succeeding elsewhere? -->\r\n\r\n<!-- Which suggestions or requirements should be considered for how feature needs to appear or be implemented? -->\r\n\r\n### Additional context\r\n<!-- Add any other context or screenshots about the feature request here. 
-->\r\n\r\n### Stakeholders\r\n@cdrini \r\n\r\n\r\n\n", "before_files": [{"content": "import logging\n\nimport sentry_sdk\n\nimport infogami\nfrom infogami.utils import delegate\n\nlogger = logging.getLogger(\"openlibrary.sentry\")\n\n\ndef is_enabled():\n return hasattr(infogami.config, 'sentry') and infogami.config.sentry.enabled\n\n\ndef setup():\n logger.info(\"Setting up sentry (enabled={})\".format(is_enabled()))\n\n if not is_enabled():\n return\n\n sentry_sdk.init(dsn=infogami.config.sentry.dsn,\n environment=infogami.config.sentry.environment)\n delegate.add_exception_hook(lambda: sentry_sdk.capture_exception())\n", "path": "openlibrary/plugins/openlibrary/sentry.py"}, {"content": "import web\n\nimport datetime\nimport socket\nimport subprocess\nimport sys\n\nfrom infogami import config\nfrom infogami.utils import delegate\nfrom infogami.utils.view import render_template, public\nfrom openlibrary.core import stats\n\nstatus_info = {}\nfeature_flags = {}\n\nclass status(delegate.page):\n def GET(self):\n template = render_template(\"status\", status_info, feature_flags)\n template.v2 = True\n return template\n\n@public\ndef get_git_revision_short_hash():\n return (status_info.get('Software version')\n if status_info and isinstance(status_info, dict) \n else None)\n\ndef get_software_version():\n return subprocess.Popen(\"git rev-parse --short HEAD --\".split(), stdout = subprocess.PIPE, stderr = subprocess.STDOUT).stdout.read().strip()\n\ndef get_features_enabled():\n return config.features\n\ndef setup():\n \"Basic startup status for the server\"\n global status_info, feature_flags\n version = get_software_version()\n if bytes != str: # Python 3\n version = version.decode(\"utf-8\")\n host = socket.gethostname()\n status_info = {\n \"Software version\": version,\n \"Python version\": sys.version.split()[0],\n \"Host\": host,\n \"Start time\": datetime.datetime.utcnow(),\n }\n feature_flags = get_features_enabled()\n\n # Host is e.g. 
ol-web4.blah.archive.org ; we just want the first subdomain\n first_subdomain = host.split('.')[0] or 'unknown'\n stats.increment('ol.servers.%s.started' % first_subdomain)\n", "path": "openlibrary/plugins/openlibrary/status.py"}], "after_files": [{"content": "import logging\n\nimport sentry_sdk\n\nimport infogami\nfrom infogami.utils import delegate\n\nfrom openlibrary.plugins.openlibrary.status import get_software_version\n\nlogger = logging.getLogger(\"openlibrary.sentry\")\n\n\ndef is_enabled():\n return hasattr(infogami.config, 'sentry') and infogami.config.sentry.enabled\n\n\ndef setup():\n logger.info(\"Setting up sentry (enabled={})\".format(is_enabled()))\n\n if not is_enabled():\n return\n\n sentry_sdk.init(dsn=infogami.config.sentry.dsn,\n environment=infogami.config.sentry.environment,\n release=get_software_version())\n delegate.add_exception_hook(lambda: sentry_sdk.capture_exception())\n", "path": "openlibrary/plugins/openlibrary/sentry.py"}, {"content": "import web\n\nimport datetime\nimport socket\nimport sys\nfrom subprocess import PIPE, Popen, STDOUT\n\nfrom infogami import config\nfrom infogami.utils import delegate\nfrom infogami.utils.view import render_template, public\nfrom openlibrary.core import stats\n\nstatus_info = {}\nfeature_flags = {}\n\nclass status(delegate.page):\n def GET(self):\n template = render_template(\"status\", status_info, feature_flags)\n template.v2 = True\n return template\n\n@public\ndef get_git_revision_short_hash():\n return (status_info.get('Software version')\n if status_info and isinstance(status_info, dict) \n else None)\n\n\ndef get_software_version(): # -> str:\n cmd = \"git rev-parse --short HEAD --\".split()\n return str(Popen(cmd, stdout=PIPE, stderr=STDOUT).stdout.read().decode().strip())\n\ndef get_features_enabled():\n return config.features\n\ndef setup():\n \"Basic startup status for the server\"\n global status_info, feature_flags\n host = socket.gethostname()\n status_info = {\n \"Software version\": get_software_version(),\n \"Python version\": sys.version.split()[0],\n \"Host\": host,\n \"Start time\": datetime.datetime.utcnow(),\n }\n feature_flags = get_features_enabled()\n\n # Host is e.g. ol-web4.blah.archive.org ; we just want the first subdomain\n first_subdomain = host.split('.')[0] or 'unknown'\n stats.increment('ol.servers.%s.started' % first_subdomain)\n", "path": "openlibrary/plugins/openlibrary/status.py"}]} | 1,027 | 513 |
gh_patches_debug_5772 | rasdani/github-patches | git_diff | liqd__a4-meinberlin-471 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Link to profile breaks if there is a space in the username
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/embed/middleware.py`
Content:
```
1 class AjaxPathMiddleware(object):
2 """Append request path as a header.
3
4 In an ajax request, redirects are handled implicitly, so it it not possible
5 to know the path of the page where you end up. This middleware adds that
6 information in a header.
7 """
8
9 def process_response(self, request, response):
10 response['x-ajax-path'] = request.path
11 return response
12
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/apps/embed/middleware.py b/apps/embed/middleware.py
--- a/apps/embed/middleware.py
+++ b/apps/embed/middleware.py
@@ -1,3 +1,6 @@
+from django.utils.http import urlquote
+
+
class AjaxPathMiddleware(object):
"""Append request path as a header.
@@ -7,5 +10,5 @@
"""
def process_response(self, request, response):
- response['x-ajax-path'] = request.path
+ response['x-ajax-path'] = urlquote(request.path)
return response
| {"golden_diff": "diff --git a/apps/embed/middleware.py b/apps/embed/middleware.py\n--- a/apps/embed/middleware.py\n+++ b/apps/embed/middleware.py\n@@ -1,3 +1,6 @@\n+from django.utils.http import urlquote\n+\n+\n class AjaxPathMiddleware(object):\n \"\"\"Append request path as a header.\n \n@@ -7,5 +10,5 @@\n \"\"\"\n \n def process_response(self, request, response):\n- response['x-ajax-path'] = request.path\n+ response['x-ajax-path'] = urlquote(request.path)\n return response\n", "issue": "link to profile breaks if space in username\n\n", "before_files": [{"content": "class AjaxPathMiddleware(object):\n \"\"\"Append request path as a header.\n\n In an ajax request, redirects are handled implicitly, so it it not possible\n to know the path of the page where you end up. This middleware adds that\n information in a header.\n \"\"\"\n\n def process_response(self, request, response):\n response['x-ajax-path'] = request.path\n return response\n", "path": "apps/embed/middleware.py"}], "after_files": [{"content": "from django.utils.http import urlquote\n\n\nclass AjaxPathMiddleware(object):\n \"\"\"Append request path as a header.\n\n In an ajax request, redirects are handled implicitly, so it it not possible\n to know the path of the page where you end up. This middleware adds that\n information in a header.\n \"\"\"\n\n def process_response(self, request, response):\n response['x-ajax-path'] = urlquote(request.path)\n return response\n", "path": "apps/embed/middleware.py"}]} | 368 | 123 |
gh_patches_debug_11574 | rasdani/github-patches | git_diff | sunpy__sunpy-5293 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Provide an example of splitting sections of an attr query out of the Fido.search method.
@Cadair had this snippet of code
``` python
import datetime
from sunpy.net import vso
from sunpy.time import parse_time
# Start time and end time for the AIA search
start = parse_time('2014/07/17T10:01:30')
stop = start + datetime.timedelta(seconds=12)
stop_hmi = start + datetime.timedelta(seconds=30)
# Define two VSO Searches for the AIA data and the HMI data
search_aia = (vso.attrs.Time(start, stop), vso.attrs.Instrument('AIA'))
search_hmi = (vso.attrs.Time(start, stop_hmi), vso.attrs.Instrument('HMI'),
vso.attrs.Physobs('LOS_magnetic_field'))
# Create the VSO Client
vsoClient = vso.VSOClient()
# Query VSO for both searches using the or operator `|`
results = vsoClient.query(search_aia | search_hmi)
```
That used to work but now I get this error.
``` python
TypeError: unsupported operand type(s) for |: 'tuple' and 'tuple'
```
Should this operation be possible?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/acquiring_data/searching_vso.py`
Content:
```
1 """
2 ======================================
3 Searching and downloading from the VSO
4 ======================================
5
6 How to download data from the VSO with Fido.
7 """
8 import astropy.units as u
9
10 from sunpy.net import Fido
11 from sunpy.net import attrs as a
12
13 ###############################################################################
14 # `sunpy.net.Fido` is the primary interface to search for and download data and
15 # will search the VSO when appropriate. The following example searches for all
16 # SOHO/EIT images between the times defined below by defining a
17 # timerange (`~sunpy.net.attrs.Time`) and the instrument (`~sunpy.net.attrs.Instrument`).
18
19 attrs_time = a.Time('2005/01/01 00:10', '2005/01/01 00:15')
20 result = Fido.search(attrs_time, a.Instrument.eit)
21
22 ###############################################################################
23 # Let's inspect the results.
24
25 print(result)
26
27 ###############################################################################
28 # The following shows how to download the results. If we
29 # don't provide a path it will download the file into the sunpy data directory.
30 # The output provides the path of the downloaded files.
31
32 downloaded_files = Fido.fetch(result)
33 print(downloaded_files)
34
35 ###############################################################################
36 # More complicated queries can be constructed by using relational operators.
37 # For example, it is possible to query two wavelengths at the same time with
38 # the OR operator (|).
39
40 result = Fido.search(a.Time('2020/03/04 00:00', '2020/03/04 00:02'),
41 a.Instrument.aia,
42 a.Wavelength(171*u.angstrom) | a.Wavelength(94*u.angstrom))
43 print(result)
44
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/acquiring_data/searching_vso.py b/examples/acquiring_data/searching_vso.py
--- a/examples/acquiring_data/searching_vso.py
+++ b/examples/acquiring_data/searching_vso.py
@@ -41,3 +41,15 @@
a.Instrument.aia,
a.Wavelength(171*u.angstrom) | a.Wavelength(94*u.angstrom))
print(result)
+
+###############################################################################
+# We can even combine entire queries in this manner.
+# Here we will define two searches for the AIA and HMI data.
+# But unlike other examples, we have to ``&`` the individual queries.
+
+search_aia = (a.Time('2020/03/04 00:00', '2020/03/04 00:01') & a.Instrument.aia)
+search_hmi = (a.Time('2020/03/04 00:00', '2020/03/04 00:01')
+ & a.Instrument.hmi & a.Physobs.los_magnetic_field)
+
+result = Fido.search(search_aia | search_hmi)
+print(result)
| {"golden_diff": "diff --git a/examples/acquiring_data/searching_vso.py b/examples/acquiring_data/searching_vso.py\n--- a/examples/acquiring_data/searching_vso.py\n+++ b/examples/acquiring_data/searching_vso.py\n@@ -41,3 +41,15 @@\n a.Instrument.aia,\n a.Wavelength(171*u.angstrom) | a.Wavelength(94*u.angstrom))\n print(result)\n+\n+###############################################################################\n+# We can even combine entire queries in this manner.\n+# Here we will define two searches for the AIA and HMI data.\n+# But unlike other examples, we have to ``&`` the individual queries.\n+\n+search_aia = (a.Time('2020/03/04 00:00', '2020/03/04 00:01') & a.Instrument.aia)\n+search_hmi = (a.Time('2020/03/04 00:00', '2020/03/04 00:01')\n+ & a.Instrument.hmi & a.Physobs.los_magnetic_field)\n+\n+result = Fido.search(search_aia | search_hmi)\n+print(result)\n", "issue": "Provide an example of splitting sections of an attr query out of the Fido.search method.\n@Cadair's had this snippet of code\r\n\r\n``` python\r\nimport datetime\r\nfrom sunpy.net import vso\r\nfrom sunpy.time import parse_time\r\n\r\n# Start time and end time for the AIA search\r\nstart = parse_time('2014/07/17T10:01:30')\r\nstop = start + datetime.timedelta(seconds=12)\r\nstop_hmi = start + datetime.timedelta(seconds=30)\r\n\r\n# Define two VSO Searches for the AIA data and the HMI data\r\nsearch_aia = (vso.attrs.Time(start, stop), vso.attrs.Instrument('AIA'))\r\nsearch_hmi = (vso.attrs.Time(start, stop_hmi), vso.attrs.Instrument('HMI'),\r\n vso.attrs.Physobs('LOS_magnetic_field'))\r\n\r\n# Create the VSO Client\r\nvsoClient = vso.VSOClient()\r\n\r\n# Query VSO for both searches using the or operator `|`\r\nresults = vsoClient.query(search_aia | search_hmi)\r\n```\r\n\r\nThat used to work but now I get this error. \r\n\r\n``` python\r\nTypeError: unsupported operand type(s) for |: 'tuple' and 'tuple'\r\n```\r\n\r\nShould this operation be possible? \r\n\n", "before_files": [{"content": "\"\"\"\n======================================\nSearching and downloading from the VSO\n======================================\n\nHow to download data from the VSO with Fido.\n\"\"\"\nimport astropy.units as u\n\nfrom sunpy.net import Fido\nfrom sunpy.net import attrs as a\n\n###############################################################################\n# `sunpy.net.Fido` is the primary interface to search for and download data and\n# will search the VSO when appropriate. The following example searches for all\n# SOHO/EIT images between the times defined below by defining a\n# timerange (`~sunpy.net.attrs.Time`) and the instrument (`~sunpy.net.attrs.Instrument`).\n\nattrs_time = a.Time('2005/01/01 00:10', '2005/01/01 00:15')\nresult = Fido.search(attrs_time, a.Instrument.eit)\n\n###############################################################################\n# Let's inspect the results.\n\nprint(result)\n\n###############################################################################\n# The following shows how to download the results. 
If we\n# don't provide a path it will download the file into the sunpy data directory.\n# The output provides the path of the downloaded files.\n\ndownloaded_files = Fido.fetch(result)\nprint(downloaded_files)\n\n###############################################################################\n# More complicated queries can be constructed by using relational operators.\n# For example, it is possible to query two wavelengths at the same time with\n# the OR operator (|).\n\nresult = Fido.search(a.Time('2020/03/04 00:00', '2020/03/04 00:02'),\n a.Instrument.aia,\n a.Wavelength(171*u.angstrom) | a.Wavelength(94*u.angstrom))\nprint(result)\n", "path": "examples/acquiring_data/searching_vso.py"}], "after_files": [{"content": "\"\"\"\n======================================\nSearching and downloading from the VSO\n======================================\n\nHow to download data from the VSO with Fido.\n\"\"\"\nimport astropy.units as u\n\nfrom sunpy.net import Fido\nfrom sunpy.net import attrs as a\n\n###############################################################################\n# `sunpy.net.Fido` is the primary interface to search for and download data and\n# will search the VSO when appropriate. The following example searches for all\n# SOHO/EIT images between the times defined below by defining a\n# timerange (`~sunpy.net.attrs.Time`) and the instrument (`~sunpy.net.attrs.Instrument`).\n\nattrs_time = a.Time('2005/01/01 00:10', '2005/01/01 00:15')\nresult = Fido.search(attrs_time, a.Instrument.eit)\n\n###############################################################################\n# Let's inspect the results.\n\nprint(result)\n\n###############################################################################\n# The following shows how to download the results. If we\n# don't provide a path it will download the file into the sunpy data directory.\n# The output provides the path of the downloaded files.\n\ndownloaded_files = Fido.fetch(result)\nprint(downloaded_files)\n\n###############################################################################\n# More complicated queries can be constructed by using relational operators.\n# For example, it is possible to query two wavelengths at the same time with\n# the OR operator (|).\n\nresult = Fido.search(a.Time('2020/03/04 00:00', '2020/03/04 00:02'),\n a.Instrument.aia,\n a.Wavelength(171*u.angstrom) | a.Wavelength(94*u.angstrom))\nprint(result)\n\n###############################################################################\n# We can even combine entire queries in this manner.\n# Here we will define two searches for the AIA and HMI data.\n# But unlike other examples, we have to ``&`` the individual queries.\n\nsearch_aia = (a.Time('2020/03/04 00:00', '2020/03/04 00:01') & a.Instrument.aia)\nsearch_hmi = (a.Time('2020/03/04 00:00', '2020/03/04 00:01')\n & a.Instrument.hmi & a.Physobs.los_magnetic_field)\n\nresult = Fido.search(search_aia | search_hmi)\nprint(result)\n", "path": "examples/acquiring_data/searching_vso.py"}]} | 995 | 274 |
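
The TypeError in this record comes from combining plain tuples with `|`: tuples do not define that operator, while attr objects joined with `&` produce `AttrAnd` values that do. A minimal sketch, assuming sunpy's current `attrs` API (times and variable names are illustrative):

```python
from sunpy.net import attrs as a

# Plain tuples, as in the original snippet; `aia_tuple | hmi_tuple` raises
# "TypeError: unsupported operand type(s) for |: 'tuple' and 'tuple'".
aia_tuple = (a.Time('2014/07/17 10:01:30', '2014/07/17 10:01:42'), a.Instrument.aia)
hmi_tuple = (a.Time('2014/07/17 10:01:30', '2014/07/17 10:02:00'), a.Instrument.hmi)

# Attr objects combined with `&` instead, as in the patch above.
aia_query = a.Time('2014/07/17 10:01:30', '2014/07/17 10:01:42') & a.Instrument.aia
hmi_query = (a.Time('2014/07/17 10:01:30', '2014/07/17 10:02:00')
             & a.Instrument.hmi & a.Physobs.los_magnetic_field)

combined = aia_query | hmi_query  # an AttrOr that Fido.search() accepts
```
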
gh_patches_debug_2247 | rasdani/github-patches | git_diff | PaddlePaddle__PaddleDetection-8421 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
训练出现长警告
### 问题确认 Search before asking
- [X] 我已经查询[历史issue](https://github.com/PaddlePaddle/PaddleDetection/issues),没有发现相似的bug。I have searched the [issues](https://github.com/PaddlePaddle/PaddleDetection/issues) and found no similar bug report.
### Bug组件 Bug Component
_No response_
### Bug描述 Describe the Bug
训练出现长警告
```
I0706 13:09:13.075042 3772 eager_method.cc:140] Warning:: 0D Tensor cannot be used as 'Tensor.numpy()[0]' . In order to avoid this problem, 0D Tensor will be changed to 1D numpy currently, but it's not correct and will be removed in release 2.6. For Tensor contain only one element, Please modify 'Tensor.numpy()[0]' to 'float(Tensor)' as soon as possible, otherwise 'Tensor.numpy()[0]' will raise error in release 2.6.
I0706 13:09:13.382442 3772 eager_method.cc:140] Warning:: 0D Tensor cannot be used as 'Tensor.numpy()[0]' . In order to avoid this problem, 0D Tensor will be changed to 1D numpy currently, but it's not correct and will be removed in release 2.6. For Tensor contain only one element, Please modify 'Tensor.numpy()[0]' to 'float(Tensor)' as soon as possible, otherwise 'Tensor.numpy()[0]' will raise error in release 2.6.
```
### 复现环境 Environment
PaddleDetection2.6
PaddlePaddle2.5.0
经过排查将`ppdet/utils/stats.py`第77行进行如下修改
`v.update(stats[k].numpy())`→`v.update(float(stats[k]))`
### Bug描述确认 Bug description confirmation
- [X] 我确认已经提供了Bug复现步骤、代码改动说明、以及环境信息,确认问题是可以复现的。I confirm that the bug replication steps, code change instructions, and environment information have been provided, and the problem can be reproduced.
### 是否愿意提交PR? Are you willing to submit a PR?
- [ ] 我愿意提交PR!I'd like to help by submitting a PR!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ppdet/utils/stats.py`
Content:
```
1 # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import collections
16 import numpy as np
17
18 __all__ = ['SmoothedValue', 'TrainingStats']
19
20
21 class SmoothedValue(object):
22 """Track a series of values and provide access to smoothed values over a
23 window or the global series average.
24 """
25
26 def __init__(self, window_size=20, fmt=None):
27 if fmt is None:
28 fmt = "{median:.4f} ({avg:.4f})"
29 self.deque = collections.deque(maxlen=window_size)
30 self.fmt = fmt
31 self.total = 0.
32 self.count = 0
33
34 def update(self, value, n=1):
35 self.deque.append(value)
36 self.count += n
37 self.total += value * n
38
39 @property
40 def median(self):
41 return np.median(self.deque)
42
43 @property
44 def avg(self):
45 return np.mean(self.deque)
46
47 @property
48 def max(self):
49 return np.max(self.deque)
50
51 @property
52 def value(self):
53 return self.deque[-1]
54
55 @property
56 def global_avg(self):
57 return self.total / self.count
58
59 def __str__(self):
60 return self.fmt.format(
61 median=self.median, avg=self.avg, max=self.max, value=self.value)
62
63
64 class TrainingStats(object):
65 def __init__(self, window_size, delimiter=' '):
66 self.meters = None
67 self.window_size = window_size
68 self.delimiter = delimiter
69
70 def update(self, stats):
71 if self.meters is None:
72 self.meters = {
73 k: SmoothedValue(self.window_size)
74 for k in stats.keys()
75 }
76 for k, v in self.meters.items():
77 v.update(stats[k].numpy())
78
79 def get(self, extras=None):
80 stats = collections.OrderedDict()
81 if extras:
82 for k, v in extras.items():
83 stats[k] = v
84 for k, v in self.meters.items():
85 stats[k] = format(v.median, '.6f')
86
87 return stats
88
89 def log(self, extras=None):
90 d = self.get(extras)
91 strs = []
92 for k, v in d.items():
93 strs.append("{}: {}".format(k, str(v)))
94 return self.delimiter.join(strs)
95
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ppdet/utils/stats.py b/ppdet/utils/stats.py
--- a/ppdet/utils/stats.py
+++ b/ppdet/utils/stats.py
@@ -74,7 +74,7 @@
for k in stats.keys()
}
for k, v in self.meters.items():
- v.update(stats[k].numpy())
+ v.update(float(stats[k]))
def get(self, extras=None):
stats = collections.OrderedDict()
| {"golden_diff": "diff --git a/ppdet/utils/stats.py b/ppdet/utils/stats.py\n--- a/ppdet/utils/stats.py\n+++ b/ppdet/utils/stats.py\n@@ -74,7 +74,7 @@\n for k in stats.keys()\n }\n for k, v in self.meters.items():\n- v.update(stats[k].numpy())\n+ v.update(float(stats[k]))\n \n def get(self, extras=None):\n stats = collections.OrderedDict()\n", "issue": "\u8bad\u7ec3\u51fa\u73b0\u957f\u8b66\u544a\n### \u95ee\u9898\u786e\u8ba4 Search before asking\n\n- [X] \u6211\u5df2\u7ecf\u67e5\u8be2[\u5386\u53f2issue](https://github.com/PaddlePaddle/PaddleDetection/issues)\uff0c\u6ca1\u6709\u53d1\u73b0\u76f8\u4f3c\u7684bug\u3002I have searched the [issues](https://github.com/PaddlePaddle/PaddleDetection/issues) and found no similar bug report.\n\n\n### Bug\u7ec4\u4ef6 Bug Component\n\n_No response_\n\n### Bug\u63cf\u8ff0 Describe the Bug\n\n\u8bad\u7ec3\u51fa\u73b0\u957f\u8b66\u544a\r\n```\r\nI0706 13:09:13.075042 3772 eager_method.cc:140] Warning:: 0D Tensor cannot be used as 'Tensor.numpy()[0]' . In order to avoid this problem, 0D Tensor will be changed to 1D numpy currently, but it's not correct and will be removed in release 2.6. For Tensor contain only one element, Please modify 'Tensor.numpy()[0]' to 'float(Tensor)' as soon as possible, otherwise 'Tensor.numpy()[0]' will raise error in release 2.6.\r\nI0706 13:09:13.382442 3772 eager_method.cc:140] Warning:: 0D Tensor cannot be used as 'Tensor.numpy()[0]' . In order to avoid this problem, 0D Tensor will be changed to 1D numpy currently, but it's not correct and will be removed in release 2.6. For Tensor contain only one element, Please modify 'Tensor.numpy()[0]' to 'float(Tensor)' as soon as possible, otherwise 'Tensor.numpy()[0]' will raise error in release 2.6.\r\n```\n\n### \u590d\u73b0\u73af\u5883 Environment\n\nPaddleDetection2.6\r\nPaddlePaddle2.5.0\r\n\r\n\u7ecf\u8fc7\u6392\u67e5\u5c06`ppdet/utils/stats.py`\u7b2c77\u884c\u8fdb\u884c\u5982\u4e0b\u4fee\u6539\r\n`v.update(stats[k].numpy())`\u2192`v.update(float(stats[k]))`\n\n### Bug\u63cf\u8ff0\u786e\u8ba4 Bug description confirmation\n\n- [X] \u6211\u786e\u8ba4\u5df2\u7ecf\u63d0\u4f9b\u4e86Bug\u590d\u73b0\u6b65\u9aa4\u3001\u4ee3\u7801\u6539\u52a8\u8bf4\u660e\u3001\u4ee5\u53ca\u73af\u5883\u4fe1\u606f\uff0c\u786e\u8ba4\u95ee\u9898\u662f\u53ef\u4ee5\u590d\u73b0\u7684\u3002I confirm that the bug replication steps, code change instructions, and environment information have been provided, and the problem can be reproduced.\n\n\n### \u662f\u5426\u613f\u610f\u63d0\u4ea4PR\uff1f Are you willing to submit a PR?\n\n- [ ] \u6211\u613f\u610f\u63d0\u4ea4PR\uff01I'd like to help by submitting a PR!\n", "before_files": [{"content": "# Copyright (c) 2019 PaddlePaddle Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport collections\nimport numpy as np\n\n__all__ = ['SmoothedValue', 'TrainingStats']\n\n\nclass SmoothedValue(object):\n \"\"\"Track a series of values and provide access to smoothed values over a\n window or the global series average.\n \"\"\"\n\n def __init__(self, window_size=20, fmt=None):\n if fmt is None:\n fmt = \"{median:.4f} ({avg:.4f})\"\n self.deque = collections.deque(maxlen=window_size)\n self.fmt = fmt\n self.total = 0.\n self.count = 0\n\n def update(self, value, n=1):\n self.deque.append(value)\n self.count += n\n self.total += value * n\n\n @property\n def median(self):\n return np.median(self.deque)\n\n @property\n def avg(self):\n return np.mean(self.deque)\n\n @property\n def max(self):\n return np.max(self.deque)\n\n @property\n def value(self):\n return self.deque[-1]\n\n @property\n def global_avg(self):\n return self.total / self.count\n\n def __str__(self):\n return self.fmt.format(\n median=self.median, avg=self.avg, max=self.max, value=self.value)\n\n\nclass TrainingStats(object):\n def __init__(self, window_size, delimiter=' '):\n self.meters = None\n self.window_size = window_size\n self.delimiter = delimiter\n\n def update(self, stats):\n if self.meters is None:\n self.meters = {\n k: SmoothedValue(self.window_size)\n for k in stats.keys()\n }\n for k, v in self.meters.items():\n v.update(stats[k].numpy())\n\n def get(self, extras=None):\n stats = collections.OrderedDict()\n if extras:\n for k, v in extras.items():\n stats[k] = v\n for k, v in self.meters.items():\n stats[k] = format(v.median, '.6f')\n\n return stats\n\n def log(self, extras=None):\n d = self.get(extras)\n strs = []\n for k, v in d.items():\n strs.append(\"{}: {}\".format(k, str(v)))\n return self.delimiter.join(strs)\n", "path": "ppdet/utils/stats.py"}], "after_files": [{"content": "# Copyright (c) 2019 PaddlePaddle Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport collections\nimport numpy as np\n\n__all__ = ['SmoothedValue', 'TrainingStats']\n\n\nclass SmoothedValue(object):\n \"\"\"Track a series of values and provide access to smoothed values over a\n window or the global series average.\n \"\"\"\n\n def __init__(self, window_size=20, fmt=None):\n if fmt is None:\n fmt = \"{median:.4f} ({avg:.4f})\"\n self.deque = collections.deque(maxlen=window_size)\n self.fmt = fmt\n self.total = 0.\n self.count = 0\n\n def update(self, value, n=1):\n self.deque.append(value)\n self.count += n\n self.total += value * n\n\n @property\n def median(self):\n return np.median(self.deque)\n\n @property\n def avg(self):\n return np.mean(self.deque)\n\n @property\n def max(self):\n return np.max(self.deque)\n\n @property\n def value(self):\n return self.deque[-1]\n\n @property\n def global_avg(self):\n return self.total / self.count\n\n def __str__(self):\n return self.fmt.format(\n median=self.median, avg=self.avg, max=self.max, value=self.value)\n\n\nclass TrainingStats(object):\n def __init__(self, window_size, delimiter=' '):\n self.meters = None\n self.window_size = window_size\n self.delimiter = delimiter\n\n def update(self, stats):\n if self.meters is None:\n self.meters = {\n k: SmoothedValue(self.window_size)\n for k in stats.keys()\n }\n for k, v in self.meters.items():\n v.update(float(stats[k]))\n\n def get(self, extras=None):\n stats = collections.OrderedDict()\n if extras:\n for k, v in extras.items():\n stats[k] = v\n for k, v in self.meters.items():\n stats[k] = format(v.median, '.6f')\n\n return stats\n\n def log(self, extras=None):\n d = self.get(extras)\n strs = []\n for k, v in d.items():\n strs.append(\"{}: {}\".format(k, str(v)))\n return self.delimiter.join(strs)\n", "path": "ppdet/utils/stats.py"}]} | 1,600 | 98 |
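
The warning quoted in this record is emitted when the NumPy view of a zero-dimensional tensor is indexed with `[0]`; converting the tensor with `float()` avoids it. A minimal sketch, assuming PaddlePaddle 2.5 semantics where per-step stats are 0-D tensors (the value is illustrative):

```python
import paddle

loss = paddle.to_tensor(0.25)   # a 0-D tensor, like the stats passed to TrainingStats.update
value = float(loss)             # preferred: plain Python float, no warning
# legacy = loss.numpy()[0]      # emits the "0D Tensor cannot be used as 'Tensor.numpy()[0]'"
#                               # warning quoted above and is slated to error in release 2.6
```
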
gh_patches_debug_28974 | rasdani/github-patches | git_diff | prowler-cloud__prowler-2282 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Bug]: False positives on some checks?
### Steps to Reproduce
Hi,
it looks like some checks produce false positives (they are tagged as warning because I've allowlisted them):
```
Check ID: ec2_ebs_snapshots_encrypted - ec2 [medium]
WARNING eu-central-1: EBS Snapshot snap-112 is unencrypted.
WARNING eu-central-1: EBS Snapshot snap-113 is encrypted. <<<<
```
```
Check ID: iam_policy_allows_privilege_escalation - iam [high]
WARNING eu-central-1: Custom Policy arn:aws:iam::112:policy/aws_admin_access does not allow privilege escalation
```
Are you maybe simply overriding the status (also "PASS") with WARNING in case of an allowlist match?
Another type of issue but more like a question:
_sns_topics_not_publicly_accessible_ triggers with
` WARNING eu-central-1: SNS topic cloudwatch-pagerduty-alarms-ec2-state-changes policy with public access but has a Condition`
which is (from the User's perspective) a false positive as well because we have a condition, which prowler cannot evaluate?
### Expected behavior
none
### Actual Result with Screenshots or Logs
none
### How did you install Prowler?
Cloning the repository from github.com (git clone)
### Environment Resource
locally
### OS used
Linux
### Prowler version
3.4.1
### Pip version
none
### Context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py`
Content:
```
1 from prowler.lib.check.models import Check, Check_Report_AWS
2 from prowler.providers.aws.services.sns.sns_client import sns_client
3
4
5 class sns_topics_not_publicly_accessible(Check):
6 def execute(self):
7 findings = []
8 for topic in sns_client.topics:
9 report = Check_Report_AWS(self.metadata())
10 report.region = topic.region
11 report.resource_id = topic.name
12 report.resource_arn = topic.arn
13 report.resource_tags = topic.tags
14 report.status = "PASS"
15 report.status_extended = f"SNS topic {topic.name} without public access"
16 if topic.policy:
17 for statement in topic.policy["Statement"]:
18 # Only check allow statements
19 if statement["Effect"] == "Allow":
20 if (
21 "*" in statement["Principal"]
22 or (
23 "AWS" in statement["Principal"]
24 and "*" in statement["Principal"]["AWS"]
25 )
26 or (
27 "CanonicalUser" in statement["Principal"]
28 and "*" in statement["Principal"]["CanonicalUser"]
29 )
30 ):
31 if "Condition" not in statement:
32 report.status = "FAIL"
33 report.status_extended = (
34 f"SNS topic {topic.name} policy with public access"
35 )
36 else:
37 report.status = "FAIL"
38 report.status_extended = f"SNS topic {topic.name} policy with public access but has a Condition"
39
40 findings.append(report)
41
42 return findings
43
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py b/prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py
--- a/prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py
+++ b/prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py
@@ -12,7 +12,7 @@
report.resource_arn = topic.arn
report.resource_tags = topic.tags
report.status = "PASS"
- report.status_extended = f"SNS topic {topic.name} without public access"
+ report.status_extended = f"SNS topic {topic.name} is not publicly accesible"
if topic.policy:
for statement in topic.policy["Statement"]:
# Only check allow statements
@@ -31,11 +31,11 @@
if "Condition" not in statement:
report.status = "FAIL"
report.status_extended = (
- f"SNS topic {topic.name} policy with public access"
+ f"SNS topic {topic.name} is publicly accesible"
)
else:
- report.status = "FAIL"
- report.status_extended = f"SNS topic {topic.name} policy with public access but has a Condition"
+ report.status = "PASS"
+ report.status_extended = f"SNS topic {topic.name} is publicly accesible but has a Condition that could filter it"
findings.append(report)
| {"golden_diff": "diff --git a/prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py b/prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py\n--- a/prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py\n+++ b/prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py\n@@ -12,7 +12,7 @@\n report.resource_arn = topic.arn\n report.resource_tags = topic.tags\n report.status = \"PASS\"\n- report.status_extended = f\"SNS topic {topic.name} without public access\"\n+ report.status_extended = f\"SNS topic {topic.name} is not publicly accesible\"\n if topic.policy:\n for statement in topic.policy[\"Statement\"]:\n # Only check allow statements\n@@ -31,11 +31,11 @@\n if \"Condition\" not in statement:\n report.status = \"FAIL\"\n report.status_extended = (\n- f\"SNS topic {topic.name} policy with public access\"\n+ f\"SNS topic {topic.name} is publicly accesible\"\n )\n else:\n- report.status = \"FAIL\"\n- report.status_extended = f\"SNS topic {topic.name} policy with public access but has a Condition\"\n+ report.status = \"PASS\"\n+ report.status_extended = f\"SNS topic {topic.name} is publicly accesible but has a Condition that could filter it\"\n \n findings.append(report)\n", "issue": "[Bug]: False positives on some checks?\n### Steps to Reproduce\n\nHi,\r\n\r\nit looks like some checks produce false positives (they are tagged as warning because I've allowlisted them):\r\n\r\n```\r\nCheck ID: ec2_ebs_snapshots_encrypted - ec2 [medium]\r\n WARNING eu-central-1: EBS Snapshot snap-112 is unencrypted.\r\n WARNING eu-central-1: EBS Snapshot snap-113 is encrypted. 
<<<<\r\n```\r\n\r\n\r\n```\r\nCheck ID: iam_policy_allows_privilege_escalation - iam [high]\r\n WARNING eu-central-1: Custom Policy arn:aws:iam::112:policy/aws_admin_access does not allow privilege escalation\r\n```\r\n\r\nAre you maybe simply overring the status (also \"PASS\") by WARNING in case of an allowlist match?\r\n\r\n\r\nAnother type of issue but more like a question:\r\n\r\n_sns_topics_not_publicly_accessible_ triggers with \r\n` WARNING eu-central-1: SNS topic cloudwatch-pagerduty-alarms-ec2-state-changes policy with public access but has a Condition`\r\nwhich is (from the User's perspective) a false positive as well because we have a condition, which prowler cannot evaluate?\r\n\r\n\r\n\n\n### Expected behavior\n\nnone\n\n### Actual Result with Screenshots or Logs\n\nnone\n\n### How did you install Prowler?\n\nCloning the repository from github.com (git clone)\n\n### Environment Resource\n\nlocally\n\n### OS used\n\nLinux\n\n### Prowler version\n\n3.4.1\n\n### Pip version\n\nnone\n\n### Context\n\n_No response_\n", "before_files": [{"content": "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.sns.sns_client import sns_client\n\n\nclass sns_topics_not_publicly_accessible(Check):\n def execute(self):\n findings = []\n for topic in sns_client.topics:\n report = Check_Report_AWS(self.metadata())\n report.region = topic.region\n report.resource_id = topic.name\n report.resource_arn = topic.arn\n report.resource_tags = topic.tags\n report.status = \"PASS\"\n report.status_extended = f\"SNS topic {topic.name} without public access\"\n if topic.policy:\n for statement in topic.policy[\"Statement\"]:\n # Only check allow statements\n if statement[\"Effect\"] == \"Allow\":\n if (\n \"*\" in statement[\"Principal\"]\n or (\n \"AWS\" in statement[\"Principal\"]\n and \"*\" in statement[\"Principal\"][\"AWS\"]\n )\n or (\n \"CanonicalUser\" in statement[\"Principal\"]\n and \"*\" in statement[\"Principal\"][\"CanonicalUser\"]\n )\n ):\n if \"Condition\" not in statement:\n report.status = \"FAIL\"\n report.status_extended = (\n f\"SNS topic {topic.name} policy with public access\"\n )\n else:\n report.status = \"FAIL\"\n report.status_extended = f\"SNS topic {topic.name} policy with public access but has a Condition\"\n\n findings.append(report)\n\n return findings\n", "path": "prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py"}], "after_files": [{"content": "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.sns.sns_client import sns_client\n\n\nclass sns_topics_not_publicly_accessible(Check):\n def execute(self):\n findings = []\n for topic in sns_client.topics:\n report = Check_Report_AWS(self.metadata())\n report.region = topic.region\n report.resource_id = topic.name\n report.resource_arn = topic.arn\n report.resource_tags = topic.tags\n report.status = \"PASS\"\n report.status_extended = f\"SNS topic {topic.name} is not publicly accesible\"\n if topic.policy:\n for statement in topic.policy[\"Statement\"]:\n # Only check allow statements\n if statement[\"Effect\"] == \"Allow\":\n if (\n \"*\" in statement[\"Principal\"]\n or (\n \"AWS\" in statement[\"Principal\"]\n and \"*\" in statement[\"Principal\"][\"AWS\"]\n )\n or (\n \"CanonicalUser\" in statement[\"Principal\"]\n and \"*\" in statement[\"Principal\"][\"CanonicalUser\"]\n )\n ):\n if \"Condition\" not in statement:\n report.status = \"FAIL\"\n report.status_extended = (\n 
f\"SNS topic {topic.name} is publicly accesible\"\n )\n else:\n report.status = \"PASS\"\n report.status_extended = f\"SNS topic {topic.name} is publicly accesible but has a Condition that could filter it\"\n\n findings.append(report)\n\n return findings\n", "path": "prowler/providers/aws/services/sns/sns_topics_not_publicly_accessible/sns_topics_not_publicly_accessible.py"}]} | 988 | 343 |
gh_patches_debug_26160 | rasdani/github-patches | git_diff | buildbot__buildbot-1614 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix bytes/unicode issue to fix test on Python 3
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `master/buildbot/db/schedulers.py`
Content:
```
1 # This file is part of Buildbot. Buildbot is free software: you can
2 # redistribute it and/or modify it under the terms of the GNU General Public
3 # License as published by the Free Software Foundation, version 2.
4 #
5 # This program is distributed in the hope that it will be useful, but WITHOUT
6 # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
7 # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
8 # details.
9 #
10 # You should have received a copy of the GNU General Public License along with
11 # this program; if not, write to the Free Software Foundation, Inc., 51
12 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
13 #
14 # Copyright Buildbot Team Members
15
16 import sqlalchemy as sa
17 import sqlalchemy.exc
18
19 from buildbot.db import base
20
21
22 class SchedulersConnectorComponent(base.DBConnectorComponent):
23 # Documentation is in developer/database.rst
24
25 def classifyChanges(self, objectid, classifications):
26 def thd(conn):
27 transaction = conn.begin()
28 tbl = self.db.model.scheduler_changes
29 ins_q = tbl.insert()
30 upd_q = tbl.update(
31 ((tbl.c.objectid == objectid)
32 & (tbl.c.changeid == sa.bindparam('wc_changeid'))))
33 for changeid, important in classifications.items():
34 # convert the 'important' value into an integer, since that
35 # is the column type
36 imp_int = important and 1 or 0
37 try:
38 conn.execute(ins_q,
39 objectid=objectid,
40 changeid=changeid,
41 important=imp_int)
42 except (sqlalchemy.exc.ProgrammingError,
43 sqlalchemy.exc.IntegrityError):
44 transaction.rollback()
45 transaction = conn.begin()
46 # insert failed, so try an update
47 conn.execute(upd_q,
48 wc_changeid=changeid,
49 important=imp_int)
50
51 transaction.commit()
52 return self.db.pool.do(thd)
53
54 def flushChangeClassifications(self, objectid, less_than=None):
55 def thd(conn):
56 sch_ch_tbl = self.db.model.scheduler_changes
57 wc = (sch_ch_tbl.c.objectid == objectid)
58 if less_than is not None:
59 wc = wc & (sch_ch_tbl.c.changeid < less_than)
60 q = sch_ch_tbl.delete(whereclause=wc)
61 conn.execute(q)
62 return self.db.pool.do(thd)
63
64 class Thunk:
65 pass
66
67 def getChangeClassifications(self, objectid, branch=Thunk,
68 repository=Thunk, project=Thunk,
69 codebase=Thunk):
70 def thd(conn):
71 sch_ch_tbl = self.db.model.scheduler_changes
72 ch_tbl = self.db.model.changes
73
74 wc = (sch_ch_tbl.c.objectid == objectid)
75
76 # may need to filter further based on branch, etc
77 extra_wheres = []
78 if branch is not self.Thunk:
79 extra_wheres.append(ch_tbl.c.branch == branch)
80 if repository is not self.Thunk:
81 extra_wheres.append(ch_tbl.c.repository == repository)
82 if project is not self.Thunk:
83 extra_wheres.append(ch_tbl.c.project == project)
84 if codebase is not self.Thunk:
85 extra_wheres.append(ch_tbl.c.codebase == codebase)
86
87 # if we need to filter further append those, as well as a join
88 # on changeid (but just once for that one)
89 if extra_wheres:
90 wc &= (sch_ch_tbl.c.changeid == ch_tbl.c.changeid)
91 for w in extra_wheres:
92 wc &= w
93
94 q = sa.select(
95 [sch_ch_tbl.c.changeid, sch_ch_tbl.c.important],
96 whereclause=wc)
97 return dict([(r.changeid, [False, True][r.important])
98 for r in conn.execute(q)])
99 return self.db.pool.do(thd)
100
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/master/buildbot/db/schedulers.py b/master/buildbot/db/schedulers.py
--- a/master/buildbot/db/schedulers.py
+++ b/master/buildbot/db/schedulers.py
@@ -24,13 +24,13 @@
def classifyChanges(self, objectid, classifications):
def thd(conn):
- transaction = conn.begin()
tbl = self.db.model.scheduler_changes
ins_q = tbl.insert()
upd_q = tbl.update(
((tbl.c.objectid == objectid)
& (tbl.c.changeid == sa.bindparam('wc_changeid'))))
for changeid, important in classifications.items():
+ transaction = conn.begin()
# convert the 'important' value into an integer, since that
# is the column type
imp_int = important and 1 or 0
@@ -48,7 +48,7 @@
wc_changeid=changeid,
important=imp_int)
- transaction.commit()
+ transaction.commit()
return self.db.pool.do(thd)
def flushChangeClassifications(self, objectid, less_than=None):
| {"golden_diff": "diff --git a/master/buildbot/db/schedulers.py b/master/buildbot/db/schedulers.py\n--- a/master/buildbot/db/schedulers.py\n+++ b/master/buildbot/db/schedulers.py\n@@ -24,13 +24,13 @@\n \n def classifyChanges(self, objectid, classifications):\n def thd(conn):\n- transaction = conn.begin()\n tbl = self.db.model.scheduler_changes\n ins_q = tbl.insert()\n upd_q = tbl.update(\n ((tbl.c.objectid == objectid)\n & (tbl.c.changeid == sa.bindparam('wc_changeid'))))\n for changeid, important in classifications.items():\n+ transaction = conn.begin()\n # convert the 'important' value into an integer, since that\n # is the column type\n imp_int = important and 1 or 0\n@@ -48,7 +48,7 @@\n wc_changeid=changeid,\n important=imp_int)\n \n- transaction.commit()\n+ transaction.commit()\n return self.db.pool.do(thd)\n \n def flushChangeClassifications(self, objectid, less_than=None):\n", "issue": "Fix bytes/unicode issue to fix test on Python 3\n\n", "before_files": [{"content": "# This file is part of Buildbot. Buildbot is free software: you can\n# redistribute it and/or modify it under the terms of the GNU General Public\n# License as published by the Free Software Foundation, version 2.\n#\n# This program is distributed in the hope that it will be useful, but WITHOUT\n# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\n# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more\n# details.\n#\n# You should have received a copy of the GNU General Public License along with\n# this program; if not, write to the Free Software Foundation, Inc., 51\n# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n#\n# Copyright Buildbot Team Members\n\nimport sqlalchemy as sa\nimport sqlalchemy.exc\n\nfrom buildbot.db import base\n\n\nclass SchedulersConnectorComponent(base.DBConnectorComponent):\n # Documentation is in developer/database.rst\n\n def classifyChanges(self, objectid, classifications):\n def thd(conn):\n transaction = conn.begin()\n tbl = self.db.model.scheduler_changes\n ins_q = tbl.insert()\n upd_q = tbl.update(\n ((tbl.c.objectid == objectid)\n & (tbl.c.changeid == sa.bindparam('wc_changeid'))))\n for changeid, important in classifications.items():\n # convert the 'important' value into an integer, since that\n # is the column type\n imp_int = important and 1 or 0\n try:\n conn.execute(ins_q,\n objectid=objectid,\n changeid=changeid,\n important=imp_int)\n except (sqlalchemy.exc.ProgrammingError,\n sqlalchemy.exc.IntegrityError):\n transaction.rollback()\n transaction = conn.begin()\n # insert failed, so try an update\n conn.execute(upd_q,\n wc_changeid=changeid,\n important=imp_int)\n\n transaction.commit()\n return self.db.pool.do(thd)\n\n def flushChangeClassifications(self, objectid, less_than=None):\n def thd(conn):\n sch_ch_tbl = self.db.model.scheduler_changes\n wc = (sch_ch_tbl.c.objectid == objectid)\n if less_than is not None:\n wc = wc & (sch_ch_tbl.c.changeid < less_than)\n q = sch_ch_tbl.delete(whereclause=wc)\n conn.execute(q)\n return self.db.pool.do(thd)\n\n class Thunk:\n pass\n\n def getChangeClassifications(self, objectid, branch=Thunk,\n repository=Thunk, project=Thunk,\n codebase=Thunk):\n def thd(conn):\n sch_ch_tbl = self.db.model.scheduler_changes\n ch_tbl = self.db.model.changes\n\n wc = (sch_ch_tbl.c.objectid == objectid)\n\n # may need to filter further based on branch, etc\n extra_wheres = []\n if branch is not self.Thunk:\n extra_wheres.append(ch_tbl.c.branch == branch)\n if repository is not self.Thunk:\n 
extra_wheres.append(ch_tbl.c.repository == repository)\n if project is not self.Thunk:\n extra_wheres.append(ch_tbl.c.project == project)\n if codebase is not self.Thunk:\n extra_wheres.append(ch_tbl.c.codebase == codebase)\n\n # if we need to filter further append those, as well as a join\n # on changeid (but just once for that one)\n if extra_wheres:\n wc &= (sch_ch_tbl.c.changeid == ch_tbl.c.changeid)\n for w in extra_wheres:\n wc &= w\n\n q = sa.select(\n [sch_ch_tbl.c.changeid, sch_ch_tbl.c.important],\n whereclause=wc)\n return dict([(r.changeid, [False, True][r.important])\n for r in conn.execute(q)])\n return self.db.pool.do(thd)\n", "path": "master/buildbot/db/schedulers.py"}], "after_files": [{"content": "# This file is part of Buildbot. Buildbot is free software: you can\n# redistribute it and/or modify it under the terms of the GNU General Public\n# License as published by the Free Software Foundation, version 2.\n#\n# This program is distributed in the hope that it will be useful, but WITHOUT\n# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\n# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more\n# details.\n#\n# You should have received a copy of the GNU General Public License along with\n# this program; if not, write to the Free Software Foundation, Inc., 51\n# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n#\n# Copyright Buildbot Team Members\n\nimport sqlalchemy as sa\nimport sqlalchemy.exc\n\nfrom buildbot.db import base\n\n\nclass SchedulersConnectorComponent(base.DBConnectorComponent):\n # Documentation is in developer/database.rst\n\n def classifyChanges(self, objectid, classifications):\n def thd(conn):\n tbl = self.db.model.scheduler_changes\n ins_q = tbl.insert()\n upd_q = tbl.update(\n ((tbl.c.objectid == objectid)\n & (tbl.c.changeid == sa.bindparam('wc_changeid'))))\n for changeid, important in classifications.items():\n transaction = conn.begin()\n # convert the 'important' value into an integer, since that\n # is the column type\n imp_int = important and 1 or 0\n try:\n conn.execute(ins_q,\n objectid=objectid,\n changeid=changeid,\n important=imp_int)\n except (sqlalchemy.exc.ProgrammingError,\n sqlalchemy.exc.IntegrityError):\n transaction.rollback()\n transaction = conn.begin()\n # insert failed, so try an update\n conn.execute(upd_q,\n wc_changeid=changeid,\n important=imp_int)\n\n transaction.commit()\n return self.db.pool.do(thd)\n\n def flushChangeClassifications(self, objectid, less_than=None):\n def thd(conn):\n sch_ch_tbl = self.db.model.scheduler_changes\n wc = (sch_ch_tbl.c.objectid == objectid)\n if less_than is not None:\n wc = wc & (sch_ch_tbl.c.changeid < less_than)\n q = sch_ch_tbl.delete(whereclause=wc)\n conn.execute(q)\n return self.db.pool.do(thd)\n\n class Thunk:\n pass\n\n def getChangeClassifications(self, objectid, branch=Thunk,\n repository=Thunk, project=Thunk,\n codebase=Thunk):\n def thd(conn):\n sch_ch_tbl = self.db.model.scheduler_changes\n ch_tbl = self.db.model.changes\n\n wc = (sch_ch_tbl.c.objectid == objectid)\n\n # may need to filter further based on branch, etc\n extra_wheres = []\n if branch is not self.Thunk:\n extra_wheres.append(ch_tbl.c.branch == branch)\n if repository is not self.Thunk:\n extra_wheres.append(ch_tbl.c.repository == repository)\n if project is not self.Thunk:\n extra_wheres.append(ch_tbl.c.project == project)\n if codebase is not self.Thunk:\n extra_wheres.append(ch_tbl.c.codebase == codebase)\n\n # if we need to filter further append those, as 
well as a join\n # on changeid (but just once for that one)\n if extra_wheres:\n wc &= (sch_ch_tbl.c.changeid == ch_tbl.c.changeid)\n for w in extra_wheres:\n wc &= w\n\n q = sa.select(\n [sch_ch_tbl.c.changeid, sch_ch_tbl.c.important],\n whereclause=wc)\n return dict([(r.changeid, [False, True][r.important])\n for r in conn.execute(q)])\n return self.db.pool.do(thd)\n", "path": "master/buildbot/db/schedulers.py"}]} | 1,309 | 242 |
gh_patches_debug_23728 | rasdani/github-patches | git_diff | aws-cloudformation__cfn-lint-179 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Nested mappings raise an error
```cfn-lint 0.3.1```
We use nested maps in our templates:
```yaml
Mappings:
RegionAccountToAZ:
ap-northeast-1:
0123456789:
- ap-northeast-1a
- ap-northeast-1c
- none
9876543210:
- ap-northeast-1a
- ap-northeast-1b
- ap-northeast-1c
```
We'd access this data using a construction like `!FindInMap [RegionAccountToAZ, !Ref 'AWS::Region', !Ref 'AWS::AccountId']`. However cfn-lint says:
```
E7001 Mapping RegionAccountToAZ has invalid property at 9876543210
test.cfn.yaml:3:5
E7001 Mapping RegionAccountToAZ has invalid property at 0123456789
test.cfn.yaml:4:7
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cfnlint/rules/mappings/Configuration.py`
Content:
```
1 """
2 Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3
4 Permission is hereby granted, free of charge, to any person obtaining a copy of this
5 software and associated documentation files (the "Software"), to deal in the Software
6 without restriction, including without limitation the rights to use, copy, modify,
7 merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
8 permit persons to whom the Software is furnished to do so.
9
10 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
11 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
12 PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
13 HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
14 OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
15 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 """
17 from cfnlint import CloudFormationLintRule
18 from cfnlint import RuleMatch
19
20
21 class Configuration(CloudFormationLintRule):
22 """Check if Mappings are configured correctly"""
23 id = 'E7001'
24 shortdesc = 'Mappings are appropriately configured'
25 description = 'Check if Mappings are properly configured'
26 tags = ['base', 'mappings']
27
28 def match(self, cfn):
29 """Check CloudFormation Parameters"""
30
31 matches = list()
32
33 mappings = cfn.template.get('Mappings', {})
34 if mappings:
35 for mapname, mapobj in mappings.items():
36 if not isinstance(mapobj, dict):
37 message = 'Mapping {0} has invalid property'
38 matches.append(RuleMatch(
39 ['Mappings', mapname],
40 message.format(mapname)
41 ))
42 else:
43 for firstkey in mapobj:
44 firstkeyobj = mapobj[firstkey]
45 if not isinstance(firstkeyobj, dict):
46 message = 'Mapping {0} has invalid property at {1}'
47 matches.append(RuleMatch(
48 ['Mappings', mapname, firstkey],
49 message.format(mapname, firstkeyobj)
50 ))
51 else:
52 for secondkey in firstkeyobj:
53 if isinstance(firstkeyobj[secondkey], (dict, list)):
54 message = 'Mapping {0} has invalid property at {1}'
55 matches.append(RuleMatch(
56 ['Mappings', mapname, firstkey, secondkey],
57 message.format(mapname, secondkey)
58 ))
59
60 return matches
61
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/cfnlint/rules/mappings/Configuration.py b/src/cfnlint/rules/mappings/Configuration.py
--- a/src/cfnlint/rules/mappings/Configuration.py
+++ b/src/cfnlint/rules/mappings/Configuration.py
@@ -14,6 +14,7 @@
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
+import six
from cfnlint import CloudFormationLintRule
from cfnlint import RuleMatch
@@ -50,7 +51,9 @@
))
else:
for secondkey in firstkeyobj:
- if isinstance(firstkeyobj[secondkey], (dict, list)):
+ if not isinstance(
+ firstkeyobj[secondkey],
+ (six.string_types, list, six.integer_types)):
message = 'Mapping {0} has invalid property at {1}'
matches.append(RuleMatch(
['Mappings', mapname, firstkey, secondkey],
| {"golden_diff": "diff --git a/src/cfnlint/rules/mappings/Configuration.py b/src/cfnlint/rules/mappings/Configuration.py\n--- a/src/cfnlint/rules/mappings/Configuration.py\n+++ b/src/cfnlint/rules/mappings/Configuration.py\n@@ -14,6 +14,7 @@\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n \"\"\"\n+import six\n from cfnlint import CloudFormationLintRule\n from cfnlint import RuleMatch\n \n@@ -50,7 +51,9 @@\n ))\n else:\n for secondkey in firstkeyobj:\n- if isinstance(firstkeyobj[secondkey], (dict, list)):\n+ if not isinstance(\n+ firstkeyobj[secondkey],\n+ (six.string_types, list, six.integer_types)):\n message = 'Mapping {0} has invalid property at {1}'\n matches.append(RuleMatch(\n ['Mappings', mapname, firstkey, secondkey],\n", "issue": "Nested mappings raise an error\n```cfn-lint 0.3.1```\r\n\r\nWe use nested maps in our templates:\r\n\r\n```yaml\r\nMappings:\r\n RegionAccountToAZ:\r\n ap-northeast-1:\r\n 0123456789:\r\n - ap-northeast-1a\r\n - ap-northeast-1c\r\n - none\r\n 9876543210:\r\n - ap-northeast-1a\r\n - ap-northeast-1b\r\n - ap-northeast-1c\r\n```\r\n\r\nWe'd access this data using a construction like `!FindInMap [RegionAccountToAZ, !Ref 'AWS::Region', !Ref 'AWS::AccountId']`. However cfn-lint says:\r\n\r\n```\r\nE7001 Mapping RegionAccountToAZ has invalid property at 9876543210\r\ntest.cfn.yaml:3:5\r\n\r\nE7001 Mapping RegionAccountToAZ has invalid property at 0123456789\r\ntest.cfn.yaml:4:7\r\n```\n", "before_files": [{"content": "\"\"\"\n Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining a copy of this\n software and associated documentation files (the \"Software\"), to deal in the Software\n without restriction, including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\nfrom cfnlint import CloudFormationLintRule\nfrom cfnlint import RuleMatch\n\n\nclass Configuration(CloudFormationLintRule):\n \"\"\"Check if Mappings are configured correctly\"\"\"\n id = 'E7001'\n shortdesc = 'Mappings are appropriately configured'\n description = 'Check if Mappings are properly configured'\n tags = ['base', 'mappings']\n\n def match(self, cfn):\n \"\"\"Check CloudFormation Parameters\"\"\"\n\n matches = list()\n\n mappings = cfn.template.get('Mappings', {})\n if mappings:\n for mapname, mapobj in mappings.items():\n if not isinstance(mapobj, dict):\n message = 'Mapping {0} has invalid property'\n matches.append(RuleMatch(\n ['Mappings', mapname],\n message.format(mapname)\n ))\n else:\n for firstkey in mapobj:\n firstkeyobj = mapobj[firstkey]\n if not isinstance(firstkeyobj, dict):\n message = 'Mapping {0} has invalid property at {1}'\n matches.append(RuleMatch(\n ['Mappings', mapname, firstkey],\n message.format(mapname, firstkeyobj)\n ))\n else:\n for secondkey in firstkeyobj:\n if isinstance(firstkeyobj[secondkey], (dict, list)):\n message = 'Mapping {0} has invalid property at {1}'\n matches.append(RuleMatch(\n ['Mappings', mapname, firstkey, secondkey],\n message.format(mapname, secondkey)\n ))\n\n return matches\n", "path": "src/cfnlint/rules/mappings/Configuration.py"}], "after_files": [{"content": "\"\"\"\n Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining a copy of this\n software and associated documentation files (the \"Software\"), to deal in the Software\n without restriction, including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\nimport six\nfrom cfnlint import CloudFormationLintRule\nfrom cfnlint import RuleMatch\n\n\nclass Configuration(CloudFormationLintRule):\n \"\"\"Check if Mappings are configured correctly\"\"\"\n id = 'E7001'\n shortdesc = 'Mappings are appropriately configured'\n description = 'Check if Mappings are properly configured'\n tags = ['base', 'mappings']\n\n def match(self, cfn):\n \"\"\"Check CloudFormation Parameters\"\"\"\n\n matches = list()\n\n mappings = cfn.template.get('Mappings', {})\n if mappings:\n for mapname, mapobj in mappings.items():\n if not isinstance(mapobj, dict):\n message = 'Mapping {0} has invalid property'\n matches.append(RuleMatch(\n ['Mappings', mapname],\n message.format(mapname)\n ))\n else:\n for firstkey in mapobj:\n firstkeyobj = mapobj[firstkey]\n if not isinstance(firstkeyobj, dict):\n message = 'Mapping {0} has invalid property at {1}'\n matches.append(RuleMatch(\n ['Mappings', mapname, firstkey],\n message.format(mapname, firstkeyobj)\n ))\n else:\n for secondkey in firstkeyobj:\n if not isinstance(\n firstkeyobj[secondkey],\n (six.string_types, list, six.integer_types)):\n message = 'Mapping {0} has invalid property at {1}'\n matches.append(RuleMatch(\n ['Mappings', mapname, firstkey, secondkey],\n message.format(mapname, secondkey)\n ))\n\n return matches\n", "path": "src/cfnlint/rules/mappings/Configuration.py"}]} | 1,153 | 223 |
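
The effect of the patch is that second-level mapping values may be strings, integers, or lists, so the nested availability-zone lists from the issue no longer trigger E7001, while values of any other type (for example a further nested dict) still do. A minimal sketch of that type check (illustrative values only):

```python
import six

def second_level_value_is_valid(value):
    # mirrors the patched isinstance() check in Configuration.match()
    return isinstance(value, (six.string_types, list, six.integer_types))

print(second_level_value_is_valid(["ap-northeast-1a", "ap-northeast-1c", "none"]))  # True
print(second_level_value_is_valid({"still": "invalid"}))                            # False
```
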
gh_patches_debug_8921 | rasdani/github-patches | git_diff | aws-cloudformation__cfn-lint-870 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Don't hardcode all for AvailabilityZones not relevant for AWS::ElasticLoadBalancingV2::TargetGroup
Hello,
*cfn-lint version: 0.19.1*
*Description of issue.*
The following snippet :
```
Resources:
DefaultTargetGroup:
Type: "AWS::ElasticLoadBalancingV2::TargetGroup"
Properties:
VpcId: hello
Port: 80
Protocol: HTTP
HealthCheckIntervalSeconds: 30
HealthCheckPath: "/"
HealthCheckPort: "80"
HealthCheckProtocol: "HTTP"
HealthCheckTimeoutSeconds: 5
HealthyThresholdCount: 5
TargetType: ip
Targets:
-
Id: "10.31.33.28"
AvailabilityZone: all
Matcher:
HttpCode: "200"
TargetGroupAttributes:
- Key: deregistration_delay.timeout_seconds
Value: "20"
```
Triggers this warn message :
> W3010 Don't hardcode all for AvailabilityZones
In the case of AWS::ElasticLoadBalancingV2::TargetGroup, there is legitimacy to hardcode all for AvailabilityZones :
> If the IP address is outside the VPC, this parameter is required. With an Application Load Balancer, if the target type is ip and the IP address is outside the VPC for the target group, the only supported value is all.
I'm unsure what to PR here. Should we get rid of this line ? https://github.com/aws-cloudformation/cfn-python-lint/blob/master/src/cfnlint/rules/resources/properties/AvailabilityZone.py#L52
Thanks for the suggestions.
[1] https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-targetgroup-targetdescription.html#aws-properties-elasticloadbalancingv2-targetgroup-targetdescription-properties
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cfnlint/rules/resources/properties/AvailabilityZone.py`
Content:
```
1 """
2 Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3
4 Permission is hereby granted, free of charge, to any person obtaining a copy of this
5 software and associated documentation files (the "Software"), to deal in the Software
6 without restriction, including without limitation the rights to use, copy, modify,
7 merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
8 permit persons to whom the Software is furnished to do so.
9
10 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
11 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
12 PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
13 HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
14 OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
15 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 """
17 from cfnlint import CloudFormationLintRule
18 from cfnlint import RuleMatch
19
20
21 class AvailabilityZone(CloudFormationLintRule):
22 """Check Availibility Zone parameter checks """
23 id = 'W3010'
24 shortdesc = 'Availability Zone Parameters should not be hardcoded'
25 description = 'Check if an Availability Zone property is hardcoded.'
26 source_url = 'https://github.com/aws-cloudformation/cfn-python-lint'
27 tags = ['parameters', 'availabilityzone']
28
29 def __init__(self):
30 """Init"""
31 super(AvailabilityZone, self).__init__()
32 resource_type_specs = [
33 'AWS::DAX::Cluster',
34 'AWS::AutoScaling::AutoScalingGroup',
35 'AWS::RDS::DBCluster',
36 'AWS::EC2::Volume',
37 'AWS::ElasticLoadBalancing::LoadBalancer',
38 'AWS::OpsWorks::Instance',
39 'AWS::RDS::DBInstance',
40 'AWS::EC2::Host',
41 'AWS::EC2::Subnet',
42 'AWS::DMS::ReplicationInstance',
43 'AWS::EC2::Instance'
44 ]
45
46 property_type_specs = [
47 # Singular
48 'AWS::EC2::LaunchTemplate.Placement',
49 'AWS::EC2::SpotFleet.SpotPlacement',
50 'AWS::EMR::Cluster.PlacementType',
51 'AWS::Glue::Connection.PhysicalConnectionRequirements',
52 'AWS::ElasticLoadBalancingV2::TargetGroup.TargetDescription',
53 'AWS::EC2::SpotFleet.LaunchTemplateOverrides',
54 ]
55
56 for resource_type_spec in resource_type_specs:
57 self.resource_property_types.append(resource_type_spec)
58 for property_type_spec in property_type_specs:
59 self.resource_sub_property_types.append(property_type_spec)
60
61 # pylint: disable=W0613
62 def check_az_value(self, value, path):
63 """Check ref for VPC"""
64 matches = []
65
66 if path[-1] != 'Fn::GetAZs':
67 message = 'Don\'t hardcode {0} for AvailabilityZones'
68 matches.append(RuleMatch(path, message.format(value)))
69
70 return matches
71
72 def check(self, properties, resource_type, path, cfn):
73 """Check itself"""
74 matches = []
75
76 matches.extend(
77 cfn.check_value(
78 properties, 'AvailabilityZone', path,
79 check_value=self.check_az_value, check_ref=None,
80 check_find_in_map=None, check_split=None, check_join=None
81 )
82 )
83 matches.extend(
84 cfn.check_value(
85 properties, 'AvailabilityZones', path,
86 check_value=self.check_az_value, check_ref=None,
87 check_find_in_map=None, check_split=None, check_join=None
88 )
89 )
90
91 return matches
92
93 def match_resource_sub_properties(self, properties, property_type, path, cfn):
94 """Match for sub properties"""
95 matches = []
96
97 matches.extend(self.check(properties, property_type, path, cfn))
98
99 return matches
100
101 def match_resource_properties(self, properties, resource_type, path, cfn):
102 """Check CloudFormation Properties"""
103 matches = []
104
105 matches.extend(self.check(properties, resource_type, path, cfn))
106
107 return matches
108
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/cfnlint/rules/resources/properties/AvailabilityZone.py b/src/cfnlint/rules/resources/properties/AvailabilityZone.py
--- a/src/cfnlint/rules/resources/properties/AvailabilityZone.py
+++ b/src/cfnlint/rules/resources/properties/AvailabilityZone.py
@@ -63,9 +63,11 @@
"""Check ref for VPC"""
matches = []
- if path[-1] != 'Fn::GetAZs':
- message = 'Don\'t hardcode {0} for AvailabilityZones'
- matches.append(RuleMatch(path, message.format(value)))
+ # value of `all` is a valide exception in AWS::ElasticLoadBalancingV2::TargetGroup
+ if value not in ['all']:
+ if path[-1] != ['Fn::GetAZs']:
+ message = 'Don\'t hardcode {0} for AvailabilityZones'
+ matches.append(RuleMatch(path, message.format(value)))
return matches
| {"golden_diff": "diff --git a/src/cfnlint/rules/resources/properties/AvailabilityZone.py b/src/cfnlint/rules/resources/properties/AvailabilityZone.py\n--- a/src/cfnlint/rules/resources/properties/AvailabilityZone.py\n+++ b/src/cfnlint/rules/resources/properties/AvailabilityZone.py\n@@ -63,9 +63,11 @@\n \"\"\"Check ref for VPC\"\"\"\n matches = []\n \n- if path[-1] != 'Fn::GetAZs':\n- message = 'Don\\'t hardcode {0} for AvailabilityZones'\n- matches.append(RuleMatch(path, message.format(value)))\n+ # value of `all` is a valide exception in AWS::ElasticLoadBalancingV2::TargetGroup\n+ if value not in ['all']:\n+ if path[-1] != ['Fn::GetAZs']:\n+ message = 'Don\\'t hardcode {0} for AvailabilityZones'\n+ matches.append(RuleMatch(path, message.format(value)))\n \n return matches\n", "issue": "Don't hardcode all for AvailabilityZones not relevant for AWS::ElasticLoadBalancingV2::TargetGroup\nHello, \r\n\r\n*cfn-lint version: 0.19.1*\r\n\r\n*Description of issue.*\r\n\r\nThe following snippet : \r\n\r\n```\r\nResources:\r\n DefaultTargetGroup:\r\n Type: \"AWS::ElasticLoadBalancingV2::TargetGroup\"\r\n Properties:\r\n VpcId: hello\r\n Port: 80\r\n Protocol: HTTP\r\n HealthCheckIntervalSeconds: 30\r\n HealthCheckPath: \"/\"\r\n HealthCheckPort: \"80\"\r\n HealthCheckProtocol: \"HTTP\"\r\n HealthCheckTimeoutSeconds: 5\r\n HealthyThresholdCount: 5\r\n TargetType: ip\r\n Targets:\r\n - \r\n Id: \"10.31.33.28\"\r\n AvailabilityZone: all\r\n Matcher:\r\n HttpCode: \"200\"\r\n TargetGroupAttributes:\r\n - Key: deregistration_delay.timeout_seconds\r\n Value: \"20\"\r\n```\r\n\r\nTriggers this warn message : \r\n\r\n> W3010 Don't hardcode all for AvailabilityZones \r\n\r\nIn the case of AWS::ElasticLoadBalancingV2::TargetGroup, there is legitimacy to hardcode all for AvailabilityZones : \r\n> If the IP address is outside the VPC, this parameter is required. With an Application Load Balancer, if the target type is ip and the IP address is outside the VPC for the target group, the only supported value is all. \r\n\r\nI'm unsure what to PR here. Should we get rid of this line ? https://github.com/aws-cloudformation/cfn-python-lint/blob/master/src/cfnlint/rules/resources/properties/AvailabilityZone.py#L52 \r\n\r\nThanks for the suggestions. \r\n\r\n[1] https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-targetgroup-targetdescription.html#aws-properties-elasticloadbalancingv2-targetgroup-targetdescription-properties\r\n\n", "before_files": [{"content": "\"\"\"\n Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining a copy of this\n software and associated documentation files (the \"Software\"), to deal in the Software\n without restriction, including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\nfrom cfnlint import CloudFormationLintRule\nfrom cfnlint import RuleMatch\n\n\nclass AvailabilityZone(CloudFormationLintRule):\n \"\"\"Check Availibility Zone parameter checks \"\"\"\n id = 'W3010'\n shortdesc = 'Availability Zone Parameters should not be hardcoded'\n description = 'Check if an Availability Zone property is hardcoded.'\n source_url = 'https://github.com/aws-cloudformation/cfn-python-lint'\n tags = ['parameters', 'availabilityzone']\n\n def __init__(self):\n \"\"\"Init\"\"\"\n super(AvailabilityZone, self).__init__()\n resource_type_specs = [\n 'AWS::DAX::Cluster',\n 'AWS::AutoScaling::AutoScalingGroup',\n 'AWS::RDS::DBCluster',\n 'AWS::EC2::Volume',\n 'AWS::ElasticLoadBalancing::LoadBalancer',\n 'AWS::OpsWorks::Instance',\n 'AWS::RDS::DBInstance',\n 'AWS::EC2::Host',\n 'AWS::EC2::Subnet',\n 'AWS::DMS::ReplicationInstance',\n 'AWS::EC2::Instance'\n ]\n\n property_type_specs = [\n # Singular\n 'AWS::EC2::LaunchTemplate.Placement',\n 'AWS::EC2::SpotFleet.SpotPlacement',\n 'AWS::EMR::Cluster.PlacementType',\n 'AWS::Glue::Connection.PhysicalConnectionRequirements',\n 'AWS::ElasticLoadBalancingV2::TargetGroup.TargetDescription',\n 'AWS::EC2::SpotFleet.LaunchTemplateOverrides',\n ]\n\n for resource_type_spec in resource_type_specs:\n self.resource_property_types.append(resource_type_spec)\n for property_type_spec in property_type_specs:\n self.resource_sub_property_types.append(property_type_spec)\n\n # pylint: disable=W0613\n def check_az_value(self, value, path):\n \"\"\"Check ref for VPC\"\"\"\n matches = []\n\n if path[-1] != 'Fn::GetAZs':\n message = 'Don\\'t hardcode {0} for AvailabilityZones'\n matches.append(RuleMatch(path, message.format(value)))\n\n return matches\n\n def check(self, properties, resource_type, path, cfn):\n \"\"\"Check itself\"\"\"\n matches = []\n\n matches.extend(\n cfn.check_value(\n properties, 'AvailabilityZone', path,\n check_value=self.check_az_value, check_ref=None,\n check_find_in_map=None, check_split=None, check_join=None\n )\n )\n matches.extend(\n cfn.check_value(\n properties, 'AvailabilityZones', path,\n check_value=self.check_az_value, check_ref=None,\n check_find_in_map=None, check_split=None, check_join=None\n )\n )\n\n return matches\n\n def match_resource_sub_properties(self, properties, property_type, path, cfn):\n \"\"\"Match for sub properties\"\"\"\n matches = []\n\n matches.extend(self.check(properties, property_type, path, cfn))\n\n return matches\n\n def match_resource_properties(self, properties, resource_type, path, cfn):\n \"\"\"Check CloudFormation Properties\"\"\"\n matches = []\n\n matches.extend(self.check(properties, resource_type, path, cfn))\n\n return matches\n", "path": "src/cfnlint/rules/resources/properties/AvailabilityZone.py"}], "after_files": [{"content": "\"\"\"\n Copyright 2019 Amazon.com, Inc. or its affiliates. 
All Rights Reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining a copy of this\n software and associated documentation files (the \"Software\"), to deal in the Software\n without restriction, including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\nfrom cfnlint import CloudFormationLintRule\nfrom cfnlint import RuleMatch\n\n\nclass AvailabilityZone(CloudFormationLintRule):\n \"\"\"Check Availibility Zone parameter checks \"\"\"\n id = 'W3010'\n shortdesc = 'Availability Zone Parameters should not be hardcoded'\n description = 'Check if an Availability Zone property is hardcoded.'\n source_url = 'https://github.com/aws-cloudformation/cfn-python-lint'\n tags = ['parameters', 'availabilityzone']\n\n def __init__(self):\n \"\"\"Init\"\"\"\n super(AvailabilityZone, self).__init__()\n resource_type_specs = [\n 'AWS::DAX::Cluster',\n 'AWS::AutoScaling::AutoScalingGroup',\n 'AWS::RDS::DBCluster',\n 'AWS::EC2::Volume',\n 'AWS::ElasticLoadBalancing::LoadBalancer',\n 'AWS::OpsWorks::Instance',\n 'AWS::RDS::DBInstance',\n 'AWS::EC2::Host',\n 'AWS::EC2::Subnet',\n 'AWS::DMS::ReplicationInstance',\n 'AWS::EC2::Instance'\n ]\n\n property_type_specs = [\n # Singular\n 'AWS::EC2::LaunchTemplate.Placement',\n 'AWS::EC2::SpotFleet.SpotPlacement',\n 'AWS::EMR::Cluster.PlacementType',\n 'AWS::Glue::Connection.PhysicalConnectionRequirements',\n 'AWS::ElasticLoadBalancingV2::TargetGroup.TargetDescription',\n 'AWS::EC2::SpotFleet.LaunchTemplateOverrides',\n ]\n\n for resource_type_spec in resource_type_specs:\n self.resource_property_types.append(resource_type_spec)\n for property_type_spec in property_type_specs:\n self.resource_sub_property_types.append(property_type_spec)\n\n # pylint: disable=W0613\n def check_az_value(self, value, path):\n \"\"\"Check ref for VPC\"\"\"\n matches = []\n\n # value of `all` is a valide exception in AWS::ElasticLoadBalancingV2::TargetGroup\n if value not in ['all']:\n if path[-1] != ['Fn::GetAZs']:\n message = 'Don\\'t hardcode {0} for AvailabilityZones'\n matches.append(RuleMatch(path, message.format(value)))\n\n return matches\n\n def check(self, properties, resource_type, path, cfn):\n \"\"\"Check itself\"\"\"\n matches = []\n\n matches.extend(\n cfn.check_value(\n properties, 'AvailabilityZone', path,\n check_value=self.check_az_value, check_ref=None,\n check_find_in_map=None, check_split=None, check_join=None\n )\n )\n matches.extend(\n cfn.check_value(\n properties, 'AvailabilityZones', path,\n check_value=self.check_az_value, check_ref=None,\n check_find_in_map=None, check_split=None, check_join=None\n )\n )\n\n return matches\n\n def match_resource_sub_properties(self, properties, property_type, path, cfn):\n \"\"\"Match for sub properties\"\"\"\n matches = []\n\n matches.extend(self.check(properties, property_type, path, cfn))\n\n return matches\n\n def match_resource_properties(self, properties, resource_type, 
path, cfn):\n \"\"\"Check CloudFormation Properties\"\"\"\n matches = []\n\n matches.extend(self.check(properties, resource_type, path, cfn))\n\n return matches\n", "path": "src/cfnlint/rules/resources/properties/AvailabilityZone.py"}]} | 1,816 | 218 |
gh_patches_debug_20 | rasdani/github-patches | git_diff | google__pytype-251 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add test_data to MANIFEST.in
This PR also needs to be imported and re-exported rather than merged directly. I'm planning to use this one to test the import process fix I sent you.
Fixes https://github.com/google/pytype/issues/245.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pytype/__version__.py`
Content:
```
1 # pylint: skip-file
2 __version__ = '2019.02.13'
3
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pytype/__version__.py b/pytype/__version__.py
--- a/pytype/__version__.py
+++ b/pytype/__version__.py
@@ -1,2 +1,2 @@
# pylint: skip-file
-__version__ = '2019.02.13'
+__version__ = '2019.03.01'
| {"golden_diff": "diff --git a/pytype/__version__.py b/pytype/__version__.py\n--- a/pytype/__version__.py\n+++ b/pytype/__version__.py\n@@ -1,2 +1,2 @@\n # pylint: skip-file\n-__version__ = '2019.02.13'\n+__version__ = '2019.03.01'\n", "issue": "Add test_data to MANIFEST.in\nThis PR also needs to be imported and re-exported rather than merged directly. I'm planning to use this one to test the import process fix I sent you.\r\n\r\nFixes https://github.com/google/pytype/issues/245.\n", "before_files": [{"content": "# pylint: skip-file\n__version__ = '2019.02.13'\n", "path": "pytype/__version__.py"}], "after_files": [{"content": "# pylint: skip-file\n__version__ = '2019.03.01'\n", "path": "pytype/__version__.py"}]} | 341 | 86 |
gh_patches_debug_7061 | rasdani/github-patches | git_diff | mindsdb__lightwood-1051 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Lightwood pip packages creates tests module
Installing lightwood creates 'tests' module in python site-packages
Steps to reproduce:
- `pip install lightwood`
- in python
- `import tests`
- `print(tests.__file__) `
It will show that 'tests' is in site-packages
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import sys
2 import setuptools
3 import os
4
5
6 def remove_requirements(requirements, name, replace=''):
7 new_requirements = []
8 for requirement in requirements:
9 if requirement.split(' ')[0] != name:
10 new_requirements.append(requirement)
11 elif replace is not None:
12 new_requirements.append(replace)
13 return new_requirements
14
15
16 sys_platform = sys.platform
17
18 about = {}
19 with open("lightwood/__about__.py") as fp:
20 exec(fp.read(), about)
21
22 with open("README.md", "r") as fh:
23 long_description = fh.read()
24
25 with open('requirements.txt') as req_file:
26 requirements = [req.strip() for req in req_file.read().splitlines()]
27
28 extra_requirements = {}
29 for fn in os.listdir('.'):
30 if fn.startswith('requirements_') and fn.endswith('.txt'):
31 extra_name = fn.replace('requirements_', '').replace('.txt', '')
32 with open(fn) as fp:
33 extra = [req.strip() for req in fp.read().splitlines()]
34 extra_requirements[extra_name] = extra
35 full_requirements = []
36 for v in extra_requirements.values():
37 full_requirements += v
38 extra_requirements['all_extras'] = list(set(full_requirements))
39
40 # Windows specific requirements
41 if sys_platform in ['win32', 'cygwin', 'windows']:
42 # These have to be installed manually or via the installers in windows
43 requirements = remove_requirements(requirements, 'torch')
44
45 setuptools.setup(
46 name=about['__title__'],
47 version=about['__version__'],
48 url=about['__github__'],
49 download_url=about['__pypi__'],
50 license=about['__license__'],
51 author=about['__author__'],
52 author_email=about['__email__'],
53 description=about['__description__'],
54 long_description=long_description,
55 long_description_content_type="text/markdown",
56 packages=setuptools.find_packages(),
57 package_data={'project': ['requirements.txt']},
58 install_requires=requirements,
59 extras_require=extra_requirements,
60 classifiers=[
61 "Programming Language :: Python :: 3",
62 "License :: OSI Approved :: MIT License",
63 "Operating System :: OS Independent",
64 ],
65 python_requires=">=3.7"
66 )
67
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -53,7 +53,7 @@
description=about['__description__'],
long_description=long_description,
long_description_content_type="text/markdown",
- packages=setuptools.find_packages(),
+ packages=setuptools.find_packages(exclude=["tests", "tests.*"]),
package_data={'project': ['requirements.txt']},
install_requires=requirements,
extras_require=extra_requirements,
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -53,7 +53,7 @@\n description=about['__description__'],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n- packages=setuptools.find_packages(),\n+ packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\"]),\n package_data={'project': ['requirements.txt']},\n install_requires=requirements,\n extras_require=extra_requirements,\n", "issue": "Lightwood pip packages creates tests module\nInstalling lightwood creates 'tests' module in python site-packages\r\n\r\nSteps to reproduce:\r\n- `pip install lightwood`\r\n- in python\r\n - `import tests`\r\n - `print(tests.__file__) `\r\nIt will show that 'tests' is in site-packages\n", "before_files": [{"content": "import sys\nimport setuptools\nimport os\n\n\ndef remove_requirements(requirements, name, replace=''):\n new_requirements = []\n for requirement in requirements:\n if requirement.split(' ')[0] != name:\n new_requirements.append(requirement)\n elif replace is not None:\n new_requirements.append(replace)\n return new_requirements\n\n\nsys_platform = sys.platform\n\nabout = {}\nwith open(\"lightwood/__about__.py\") as fp:\n exec(fp.read(), about)\n\nwith open(\"README.md\", \"r\") as fh:\n long_description = fh.read()\n\nwith open('requirements.txt') as req_file:\n requirements = [req.strip() for req in req_file.read().splitlines()]\n\nextra_requirements = {}\nfor fn in os.listdir('.'):\n if fn.startswith('requirements_') and fn.endswith('.txt'):\n extra_name = fn.replace('requirements_', '').replace('.txt', '')\n with open(fn) as fp:\n extra = [req.strip() for req in fp.read().splitlines()]\n extra_requirements[extra_name] = extra\nfull_requirements = []\nfor v in extra_requirements.values():\n full_requirements += v\nextra_requirements['all_extras'] = list(set(full_requirements))\n\n# Windows specific requirements\nif sys_platform in ['win32', 'cygwin', 'windows']:\n # These have to be installed manually or via the installers in windows\n requirements = remove_requirements(requirements, 'torch')\n\nsetuptools.setup(\n name=about['__title__'],\n version=about['__version__'],\n url=about['__github__'],\n download_url=about['__pypi__'],\n license=about['__license__'],\n author=about['__author__'],\n author_email=about['__email__'],\n description=about['__description__'],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n packages=setuptools.find_packages(),\n package_data={'project': ['requirements.txt']},\n install_requires=requirements,\n extras_require=extra_requirements,\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n ],\n python_requires=\">=3.7\"\n)\n", "path": "setup.py"}], "after_files": [{"content": "import sys\nimport setuptools\nimport os\n\n\ndef remove_requirements(requirements, name, replace=''):\n new_requirements = []\n for requirement in requirements:\n if requirement.split(' ')[0] != name:\n new_requirements.append(requirement)\n elif replace is not None:\n new_requirements.append(replace)\n return new_requirements\n\n\nsys_platform = sys.platform\n\nabout = {}\nwith open(\"lightwood/__about__.py\") as fp:\n exec(fp.read(), about)\n\nwith open(\"README.md\", \"r\") as fh:\n long_description = fh.read()\n\nwith open('requirements.txt') as req_file:\n requirements = [req.strip() for req in req_file.read().splitlines()]\n\nextra_requirements = {}\nfor fn in 
os.listdir('.'):\n if fn.startswith('requirements_') and fn.endswith('.txt'):\n extra_name = fn.replace('requirements_', '').replace('.txt', '')\n with open(fn) as fp:\n extra = [req.strip() for req in fp.read().splitlines()]\n extra_requirements[extra_name] = extra\nfull_requirements = []\nfor v in extra_requirements.values():\n full_requirements += v\nextra_requirements['all_extras'] = list(set(full_requirements))\n\n# Windows specific requirements\nif sys_platform in ['win32', 'cygwin', 'windows']:\n # These have to be installed manually or via the installers in windows\n requirements = remove_requirements(requirements, 'torch')\n\nsetuptools.setup(\n name=about['__title__'],\n version=about['__version__'],\n url=about['__github__'],\n download_url=about['__pypi__'],\n license=about['__license__'],\n author=about['__author__'],\n author_email=about['__email__'],\n description=about['__description__'],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n packages=setuptools.find_packages(exclude=[\"tests\", \"tests.*\"]),\n package_data={'project': ['requirements.txt']},\n install_requires=requirements,\n extras_require=extra_requirements,\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n ],\n python_requires=\">=3.7\"\n)\n", "path": "setup.py"}]} | 909 | 104 |
gh_patches_debug_18477 | rasdani/github-patches | git_diff | saleor__saleor-1416 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Logging does not redirect to ?next= link
### What I'm trying to achieve
Currently Saleor has an option to redirect the user to a particular URL after being asked to log in - which isn't working ATM, because the user gets redirected to the storefront main page.
### Steps to reproduce the problem
1. Go to auth-protected URL (such as `/dashboard`)
2. Log in
### What I expected to happen
To redirect user to requested page.
### What happened instead/how it failed
User gets redirected to `/`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `saleor/registration/views.py`
Content:
```
1 from __future__ import unicode_literals
2
3 from django.conf import settings
4 from django.contrib import auth, messages
5 from django.contrib.auth import views as django_views
6 from django.contrib.auth.decorators import login_required
7 from django.shortcuts import redirect
8 from django.template.response import TemplateResponse
9 from django.urls import reverse_lazy
10 from django.utils.translation import ugettext_lazy as _
11
12 from saleor.cart.utils import find_and_assign_anonymous_cart
13
14 from .forms import LoginForm, PasswordSetUpForm, SignupForm
15
16
17 @find_and_assign_anonymous_cart()
18 def login(request):
19 kwargs = {
20 'template_name': 'account/login.html', 'authentication_form': LoginForm}
21 return django_views.LoginView.as_view(**kwargs)(request, **kwargs)
22
23
24 @login_required
25 def logout(request):
26 auth.logout(request)
27 messages.success(request, _('You have been successfully logged out.'))
28 return redirect(settings.LOGIN_REDIRECT_URL)
29
30
31 def signup(request):
32 form = SignupForm(request.POST or None)
33 if form.is_valid():
34 form.save()
35 password = form.cleaned_data.get('password')
36 email = form.cleaned_data.get('email')
37 user = auth.authenticate(request=request, email=email,
38 password=password)
39 if user:
40 auth.login(request, user)
41 messages.success(request, _('User has been created'))
42 redirect_url = request.POST.get('next', '')
43 if redirect_url:
44 return redirect(redirect_url)
45 else:
46 return redirect(settings.LOGIN_REDIRECT_URL)
47 ctx = {'form': form}
48 return TemplateResponse(request, 'account/signup.html', ctx)
49
50
51 def password_reset(request):
52 kwargs = {
53 'template_name': 'account/password_reset.html',
54 'success_url': reverse_lazy('account_reset_password_done'),
55 'email_template_name': 'account/email/password_reset_message.txt',
56 'subject_template_name': 'account/email/password_reset_subject.txt'}
57 return django_views.PasswordResetView.as_view(**kwargs)(request, **kwargs)
58
59
60 class PasswordResetConfirm(django_views.PasswordResetConfirmView):
61 template_name = 'account/password_reset_from_key.html'
62 success_url = reverse_lazy('account_reset_password_complete')
63 set_password_form = PasswordSetUpForm
64 token = None
65 uidb64 = None
66
67
68 def password_reset_confirm(request, uidb64=None, token=None):
69 kwargs = {
70 'template_name': 'account/password_reset_from_key.html',
71 'success_url': reverse_lazy('account_reset_password_complete'),
72 'set_password_form': 'PasswordSetUpForm',
73 'token': token,
74 'uidb64': uidb64}
75 return PasswordResetConfirm.as_view(**kwargs)(
76 request, **kwargs)
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/saleor/registration/views.py b/saleor/registration/views.py
--- a/saleor/registration/views.py
+++ b/saleor/registration/views.py
@@ -34,16 +34,13 @@
form.save()
password = form.cleaned_data.get('password')
email = form.cleaned_data.get('email')
- user = auth.authenticate(request=request, email=email,
- password=password)
+ user = auth.authenticate(
+ request=request, email=email, password=password)
if user:
auth.login(request, user)
messages.success(request, _('User has been created'))
- redirect_url = request.POST.get('next', '')
- if redirect_url:
- return redirect(redirect_url)
- else:
- return redirect(settings.LOGIN_REDIRECT_URL)
+ redirect_url = request.POST.get('next', settings.LOGIN_REDIRECT_URL)
+ return redirect(redirect_url)
ctx = {'form': form}
return TemplateResponse(request, 'account/signup.html', ctx)
| {"golden_diff": "diff --git a/saleor/registration/views.py b/saleor/registration/views.py\n--- a/saleor/registration/views.py\n+++ b/saleor/registration/views.py\n@@ -34,16 +34,13 @@\n form.save()\n password = form.cleaned_data.get('password')\n email = form.cleaned_data.get('email')\n- user = auth.authenticate(request=request, email=email,\n- password=password)\n+ user = auth.authenticate(\n+ request=request, email=email, password=password)\n if user:\n auth.login(request, user)\n messages.success(request, _('User has been created'))\n- redirect_url = request.POST.get('next', '')\n- if redirect_url:\n- return redirect(redirect_url)\n- else:\n- return redirect(settings.LOGIN_REDIRECT_URL)\n+ redirect_url = request.POST.get('next', settings.LOGIN_REDIRECT_URL)\n+ return redirect(redirect_url)\n ctx = {'form': form}\n return TemplateResponse(request, 'account/signup.html', ctx)\n", "issue": "Logging does not redirect to ?next= link\n### What I'm trying to achieve\r\n\r\nCurrently Saleor has an option to redirect user to particular URL after being asked to log in - which isn't working ATM, beacuse user gets redirected to storefront main page.\r\n\r\n### Steps to reproduce the problem\r\n\r\n1. Go to auth-protected URL (such as `/dashboard`)\r\n2. Log in\r\n\r\n### What I expected to happen\r\n\r\nTo redirect user to requested page.\r\n\r\n### What happened instead/how it failed\r\n\r\nUser gets redirected to `/`\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nfrom django.conf import settings\nfrom django.contrib import auth, messages\nfrom django.contrib.auth import views as django_views\nfrom django.contrib.auth.decorators import login_required\nfrom django.shortcuts import redirect\nfrom django.template.response import TemplateResponse\nfrom django.urls import reverse_lazy\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom saleor.cart.utils import find_and_assign_anonymous_cart\n\nfrom .forms import LoginForm, PasswordSetUpForm, SignupForm\n\n\n@find_and_assign_anonymous_cart()\ndef login(request):\n kwargs = {\n 'template_name': 'account/login.html', 'authentication_form': LoginForm}\n return django_views.LoginView.as_view(**kwargs)(request, **kwargs)\n\n\n@login_required\ndef logout(request):\n auth.logout(request)\n messages.success(request, _('You have been successfully logged out.'))\n return redirect(settings.LOGIN_REDIRECT_URL)\n\n\ndef signup(request):\n form = SignupForm(request.POST or None)\n if form.is_valid():\n form.save()\n password = form.cleaned_data.get('password')\n email = form.cleaned_data.get('email')\n user = auth.authenticate(request=request, email=email,\n password=password)\n if user:\n auth.login(request, user)\n messages.success(request, _('User has been created'))\n redirect_url = request.POST.get('next', '')\n if redirect_url:\n return redirect(redirect_url)\n else:\n return redirect(settings.LOGIN_REDIRECT_URL)\n ctx = {'form': form}\n return TemplateResponse(request, 'account/signup.html', ctx)\n\n\ndef password_reset(request):\n kwargs = {\n 'template_name': 'account/password_reset.html',\n 'success_url': reverse_lazy('account_reset_password_done'),\n 'email_template_name': 'account/email/password_reset_message.txt',\n 'subject_template_name': 'account/email/password_reset_subject.txt'}\n return django_views.PasswordResetView.as_view(**kwargs)(request, **kwargs)\n\n\nclass PasswordResetConfirm(django_views.PasswordResetConfirmView):\n template_name = 'account/password_reset_from_key.html'\n success_url = 
reverse_lazy('account_reset_password_complete')\n set_password_form = PasswordSetUpForm\n token = None\n uidb64 = None\n\n\ndef password_reset_confirm(request, uidb64=None, token=None):\n kwargs = {\n 'template_name': 'account/password_reset_from_key.html',\n 'success_url': reverse_lazy('account_reset_password_complete'),\n 'set_password_form': 'PasswordSetUpForm',\n 'token': token,\n 'uidb64': uidb64}\n return PasswordResetConfirm.as_view(**kwargs)(\n request, **kwargs)\n", "path": "saleor/registration/views.py"}], "after_files": [{"content": "from __future__ import unicode_literals\n\nfrom django.conf import settings\nfrom django.contrib import auth, messages\nfrom django.contrib.auth import views as django_views\nfrom django.contrib.auth.decorators import login_required\nfrom django.shortcuts import redirect\nfrom django.template.response import TemplateResponse\nfrom django.urls import reverse_lazy\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom saleor.cart.utils import find_and_assign_anonymous_cart\n\nfrom .forms import LoginForm, PasswordSetUpForm, SignupForm\n\n\n@find_and_assign_anonymous_cart()\ndef login(request):\n kwargs = {\n 'template_name': 'account/login.html', 'authentication_form': LoginForm}\n return django_views.LoginView.as_view(**kwargs)(request, **kwargs)\n\n\n@login_required\ndef logout(request):\n auth.logout(request)\n messages.success(request, _('You have been successfully logged out.'))\n return redirect(settings.LOGIN_REDIRECT_URL)\n\n\ndef signup(request):\n form = SignupForm(request.POST or None)\n if form.is_valid():\n form.save()\n password = form.cleaned_data.get('password')\n email = form.cleaned_data.get('email')\n user = auth.authenticate(\n request=request, email=email, password=password)\n if user:\n auth.login(request, user)\n messages.success(request, _('User has been created'))\n redirect_url = request.POST.get('next', settings.LOGIN_REDIRECT_URL)\n return redirect(redirect_url)\n ctx = {'form': form}\n return TemplateResponse(request, 'account/signup.html', ctx)\n\n\ndef password_reset(request):\n kwargs = {\n 'template_name': 'account/password_reset.html',\n 'success_url': reverse_lazy('account_reset_password_done'),\n 'email_template_name': 'account/email/password_reset_message.txt',\n 'subject_template_name': 'account/email/password_reset_subject.txt'}\n return django_views.PasswordResetView.as_view(**kwargs)(request, **kwargs)\n\n\nclass PasswordResetConfirm(django_views.PasswordResetConfirmView):\n template_name = 'account/password_reset_from_key.html'\n success_url = reverse_lazy('account_reset_password_complete')\n set_password_form = PasswordSetUpForm\n token = None\n uidb64 = None\n\n\ndef password_reset_confirm(request, uidb64=None, token=None):\n kwargs = {\n 'template_name': 'account/password_reset_from_key.html',\n 'success_url': reverse_lazy('account_reset_password_complete'),\n 'set_password_form': 'PasswordSetUpForm',\n 'token': token,\n 'uidb64': uidb64}\n return PasswordResetConfirm.as_view(**kwargs)(\n request, **kwargs)\n", "path": "saleor/registration/views.py"}]} | 1,072 | 219 |
gh_patches_debug_48346 | rasdani/github-patches | git_diff | interlegis__sapl-3164 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Do not allow the rotulo_prefixo_texto and rotulo_sufixo_texto fields to be changed via the admin interface
<!--- Provide a general summary of the issue in the title above -->
## Expected Behavior
<!--- If you are describing a bug, tell us what should happen. -->
<!--- If you are suggesting a change/improvement, tell us how it should work. -->
## Current Behavior
<!--- If you are describing a bug, tell us what happens instead of the expected behavior. -->
<!--- If you are suggesting a change/improvement, explain the difference from the current behavior. -->
## Possible Solution
<!--- Not required, but suggest a possible fix/reason for the bug -->
<!--- or ideas on how to implement the addition/change. -->
## Steps to Reproduce (for bugs)
<!--- Provide a link to an example, or an unambiguous set of steps -->
<!--- to reproduce this bug. Include code to reproduce, if relevant. -->
1.
2.
3.
4.
## Context
<!--- How does this problem affect you? What are you trying to accomplish? -->
<!--- Providing context helps us find a solution that is more useful in the real world -->
## Screenshots of the Issue
<!--- Visual representation of the issue in video or image form -->
<!--- If you are describing a bug, post images or videos of the reproduction, if applicable -->
## Your Environment
<!--- Include relevant details about the environment in which you experienced the bug. -->
* Version used (_Release_):
* Browser name and version:
* Operating system name and version (desktop or mobile):
* Link to your project (if this is a fork of this project):
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sapl/compilacao/admin.py`
Content:
```
1 from sapl.utils import register_all_models_in_admin
2
3 register_all_models_in_admin(__name__)
4
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sapl/compilacao/admin.py b/sapl/compilacao/admin.py
--- a/sapl/compilacao/admin.py
+++ b/sapl/compilacao/admin.py
@@ -1,3 +1,12 @@
+from django.contrib import admin
+from sapl.compilacao.models import TipoDispositivo
from sapl.utils import register_all_models_in_admin
register_all_models_in_admin(__name__)
+admin.site.unregister(TipoDispositivo)
+
+
[email protected](TipoDispositivo)
+class TipoDispositivoAdmin(admin.ModelAdmin):
+ readonly_fields = ("rotulo_prefixo_texto", "rotulo_sufixo_texto",)
+ list_display = [f.name for f in TipoDispositivo._meta.fields if f.name != 'id']
| {"golden_diff": "diff --git a/sapl/compilacao/admin.py b/sapl/compilacao/admin.py\n--- a/sapl/compilacao/admin.py\n+++ b/sapl/compilacao/admin.py\n@@ -1,3 +1,12 @@\n+from django.contrib import admin\n+from sapl.compilacao.models import TipoDispositivo\n from sapl.utils import register_all_models_in_admin\n \n register_all_models_in_admin(__name__)\n+admin.site.unregister(TipoDispositivo)\n+\n+\[email protected](TipoDispositivo)\n+class TipoDispositivoAdmin(admin.ModelAdmin):\n+ readonly_fields = (\"rotulo_prefixo_texto\", \"rotulo_sufixo_texto\",)\n+ list_display = [f.name for f in TipoDispositivo._meta.fields if f.name != 'id']\n", "issue": "N\u00e3o permitir que se altere campos rotulo_prefixo_texto e rotulo_sufixo_texto via interface admin\n<!--- Forne\u00e7a um resumo geral da _issue_ no t\u00edtulo acima -->\r\n\r\n## Comportamento Esperado\r\n<!--- Se voc\u00ea est\u00e1 descrevendo um _bug_, conte-nos o que deveria acontecer. -->\r\n<!--- Se voc\u00ea est\u00e1 sugerindo uma mudan\u00e7a/melhoria, conte-nos como deve funcionar. -->\r\n\r\n## Comportamento Atual\r\n<!--- Se est\u00e1 descrevendo um bug, conte-nos o que acontece em vez do comportamento esperado. -->\r\n<!--- Se est\u00e1 sugerindo uma mudan\u00e7a/melhoria, explique a diferen\u00e7a com o comportamento atual. -->\r\n\r\n## Poss\u00edvel Solu\u00e7\u00e3o\r\n<!--- N\u00e3o \u00e9 obrigat\u00f3rio, mas sugira uma poss\u00edvel corre\u00e7\u00e3o/raz\u00e3o para o bug -->\r\n<!--- ou ideias de como implementar a adi\u00e7\u00e3o/mudan\u00e7a. -->\r\n\r\n## Passos para Reproduzir (para bugs)\r\n<!--- Forne\u00e7a um link para um exemplo, ou um conjunto de passos inequ\u00edvocos -->\r\n<!--- para reproduzir esse bug. Inclua c\u00f3digo para reproduzir, se relevante. -->\r\n1.\r\n2.\r\n3.\r\n4.\r\n\r\n## Contexto\r\n<!--- Como esse problema o afeta? O que voc\u00ea est\u00e1 tentando realizar? -->\r\n<!--- Fornecer o contexto nos ajuda a encontrar uma solu\u00e7\u00e3o que seja mais \u00fatil no mundo real -->\r\n\r\n## Imagens do Ocorrido\r\n<!--- Representa\u00e7\u00e3o visual em v\u00eddeo ou imagem do ocorrido -->\r\n<!--- Se est\u00e1 descrevendo um bug poste imagens ou v\u00eddeos na reprodu\u00e7\u00e3o do bug citado, caso se aplique -->\r\n\r\n## Seu Ambiente\r\n<!--- Inclua detalhes relevantes sobre o ambiente em que voc\u00ea presenciou/experienciou o bug. -->\r\n* Vers\u00e3o usada (_Release_):\r\n* Nome e vers\u00e3o do navegador:\r\n* Nome e vers\u00e3o do Sistema Operacional (desktop ou mobile):\r\n* Link para o seu projeto (Caso de fork deste projeto):\r\n\n", "before_files": [{"content": "from sapl.utils import register_all_models_in_admin\n\nregister_all_models_in_admin(__name__)\n", "path": "sapl/compilacao/admin.py"}], "after_files": [{"content": "from django.contrib import admin\nfrom sapl.compilacao.models import TipoDispositivo\nfrom sapl.utils import register_all_models_in_admin\n\nregister_all_models_in_admin(__name__)\nadmin.site.unregister(TipoDispositivo)\n\n\[email protected](TipoDispositivo)\nclass TipoDispositivoAdmin(admin.ModelAdmin):\n readonly_fields = (\"rotulo_prefixo_texto\", \"rotulo_sufixo_texto\",)\n list_display = [f.name for f in TipoDispositivo._meta.fields if f.name != 'id']\n", "path": "sapl/compilacao/admin.py"}]} | 721 | 175 |
gh_patches_debug_9994 | rasdani/github-patches | git_diff | urllib3__urllib3-603 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Really don't spam InsecurePlatformWarning
urllib3 should configure its warnings using append=True to avoid overriding the user's preferences as specified with python -W or PYTHONWARNINGS.
If this issue were fixed, the user could work around pypa/pip#2681 with
```
export PYTHONWARNINGS="ignore:A true SSLContext object is not available"
```
Additionally, the urllib3 docs are very unclear about why this is considered worth warning the end user about, particularly given that adding this strange message has effectively introduced a bug in hundreds of other projects.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `urllib3/__init__.py`
Content:
```
1 """
2 urllib3 - Thread-safe connection pooling and re-using.
3 """
4
5 __author__ = 'Andrey Petrov ([email protected])'
6 __license__ = 'MIT'
7 __version__ = '1.10.2'
8
9
10 from .connectionpool import (
11 HTTPConnectionPool,
12 HTTPSConnectionPool,
13 connection_from_url
14 )
15
16 from . import exceptions
17 from .filepost import encode_multipart_formdata
18 from .poolmanager import PoolManager, ProxyManager, proxy_from_url
19 from .response import HTTPResponse
20 from .util.request import make_headers
21 from .util.url import get_host
22 from .util.timeout import Timeout
23 from .util.retry import Retry
24
25
26 # Set default logging handler to avoid "No handler found" warnings.
27 import logging
28 try: # Python 2.7+
29 from logging import NullHandler
30 except ImportError:
31 class NullHandler(logging.Handler):
32 def emit(self, record):
33 pass
34
35 logging.getLogger(__name__).addHandler(NullHandler())
36
37 def add_stderr_logger(level=logging.DEBUG):
38 """
39 Helper for quickly adding a StreamHandler to the logger. Useful for
40 debugging.
41
42 Returns the handler after adding it.
43 """
44 # This method needs to be in this __init__.py to get the __name__ correct
45 # even if urllib3 is vendored within another package.
46 logger = logging.getLogger(__name__)
47 handler = logging.StreamHandler()
48 handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
49 logger.addHandler(handler)
50 logger.setLevel(level)
51 logger.debug('Added a stderr logging handler to logger: %s' % __name__)
52 return handler
53
54 # ... Clean up.
55 del NullHandler
56
57
58 import warnings
59 # SecurityWarning's always go off by default.
60 warnings.simplefilter('always', exceptions.SecurityWarning)
61 # InsecurePlatformWarning's don't vary between requests, so we keep it default.
62 warnings.simplefilter('default', exceptions.InsecurePlatformWarning)
63
64 def disable_warnings(category=exceptions.HTTPWarning):
65 """
66 Helper for quickly disabling all urllib3 warnings.
67 """
68 warnings.simplefilter('ignore', category)
69
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/urllib3/__init__.py b/urllib3/__init__.py
--- a/urllib3/__init__.py
+++ b/urllib3/__init__.py
@@ -57,9 +57,10 @@
import warnings
# SecurityWarning's always go off by default.
-warnings.simplefilter('always', exceptions.SecurityWarning)
+warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
-warnings.simplefilter('default', exceptions.InsecurePlatformWarning)
+warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
+ append=True)
def disable_warnings(category=exceptions.HTTPWarning):
"""
| {"golden_diff": "diff --git a/urllib3/__init__.py b/urllib3/__init__.py\n--- a/urllib3/__init__.py\n+++ b/urllib3/__init__.py\n@@ -57,9 +57,10 @@\n \n import warnings\n # SecurityWarning's always go off by default.\n-warnings.simplefilter('always', exceptions.SecurityWarning)\n+warnings.simplefilter('always', exceptions.SecurityWarning, append=True)\n # InsecurePlatformWarning's don't vary between requests, so we keep it default.\n-warnings.simplefilter('default', exceptions.InsecurePlatformWarning)\n+warnings.simplefilter('default', exceptions.InsecurePlatformWarning,\n+ append=True)\n \n def disable_warnings(category=exceptions.HTTPWarning):\n \"\"\"\n", "issue": "Really don't spam InsecurePlatformWarning\nurllib3 should configure its warnings using append=True to avoid overriding the user's preferences as specified with python -W or PYTHONWARNINGS.\n\nIf this issue were fixed, the user could work around pypa/pip#2681 with\n\n```\nexport PYTHONWARNINGS=\"ignore:A true SSLContext object is not available\"\n```\n\nAdditionally, the urllib3 docs are very unclear about why this is considered worth warning the end user about, particularly given that adding this strange message has effectively introduced a bug in hundreds of other projects.\n\n", "before_files": [{"content": "\"\"\"\nurllib3 - Thread-safe connection pooling and re-using.\n\"\"\"\n\n__author__ = 'Andrey Petrov ([email protected])'\n__license__ = 'MIT'\n__version__ = '1.10.2'\n\n\nfrom .connectionpool import (\n HTTPConnectionPool,\n HTTPSConnectionPool,\n connection_from_url\n)\n\nfrom . import exceptions\nfrom .filepost import encode_multipart_formdata\nfrom .poolmanager import PoolManager, ProxyManager, proxy_from_url\nfrom .response import HTTPResponse\nfrom .util.request import make_headers\nfrom .util.url import get_host\nfrom .util.timeout import Timeout\nfrom .util.retry import Retry\n\n\n# Set default logging handler to avoid \"No handler found\" warnings.\nimport logging\ntry: # Python 2.7+\n from logging import NullHandler\nexcept ImportError:\n class NullHandler(logging.Handler):\n def emit(self, record):\n pass\n\nlogging.getLogger(__name__).addHandler(NullHandler())\n\ndef add_stderr_logger(level=logging.DEBUG):\n \"\"\"\n Helper for quickly adding a StreamHandler to the logger. Useful for\n debugging.\n\n Returns the handler after adding it.\n \"\"\"\n # This method needs to be in this __init__.py to get the __name__ correct\n # even if urllib3 is vendored within another package.\n logger = logging.getLogger(__name__)\n handler = logging.StreamHandler()\n handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))\n logger.addHandler(handler)\n logger.setLevel(level)\n logger.debug('Added a stderr logging handler to logger: %s' % __name__)\n return handler\n\n# ... 
Clean up.\ndel NullHandler\n\n\nimport warnings\n# SecurityWarning's always go off by default.\nwarnings.simplefilter('always', exceptions.SecurityWarning)\n# InsecurePlatformWarning's don't vary between requests, so we keep it default.\nwarnings.simplefilter('default', exceptions.InsecurePlatformWarning)\n\ndef disable_warnings(category=exceptions.HTTPWarning):\n \"\"\"\n Helper for quickly disabling all urllib3 warnings.\n \"\"\"\n warnings.simplefilter('ignore', category)\n", "path": "urllib3/__init__.py"}], "after_files": [{"content": "\"\"\"\nurllib3 - Thread-safe connection pooling and re-using.\n\"\"\"\n\n__author__ = 'Andrey Petrov ([email protected])'\n__license__ = 'MIT'\n__version__ = '1.10.2'\n\n\nfrom .connectionpool import (\n HTTPConnectionPool,\n HTTPSConnectionPool,\n connection_from_url\n)\n\nfrom . import exceptions\nfrom .filepost import encode_multipart_formdata\nfrom .poolmanager import PoolManager, ProxyManager, proxy_from_url\nfrom .response import HTTPResponse\nfrom .util.request import make_headers\nfrom .util.url import get_host\nfrom .util.timeout import Timeout\nfrom .util.retry import Retry\n\n\n# Set default logging handler to avoid \"No handler found\" warnings.\nimport logging\ntry: # Python 2.7+\n from logging import NullHandler\nexcept ImportError:\n class NullHandler(logging.Handler):\n def emit(self, record):\n pass\n\nlogging.getLogger(__name__).addHandler(NullHandler())\n\ndef add_stderr_logger(level=logging.DEBUG):\n \"\"\"\n Helper for quickly adding a StreamHandler to the logger. Useful for\n debugging.\n\n Returns the handler after adding it.\n \"\"\"\n # This method needs to be in this __init__.py to get the __name__ correct\n # even if urllib3 is vendored within another package.\n logger = logging.getLogger(__name__)\n handler = logging.StreamHandler()\n handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))\n logger.addHandler(handler)\n logger.setLevel(level)\n logger.debug('Added a stderr logging handler to logger: %s' % __name__)\n return handler\n\n# ... Clean up.\ndel NullHandler\n\n\nimport warnings\n# SecurityWarning's always go off by default.\nwarnings.simplefilter('always', exceptions.SecurityWarning, append=True)\n# InsecurePlatformWarning's don't vary between requests, so we keep it default.\nwarnings.simplefilter('default', exceptions.InsecurePlatformWarning,\n append=True)\n\ndef disable_warnings(category=exceptions.HTTPWarning):\n \"\"\"\n Helper for quickly disabling all urllib3 warnings.\n \"\"\"\n warnings.simplefilter('ignore', category)\n", "path": "urllib3/__init__.py"}]} | 960 | 157 |
gh_patches_debug_17913 | rasdani/github-patches | git_diff | Parsl__parsl-1956 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Provider base and cluster provider to be added to reference
**Is your feature request related to a problem? Please describe.**
In the last set of doc updates where we trimmed some of the developer docs we've moved to relying more on the references to point someone to. It looks like the [provider base](https://github.com/Parsl/parsl/blob/master/parsl/providers/provider_base.py) class and [cluster provider](https://github.com/Parsl/parsl/blob/master/parsl/providers/cluster_provider.py) are missing from there.
**Describe the solution you'd like**
Update docs to add these to the reference.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parsl/providers/cluster_provider.py`
Content:
```
1 import logging
2 from abc import abstractmethod
3 from string import Template
4
5 from parsl.providers.error import SchedulerMissingArgs, ScriptPathError
6 from parsl.launchers.error import BadLauncher
7 from parsl.providers.provider_base import ExecutionProvider
8
9 logger = logging.getLogger(__name__)
10
11
12 class ClusterProvider(ExecutionProvider):
13 """ This class defines behavior common to all cluster/supercompute-style scheduler systems.
14
15 Parameters
16 ----------
17 label : str
18 Label for this provider.
19 channel : Channel
20 Channel for accessing this provider. Possible channels include
21 :class:`~parsl.channels.LocalChannel` (the default),
22 :class:`~parsl.channels.SSHChannel`, or
23 :class:`~parsl.channels.SSHInteractiveLoginChannel`.
24 walltime : str
25 Walltime requested per block in HH:MM:SS.
26 launcher : str
27 FIXME
28 cmd_timeout : int
29 Timeout for commands made to the scheduler in seconds
30
31 .. code:: python
32
33 +------------------
34 |
35 script_string ------->| submit
36 id <--------|---+
37 |
38 [ ids ] ------->| status
39 [statuses] <--------|----+
40 |
41 [ ids ] ------->| cancel
42 [cancel] <--------|----+
43 |
44 +-------------------
45 """
46
47 def __init__(self,
48 label,
49 channel,
50 nodes_per_block,
51 init_blocks,
52 min_blocks,
53 max_blocks,
54 parallelism,
55 walltime,
56 launcher,
57 cmd_timeout=10):
58
59 self._label = label
60 self.channel = channel
61 self.nodes_per_block = nodes_per_block
62 self.init_blocks = init_blocks
63 self.min_blocks = min_blocks
64 self.max_blocks = max_blocks
65 self.parallelism = parallelism
66 self.launcher = launcher
67 self.walltime = walltime
68 self.cmd_timeout = cmd_timeout
69 if not callable(self.launcher):
70 raise(BadLauncher(self.launcher,
71 "Launcher for executor: {} is of type: {}. Expects a parsl.launcher.launcher.Launcher or callable".format(
72 label, type(self.launcher))))
73
74 self.script_dir = None
75
76 # Dictionary that keeps track of jobs, keyed on job_id
77 self.resources = {}
78
79 def execute_wait(self, cmd, timeout=None):
80 t = self.cmd_timeout
81 if timeout is not None:
82 t = timeout
83 return self.channel.execute_wait(cmd, t)
84
85 def _write_submit_script(self, template, script_filename, job_name, configs):
86 """Generate submit script and write it to a file.
87
88 Args:
89 - template (string) : The template string to be used for the writing submit script
90 - script_filename (string) : Name of the submit script
91 - job_name (string) : job name
92 - configs (dict) : configs that get pushed into the template
93
94 Returns:
95 - True: on success
96
97 Raises:
98 SchedulerMissingArgs : If template is missing args
99 ScriptPathError : Unable to write submit script out
100 """
101
102 try:
103 submit_script = Template(template).substitute(jobname=job_name, **configs)
104 # submit_script = Template(template).safe_substitute(jobname=job_name, **configs)
105 with open(script_filename, 'w') as f:
106 f.write(submit_script)
107
108 except KeyError as e:
109 logger.error("Missing keys for submit script : %s", e)
110 raise (SchedulerMissingArgs(e.args, self.label))
111
112 except IOError as e:
113 logger.error("Failed writing to submit script: %s", script_filename)
114 raise (ScriptPathError(script_filename, e))
115 except Exception as e:
116 print("Template : ", template)
117 print("Args : ", job_name)
118 print("Kwargs : ", configs)
119 logger.error("Uncategorized error: %s", e)
120 raise (e)
121
122 return True
123
124 @abstractmethod
125 def _status(self):
126 pass
127
128 def status(self, job_ids):
129 """ Get the status of a list of jobs identified by the job identifiers
130 returned from the submit request.
131
132 Args:
133 - job_ids (list) : A list of job identifiers
134
135 Returns:
136 - A list of JobStatus objects corresponding to each job_id in the job_ids list.
137
138 Raises:
139 - ExecutionProviderException or its subclasses
140
141 """
142 if job_ids:
143 self._status()
144 return [self.resources[jid]['status'] for jid in job_ids]
145
146 @property
147 def label(self):
148 return self._label
149
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/parsl/providers/cluster_provider.py b/parsl/providers/cluster_provider.py
--- a/parsl/providers/cluster_provider.py
+++ b/parsl/providers/cluster_provider.py
@@ -14,18 +14,18 @@
Parameters
----------
- label : str
+ label : str
Label for this provider.
- channel : Channel
+ channel : Channel
Channel for accessing this provider. Possible channels include
:class:`~parsl.channels.LocalChannel` (the default),
:class:`~parsl.channels.SSHChannel`, or
:class:`~parsl.channels.SSHInteractiveLoginChannel`.
- walltime : str
+ walltime : str
Walltime requested per block in HH:MM:SS.
- launcher : str
+ launcher : str
FIXME
- cmd_timeout : int
+ cmd_timeout : int
Timeout for commands made to the scheduler in seconds
.. code:: python
| {"golden_diff": "diff --git a/parsl/providers/cluster_provider.py b/parsl/providers/cluster_provider.py\n--- a/parsl/providers/cluster_provider.py\n+++ b/parsl/providers/cluster_provider.py\n@@ -14,18 +14,18 @@\n \n Parameters\n ----------\n- label : str\n+ label : str\n Label for this provider.\n- channel : Channel\n+ channel : Channel\n Channel for accessing this provider. Possible channels include\n :class:`~parsl.channels.LocalChannel` (the default),\n :class:`~parsl.channels.SSHChannel`, or\n :class:`~parsl.channels.SSHInteractiveLoginChannel`.\n- walltime : str\n+ walltime : str\n Walltime requested per block in HH:MM:SS.\n- launcher : str\n+ launcher : str\n FIXME\n- cmd_timeout : int\n+ cmd_timeout : int\n Timeout for commands made to the scheduler in seconds\n \n .. code:: python\n", "issue": "Provider base and cluster provider to be added to reference\n**Is your feature request related to a problem? Please describe.**\r\n\r\nIn the last set of doc updates where we trimmed some of the developer docs we've moved to relying more on the references to point someone to. It looks like the [provider base](https://github.com/Parsl/parsl/blob/master/parsl/providers/provider_base.py) class and [cluster provider](https://github.com/Parsl/parsl/blob/master/parsl/providers/cluster_provider.py) are missing from there. \r\n\r\n**Describe the solution you'd like**\r\nUpdate docs to add these to the reference.\r\n\nProvider base and cluster provider to be added to reference\n**Is your feature request related to a problem? Please describe.**\r\n\r\nIn the last set of doc updates where we trimmed some of the developer docs we've moved to relying more on the references to point someone to. It looks like the [provider base](https://github.com/Parsl/parsl/blob/master/parsl/providers/provider_base.py) class and [cluster provider](https://github.com/Parsl/parsl/blob/master/parsl/providers/cluster_provider.py) are missing from there. \r\n\r\n**Describe the solution you'd like**\r\nUpdate docs to add these to the reference.\r\n\n", "before_files": [{"content": "import logging\nfrom abc import abstractmethod\nfrom string import Template\n\nfrom parsl.providers.error import SchedulerMissingArgs, ScriptPathError\nfrom parsl.launchers.error import BadLauncher\nfrom parsl.providers.provider_base import ExecutionProvider\n\nlogger = logging.getLogger(__name__)\n\n\nclass ClusterProvider(ExecutionProvider):\n \"\"\" This class defines behavior common to all cluster/supercompute-style scheduler systems.\n\n Parameters\n ----------\n label : str\n Label for this provider.\n channel : Channel\n Channel for accessing this provider. Possible channels include\n :class:`~parsl.channels.LocalChannel` (the default),\n :class:`~parsl.channels.SSHChannel`, or\n :class:`~parsl.channels.SSHInteractiveLoginChannel`.\n walltime : str\n Walltime requested per block in HH:MM:SS.\n launcher : str\n FIXME\n cmd_timeout : int\n Timeout for commands made to the scheduler in seconds\n\n .. 
code:: python\n\n +------------------\n |\n script_string ------->| submit\n id <--------|---+\n |\n [ ids ] ------->| status\n [statuses] <--------|----+\n |\n [ ids ] ------->| cancel\n [cancel] <--------|----+\n |\n +-------------------\n \"\"\"\n\n def __init__(self,\n label,\n channel,\n nodes_per_block,\n init_blocks,\n min_blocks,\n max_blocks,\n parallelism,\n walltime,\n launcher,\n cmd_timeout=10):\n\n self._label = label\n self.channel = channel\n self.nodes_per_block = nodes_per_block\n self.init_blocks = init_blocks\n self.min_blocks = min_blocks\n self.max_blocks = max_blocks\n self.parallelism = parallelism\n self.launcher = launcher\n self.walltime = walltime\n self.cmd_timeout = cmd_timeout\n if not callable(self.launcher):\n raise(BadLauncher(self.launcher,\n \"Launcher for executor: {} is of type: {}. Expects a parsl.launcher.launcher.Launcher or callable\".format(\n label, type(self.launcher))))\n\n self.script_dir = None\n\n # Dictionary that keeps track of jobs, keyed on job_id\n self.resources = {}\n\n def execute_wait(self, cmd, timeout=None):\n t = self.cmd_timeout\n if timeout is not None:\n t = timeout\n return self.channel.execute_wait(cmd, t)\n\n def _write_submit_script(self, template, script_filename, job_name, configs):\n \"\"\"Generate submit script and write it to a file.\n\n Args:\n - template (string) : The template string to be used for the writing submit script\n - script_filename (string) : Name of the submit script\n - job_name (string) : job name\n - configs (dict) : configs that get pushed into the template\n\n Returns:\n - True: on success\n\n Raises:\n SchedulerMissingArgs : If template is missing args\n ScriptPathError : Unable to write submit script out\n \"\"\"\n\n try:\n submit_script = Template(template).substitute(jobname=job_name, **configs)\n # submit_script = Template(template).safe_substitute(jobname=job_name, **configs)\n with open(script_filename, 'w') as f:\n f.write(submit_script)\n\n except KeyError as e:\n logger.error(\"Missing keys for submit script : %s\", e)\n raise (SchedulerMissingArgs(e.args, self.label))\n\n except IOError as e:\n logger.error(\"Failed writing to submit script: %s\", script_filename)\n raise (ScriptPathError(script_filename, e))\n except Exception as e:\n print(\"Template : \", template)\n print(\"Args : \", job_name)\n print(\"Kwargs : \", configs)\n logger.error(\"Uncategorized error: %s\", e)\n raise (e)\n\n return True\n\n @abstractmethod\n def _status(self):\n pass\n\n def status(self, job_ids):\n \"\"\" Get the status of a list of jobs identified by the job identifiers\n returned from the submit request.\n\n Args:\n - job_ids (list) : A list of job identifiers\n\n Returns:\n - A list of JobStatus objects corresponding to each job_id in the job_ids list.\n\n Raises:\n - ExecutionProviderException or its subclasses\n\n \"\"\"\n if job_ids:\n self._status()\n return [self.resources[jid]['status'] for jid in job_ids]\n\n @property\n def label(self):\n return self._label\n", "path": "parsl/providers/cluster_provider.py"}], "after_files": [{"content": "import logging\nfrom abc import abstractmethod\nfrom string import Template\n\nfrom parsl.providers.error import SchedulerMissingArgs, ScriptPathError\nfrom parsl.launchers.error import BadLauncher\nfrom parsl.providers.provider_base import ExecutionProvider\n\nlogger = logging.getLogger(__name__)\n\n\nclass ClusterProvider(ExecutionProvider):\n \"\"\" This class defines behavior common to all cluster/supercompute-style scheduler systems.\n\n Parameters\n 
----------\n label : str\n Label for this provider.\n channel : Channel\n Channel for accessing this provider. Possible channels include\n :class:`~parsl.channels.LocalChannel` (the default),\n :class:`~parsl.channels.SSHChannel`, or\n :class:`~parsl.channels.SSHInteractiveLoginChannel`.\n walltime : str\n Walltime requested per block in HH:MM:SS.\n launcher : str\n FIXME\n cmd_timeout : int\n Timeout for commands made to the scheduler in seconds\n\n .. code:: python\n\n +------------------\n |\n script_string ------->| submit\n id <--------|---+\n |\n [ ids ] ------->| status\n [statuses] <--------|----+\n |\n [ ids ] ------->| cancel\n [cancel] <--------|----+\n |\n +-------------------\n \"\"\"\n\n def __init__(self,\n label,\n channel,\n nodes_per_block,\n init_blocks,\n min_blocks,\n max_blocks,\n parallelism,\n walltime,\n launcher,\n cmd_timeout=10):\n\n self._label = label\n self.channel = channel\n self.nodes_per_block = nodes_per_block\n self.init_blocks = init_blocks\n self.min_blocks = min_blocks\n self.max_blocks = max_blocks\n self.parallelism = parallelism\n self.launcher = launcher\n self.walltime = walltime\n self.cmd_timeout = cmd_timeout\n if not callable(self.launcher):\n raise(BadLauncher(self.launcher,\n \"Launcher for executor: {} is of type: {}. Expects a parsl.launcher.launcher.Launcher or callable\".format(\n label, type(self.launcher))))\n\n self.script_dir = None\n\n # Dictionary that keeps track of jobs, keyed on job_id\n self.resources = {}\n\n def execute_wait(self, cmd, timeout=None):\n t = self.cmd_timeout\n if timeout is not None:\n t = timeout\n return self.channel.execute_wait(cmd, t)\n\n def _write_submit_script(self, template, script_filename, job_name, configs):\n \"\"\"Generate submit script and write it to a file.\n\n Args:\n - template (string) : The template string to be used for the writing submit script\n - script_filename (string) : Name of the submit script\n - job_name (string) : job name\n - configs (dict) : configs that get pushed into the template\n\n Returns:\n - True: on success\n\n Raises:\n SchedulerMissingArgs : If template is missing args\n ScriptPathError : Unable to write submit script out\n \"\"\"\n\n try:\n submit_script = Template(template).substitute(jobname=job_name, **configs)\n # submit_script = Template(template).safe_substitute(jobname=job_name, **configs)\n with open(script_filename, 'w') as f:\n f.write(submit_script)\n\n except KeyError as e:\n logger.error(\"Missing keys for submit script : %s\", e)\n raise (SchedulerMissingArgs(e.args, self.label))\n\n except IOError as e:\n logger.error(\"Failed writing to submit script: %s\", script_filename)\n raise (ScriptPathError(script_filename, e))\n except Exception as e:\n print(\"Template : \", template)\n print(\"Args : \", job_name)\n print(\"Kwargs : \", configs)\n logger.error(\"Uncategorized error: %s\", e)\n raise (e)\n\n return True\n\n @abstractmethod\n def _status(self):\n pass\n\n def status(self, job_ids):\n \"\"\" Get the status of a list of jobs identified by the job identifiers\n returned from the submit request.\n\n Args:\n - job_ids (list) : A list of job identifiers\n\n Returns:\n - A list of JobStatus objects corresponding to each job_id in the job_ids list.\n\n Raises:\n - ExecutionProviderException or its subclasses\n\n \"\"\"\n if job_ids:\n self._status()\n return [self.resources[jid]['status'] for jid in job_ids]\n\n @property\n def label(self):\n return self._label\n", "path": "parsl/providers/cluster_provider.py"}]} | 1,869 | 218 |
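Aside: the accepted Parsl patch adds no new prose to the docs; it dedents the parameter names in the `ClusterProvider` docstring so numpydoc/Sphinx can parse the `Parameters` section and the class can be surfaced on the reference pages. A minimal sketch of the layout the patch restores, using an illustrative class rather than the real one:

```python
class ExampleProvider:
    """One-line summary of the provider.

    Parameters
    ----------
    label : str
        Label for this provider.
    cmd_timeout : int
        Timeout for commands made to the scheduler in seconds.
    """
```

Wiring the class into the rendered reference (for example via an `autoclass` entry in the docs tree) is assumed to happen outside the files shown in this record.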
gh_patches_debug_939 | rasdani/github-patches | git_diff | apache__airflow-28730 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CSRF token should be expire with session
### Apache Airflow version
2.5.0
### What happened
In the default configuration, the CSRF token [expires in one hour](https://pythonhosted.org/Flask-WTF/config.html#forms-and-csrf). This setting leads to frequent errors in the UI – for no good reason.
### What you think should happen instead
A short expiration date for the CSRF token is not the right value in my view and I [agree with this answer](https://security.stackexchange.com/a/56520/22108) that the CSRF token should basically never expire, instead pegging itself to the current session.
That is, the CSRF token should last as long as the current session. The easiest way to accomplish this is by generating the CSRF token from the session id.
### How to reproduce
_No response_
### Operating System
Linux
### Versions of Apache Airflow Providers
_No response_
### Deployment
Official Apache Airflow Helm Chart
### Deployment details
_No response_
### Anything else
_No response_
### Are you willing to submit PR?
- [ ] Yes I am willing to submit a PR!
### Code of Conduct
- [X] I agree to follow this project's [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `airflow/config_templates/default_webserver_config.py`
Content:
```
1 #
2 # Licensed to the Apache Software Foundation (ASF) under one
3 # or more contributor license agreements. See the NOTICE file
4 # distributed with this work for additional information
5 # regarding copyright ownership. The ASF licenses this file
6 # to you under the Apache License, Version 2.0 (the
7 # "License"); you may not use this file except in compliance
8 # with the License. You may obtain a copy of the License at
9 #
10 # http://www.apache.org/licenses/LICENSE-2.0
11 #
12 # Unless required by applicable law or agreed to in writing,
13 # software distributed under the License is distributed on an
14 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 # KIND, either express or implied. See the License for the
16 # specific language governing permissions and limitations
17 # under the License.
18 """Default configuration for the Airflow webserver."""
19 from __future__ import annotations
20
21 import os
22
23 from airflow.www.fab_security.manager import AUTH_DB
24
25 # from airflow.www.fab_security.manager import AUTH_LDAP
26 # from airflow.www.fab_security.manager import AUTH_OAUTH
27 # from airflow.www.fab_security.manager import AUTH_OID
28 # from airflow.www.fab_security.manager import AUTH_REMOTE_USER
29
30
31 basedir = os.path.abspath(os.path.dirname(__file__))
32
33 # Flask-WTF flag for CSRF
34 WTF_CSRF_ENABLED = True
35
36 # ----------------------------------------------------
37 # AUTHENTICATION CONFIG
38 # ----------------------------------------------------
39 # For details on how to set up each of the following authentication, see
40 # http://flask-appbuilder.readthedocs.io/en/latest/security.html# authentication-methods
41 # for details.
42
43 # The authentication type
44 # AUTH_OID : Is for OpenID
45 # AUTH_DB : Is for database
46 # AUTH_LDAP : Is for LDAP
47 # AUTH_REMOTE_USER : Is for using REMOTE_USER from web server
48 # AUTH_OAUTH : Is for OAuth
49 AUTH_TYPE = AUTH_DB
50
51 # Uncomment to setup Full admin role name
52 # AUTH_ROLE_ADMIN = 'Admin'
53
54 # Uncomment and set to desired role to enable access without authentication
55 # AUTH_ROLE_PUBLIC = 'Viewer'
56
57 # Will allow user self registration
58 # AUTH_USER_REGISTRATION = True
59
60 # The recaptcha it's automatically enabled for user self registration is active and the keys are necessary
61 # RECAPTCHA_PRIVATE_KEY = PRIVATE_KEY
62 # RECAPTCHA_PUBLIC_KEY = PUBLIC_KEY
63
64 # Config for Flask-Mail necessary for user self registration
65 # MAIL_SERVER = 'smtp.gmail.com'
66 # MAIL_USE_TLS = True
67 # MAIL_USERNAME = '[email protected]'
68 # MAIL_PASSWORD = 'passwordformail'
69 # MAIL_DEFAULT_SENDER = '[email protected]'
70
71 # The default user self registration role
72 # AUTH_USER_REGISTRATION_ROLE = "Public"
73
74 # When using OAuth Auth, uncomment to setup provider(s) info
75 # Google OAuth example:
76 # OAUTH_PROVIDERS = [{
77 # 'name':'google',
78 # 'token_key':'access_token',
79 # 'icon':'fa-google',
80 # 'remote_app': {
81 # 'api_base_url':'https://www.googleapis.com/oauth2/v2/',
82 # 'client_kwargs':{
83 # 'scope': 'email profile'
84 # },
85 # 'access_token_url':'https://accounts.google.com/o/oauth2/token',
86 # 'authorize_url':'https://accounts.google.com/o/oauth2/auth',
87 # 'request_token_url': None,
88 # 'client_id': GOOGLE_KEY,
89 # 'client_secret': GOOGLE_SECRET_KEY,
90 # }
91 # }]
92
93 # When using LDAP Auth, setup the ldap server
94 # AUTH_LDAP_SERVER = "ldap://ldapserver.new"
95
96 # When using OpenID Auth, uncomment to setup OpenID providers.
97 # example for OpenID authentication
98 # OPENID_PROVIDERS = [
99 # { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },
100 # { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' },
101 # { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' },
102 # { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]
103
104 # ----------------------------------------------------
105 # Theme CONFIG
106 # ----------------------------------------------------
107 # Flask App Builder comes up with a number of predefined themes
108 # that you can use for Apache Airflow.
109 # http://flask-appbuilder.readthedocs.io/en/latest/customizing.html#changing-themes
110 # Please make sure to remove "navbar_color" configuration from airflow.cfg
111 # in order to fully utilize the theme. (or use that property in conjunction with theme)
112 # APP_THEME = "bootstrap-theme.css" # default bootstrap
113 # APP_THEME = "amelia.css"
114 # APP_THEME = "cerulean.css"
115 # APP_THEME = "cosmo.css"
116 # APP_THEME = "cyborg.css"
117 # APP_THEME = "darkly.css"
118 # APP_THEME = "flatly.css"
119 # APP_THEME = "journal.css"
120 # APP_THEME = "lumen.css"
121 # APP_THEME = "paper.css"
122 # APP_THEME = "readable.css"
123 # APP_THEME = "sandstone.css"
124 # APP_THEME = "simplex.css"
125 # APP_THEME = "slate.css"
126 # APP_THEME = "solar.css"
127 # APP_THEME = "spacelab.css"
128 # APP_THEME = "superhero.css"
129 # APP_THEME = "united.css"
130 # APP_THEME = "yeti.css"
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/airflow/config_templates/default_webserver_config.py b/airflow/config_templates/default_webserver_config.py
--- a/airflow/config_templates/default_webserver_config.py
+++ b/airflow/config_templates/default_webserver_config.py
@@ -32,6 +32,7 @@
# Flask-WTF flag for CSRF
WTF_CSRF_ENABLED = True
+WTF_CSRF_TIME_LIMIT = None
# ----------------------------------------------------
# AUTHENTICATION CONFIG
| {"golden_diff": "diff --git a/airflow/config_templates/default_webserver_config.py b/airflow/config_templates/default_webserver_config.py\n--- a/airflow/config_templates/default_webserver_config.py\n+++ b/airflow/config_templates/default_webserver_config.py\n@@ -32,6 +32,7 @@\n \n # Flask-WTF flag for CSRF\n WTF_CSRF_ENABLED = True\n+WTF_CSRF_TIME_LIMIT = None\n \n # ----------------------------------------------------\n # AUTHENTICATION CONFIG\n", "issue": "CSRF token should be expire with session\n### Apache Airflow version\n\n2.5.0\n\n### What happened\n\nIn the default configuration, the CSRF token [expires in one hour](https://pythonhosted.org/Flask-WTF/config.html#forms-and-csrf). This setting leads to frequent errors in the UI \u2013 for no good reason.\r\n\n\n### What you think should happen instead\n\nA short expiration date for the CSRF token is not the right value in my view and I [agree with this answer](https://security.stackexchange.com/a/56520/22108) that the CSRF token should basically never expire, instead pegging itself to the current session.\r\n\r\nThat is, the CSRF token should last as long as the current session. The easiest way to accomplish this is by generating the CSRF token from the session id.\r\n\r\n\n\n### How to reproduce\n\n_No response_\n\n### Operating System\n\nLinux\n\n### Versions of Apache Airflow Providers\n\n_No response_\n\n### Deployment\n\nOfficial Apache Airflow Helm Chart\n\n### Deployment details\n\n_No response_\n\n### Anything else\n\n_No response_\n\n### Are you willing to submit PR?\n\n- [ ] Yes I am willing to submit a PR!\n\n### Code of Conduct\n\n- [X] I agree to follow this project's [Code of Conduct](https://github.com/apache/airflow/blob/main/CODE_OF_CONDUCT.md)\n\n", "before_files": [{"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. 
See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\"\"\"Default configuration for the Airflow webserver.\"\"\"\nfrom __future__ import annotations\n\nimport os\n\nfrom airflow.www.fab_security.manager import AUTH_DB\n\n# from airflow.www.fab_security.manager import AUTH_LDAP\n# from airflow.www.fab_security.manager import AUTH_OAUTH\n# from airflow.www.fab_security.manager import AUTH_OID\n# from airflow.www.fab_security.manager import AUTH_REMOTE_USER\n\n\nbasedir = os.path.abspath(os.path.dirname(__file__))\n\n# Flask-WTF flag for CSRF\nWTF_CSRF_ENABLED = True\n\n# ----------------------------------------------------\n# AUTHENTICATION CONFIG\n# ----------------------------------------------------\n# For details on how to set up each of the following authentication, see\n# http://flask-appbuilder.readthedocs.io/en/latest/security.html# authentication-methods\n# for details.\n\n# The authentication type\n# AUTH_OID : Is for OpenID\n# AUTH_DB : Is for database\n# AUTH_LDAP : Is for LDAP\n# AUTH_REMOTE_USER : Is for using REMOTE_USER from web server\n# AUTH_OAUTH : Is for OAuth\nAUTH_TYPE = AUTH_DB\n\n# Uncomment to setup Full admin role name\n# AUTH_ROLE_ADMIN = 'Admin'\n\n# Uncomment and set to desired role to enable access without authentication\n# AUTH_ROLE_PUBLIC = 'Viewer'\n\n# Will allow user self registration\n# AUTH_USER_REGISTRATION = True\n\n# The recaptcha it's automatically enabled for user self registration is active and the keys are necessary\n# RECAPTCHA_PRIVATE_KEY = PRIVATE_KEY\n# RECAPTCHA_PUBLIC_KEY = PUBLIC_KEY\n\n# Config for Flask-Mail necessary for user self registration\n# MAIL_SERVER = 'smtp.gmail.com'\n# MAIL_USE_TLS = True\n# MAIL_USERNAME = '[email protected]'\n# MAIL_PASSWORD = 'passwordformail'\n# MAIL_DEFAULT_SENDER = '[email protected]'\n\n# The default user self registration role\n# AUTH_USER_REGISTRATION_ROLE = \"Public\"\n\n# When using OAuth Auth, uncomment to setup provider(s) info\n# Google OAuth example:\n# OAUTH_PROVIDERS = [{\n# 'name':'google',\n# 'token_key':'access_token',\n# 'icon':'fa-google',\n# 'remote_app': {\n# 'api_base_url':'https://www.googleapis.com/oauth2/v2/',\n# 'client_kwargs':{\n# 'scope': 'email profile'\n# },\n# 'access_token_url':'https://accounts.google.com/o/oauth2/token',\n# 'authorize_url':'https://accounts.google.com/o/oauth2/auth',\n# 'request_token_url': None,\n# 'client_id': GOOGLE_KEY,\n# 'client_secret': GOOGLE_SECRET_KEY,\n# }\n# }]\n\n# When using LDAP Auth, setup the ldap server\n# AUTH_LDAP_SERVER = \"ldap://ldapserver.new\"\n\n# When using OpenID Auth, uncomment to setup OpenID providers.\n# example for OpenID authentication\n# OPENID_PROVIDERS = [\n# { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },\n# { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' },\n# { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' },\n# { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]\n\n# ----------------------------------------------------\n# Theme CONFIG\n# ----------------------------------------------------\n# Flask App Builder comes up with a number of predefined themes\n# that you can use for Apache Airflow.\n# http://flask-appbuilder.readthedocs.io/en/latest/customizing.html#changing-themes\n# Please make sure to remove \"navbar_color\" configuration from airflow.cfg\n# in order to fully utilize the theme. 
(or use that property in conjunction with theme)\n# APP_THEME = \"bootstrap-theme.css\" # default bootstrap\n# APP_THEME = \"amelia.css\"\n# APP_THEME = \"cerulean.css\"\n# APP_THEME = \"cosmo.css\"\n# APP_THEME = \"cyborg.css\"\n# APP_THEME = \"darkly.css\"\n# APP_THEME = \"flatly.css\"\n# APP_THEME = \"journal.css\"\n# APP_THEME = \"lumen.css\"\n# APP_THEME = \"paper.css\"\n# APP_THEME = \"readable.css\"\n# APP_THEME = \"sandstone.css\"\n# APP_THEME = \"simplex.css\"\n# APP_THEME = \"slate.css\"\n# APP_THEME = \"solar.css\"\n# APP_THEME = \"spacelab.css\"\n# APP_THEME = \"superhero.css\"\n# APP_THEME = \"united.css\"\n# APP_THEME = \"yeti.css\"\n", "path": "airflow/config_templates/default_webserver_config.py"}], "after_files": [{"content": "#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\"\"\"Default configuration for the Airflow webserver.\"\"\"\nfrom __future__ import annotations\n\nimport os\n\nfrom airflow.www.fab_security.manager import AUTH_DB\n\n# from airflow.www.fab_security.manager import AUTH_LDAP\n# from airflow.www.fab_security.manager import AUTH_OAUTH\n# from airflow.www.fab_security.manager import AUTH_OID\n# from airflow.www.fab_security.manager import AUTH_REMOTE_USER\n\n\nbasedir = os.path.abspath(os.path.dirname(__file__))\n\n# Flask-WTF flag for CSRF\nWTF_CSRF_ENABLED = True\nWTF_CSRF_TIME_LIMIT = None\n\n# ----------------------------------------------------\n# AUTHENTICATION CONFIG\n# ----------------------------------------------------\n# For details on how to set up each of the following authentication, see\n# http://flask-appbuilder.readthedocs.io/en/latest/security.html# authentication-methods\n# for details.\n\n# The authentication type\n# AUTH_OID : Is for OpenID\n# AUTH_DB : Is for database\n# AUTH_LDAP : Is for LDAP\n# AUTH_REMOTE_USER : Is for using REMOTE_USER from web server\n# AUTH_OAUTH : Is for OAuth\nAUTH_TYPE = AUTH_DB\n\n# Uncomment to setup Full admin role name\n# AUTH_ROLE_ADMIN = 'Admin'\n\n# Uncomment and set to desired role to enable access without authentication\n# AUTH_ROLE_PUBLIC = 'Viewer'\n\n# Will allow user self registration\n# AUTH_USER_REGISTRATION = True\n\n# The recaptcha it's automatically enabled for user self registration is active and the keys are necessary\n# RECAPTCHA_PRIVATE_KEY = PRIVATE_KEY\n# RECAPTCHA_PUBLIC_KEY = PUBLIC_KEY\n\n# Config for Flask-Mail necessary for user self registration\n# MAIL_SERVER = 'smtp.gmail.com'\n# MAIL_USE_TLS = True\n# MAIL_USERNAME = '[email protected]'\n# MAIL_PASSWORD = 'passwordformail'\n# MAIL_DEFAULT_SENDER = '[email protected]'\n\n# The default user self registration role\n# AUTH_USER_REGISTRATION_ROLE = \"Public\"\n\n# When using OAuth Auth, uncomment to setup provider(s) info\n# Google OAuth example:\n# OAUTH_PROVIDERS = [{\n# 
'name':'google',\n# 'token_key':'access_token',\n# 'icon':'fa-google',\n# 'remote_app': {\n# 'api_base_url':'https://www.googleapis.com/oauth2/v2/',\n# 'client_kwargs':{\n# 'scope': 'email profile'\n# },\n# 'access_token_url':'https://accounts.google.com/o/oauth2/token',\n# 'authorize_url':'https://accounts.google.com/o/oauth2/auth',\n# 'request_token_url': None,\n# 'client_id': GOOGLE_KEY,\n# 'client_secret': GOOGLE_SECRET_KEY,\n# }\n# }]\n\n# When using LDAP Auth, setup the ldap server\n# AUTH_LDAP_SERVER = \"ldap://ldapserver.new\"\n\n# When using OpenID Auth, uncomment to setup OpenID providers.\n# example for OpenID authentication\n# OPENID_PROVIDERS = [\n# { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },\n# { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' },\n# { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' },\n# { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]\n\n# ----------------------------------------------------\n# Theme CONFIG\n# ----------------------------------------------------\n# Flask App Builder comes up with a number of predefined themes\n# that you can use for Apache Airflow.\n# http://flask-appbuilder.readthedocs.io/en/latest/customizing.html#changing-themes\n# Please make sure to remove \"navbar_color\" configuration from airflow.cfg\n# in order to fully utilize the theme. (or use that property in conjunction with theme)\n# APP_THEME = \"bootstrap-theme.css\" # default bootstrap\n# APP_THEME = \"amelia.css\"\n# APP_THEME = \"cerulean.css\"\n# APP_THEME = \"cosmo.css\"\n# APP_THEME = \"cyborg.css\"\n# APP_THEME = \"darkly.css\"\n# APP_THEME = \"flatly.css\"\n# APP_THEME = \"journal.css\"\n# APP_THEME = \"lumen.css\"\n# APP_THEME = \"paper.css\"\n# APP_THEME = \"readable.css\"\n# APP_THEME = \"sandstone.css\"\n# APP_THEME = \"simplex.css\"\n# APP_THEME = \"slate.css\"\n# APP_THEME = \"solar.css\"\n# APP_THEME = \"spacelab.css\"\n# APP_THEME = \"superhero.css\"\n# APP_THEME = \"united.css\"\n# APP_THEME = \"yeti.css\"\n", "path": "airflow/config_templates/default_webserver_config.py"}]} | 1,985 | 97 |
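Aside: the one-line fix in this record relies on Flask-WTF's `WTF_CSRF_TIME_LIMIT` setting. With the default of 3600 seconds the token in a long-lived browser tab goes stale; `None` ties its validity to the session. A sketch of the relevant block in a `webserver_config.py`, mirroring the patched default (nothing here goes beyond what the diff adds):

```python
# Flask-WTF flag for CSRF
WTF_CSRF_ENABLED = True
# None = no separate expiry: the CSRF token stays valid for the life of the
# Flask session instead of timing out after the default 3600 seconds.
WTF_CSRF_TIME_LIMIT = None
```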
gh_patches_debug_17919 | rasdani/github-patches | git_diff | googleapis__google-cloud-python-4061 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Spelling Error (_medical_likeliehood)
_medical_likeliehood -> _medical_likelihood
https://github.com/GoogleCloudPlatform/google-cloud-python/blob/b28a4eb667ae08c3f4dcf9af891ed4931884989c/vision/google/cloud/vision/safe_search.py#L43
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `vision/google/cloud/vision/safe_search.py`
Content:
```
1 # Copyright 2017 Google Inc.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Safe search class for information returned from annotating an image."""
16
17 from google.cloud.vision.likelihood import _get_pb_likelihood
18 from google.cloud.vision.likelihood import Likelihood
19
20
21 class SafeSearchAnnotation(object):
22 """Representation of a SafeSearchAnnotation.
23
24 :type adult_likelihood: :class:`~google.cloud.vision.likelihood.Likelihood`
25 :param adult_likelihood: Likelihood that image contains adult material.
26
27 :type spoof_likelihood: :class:`~google.cloud.vision.likelihood.Likelihood`
28 :param spoof_likelihood: Likelihood that image is a spoof.
29
30 :type medical_likelihood:
31 :class:`~google.cloud.vision.likelihood.Likelihood`
32 :param medical_likelihood: Likelihood that image contains medical material.
33
34 :type violence_likelihood:
35 :class:`~google.cloud.vision.likelihood.Likelihood`
36 :param violence_likelihood: Likelihood that image contains violence.
37 """
38
39 def __init__(self, adult_likelihood, spoof_likelihood, medical_likelihood,
40 violence_likelihood):
41 self._adult_likelihood = adult_likelihood
42 self._spoof_likelihood = spoof_likelihood
43 self._medical_likeliehood = medical_likelihood
44 self._violence_likelihood = violence_likelihood
45
46 @classmethod
47 def from_api_repr(cls, response):
48 """Factory: construct SafeSearchAnnotation from Vision API response.
49
50 :type response: dict
51 :param response: Dictionary response from Vision API with safe search
52 data.
53
54 :rtype: :class:`~google.cloud.vision.safe_search.SafeSearchAnnotation`
55 :returns: Instance of ``SafeSearchAnnotation``.
56 """
57 adult_likelihood = Likelihood[response['adult']]
58 spoof_likelihood = Likelihood[response['spoof']]
59 medical_likelihood = Likelihood[response['medical']]
60 violence_likelihood = Likelihood[response['violence']]
61
62 return cls(adult_likelihood, spoof_likelihood, medical_likelihood,
63 violence_likelihood)
64
65 @classmethod
66 def from_pb(cls, image):
67 """Factory: construct SafeSearchAnnotation from Vision API response.
68
69 :type image: :class:`~google.cloud.vision_v1.proto.\
70 image_annotator_pb2.SafeSearchAnnotation`
71 :param image: Protobuf response from Vision API with safe search data.
72
73 :rtype: :class:`~google.cloud.vision.safe_search.SafeSearchAnnotation`
74 :returns: Instance of ``SafeSearchAnnotation``.
75 """
76 values = [image.adult, image.spoof, image.medical, image.violence]
77 classifications = map(_get_pb_likelihood, values)
78 return cls(*classifications)
79
80 @property
81 def adult(self):
82 """Represents the adult contents likelihood for the image.
83
84 :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`
85 :returns: ``Likelihood`` of the image containing adult content.
86 """
87 return self._adult_likelihood
88
89 @property
90 def spoof(self):
91 """The likelihood that an obvious modification was made to the image.
92
93 :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`
94 :returns: The ``Likelihood`` that an obvious modification was made to
95 the image's canonical version to make it appear funny or
96 offensive.
97 """
98 return self._spoof_likelihood
99
100 @property
101 def medical(self):
102 """Likelihood this is a medical image.
103
104 :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`
105 :returns: The ``Likelihood`` that the image is medical in origin.
106 """
107 return self._medical_likeliehood
108
109 @property
110 def violence(self):
111 """Likeliehood that this image contains violence.
112
113 :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`
114 :returns: The ``Likelihood`` that the image contains violence.
115 """
116 return self._violence_likelihood
117
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/vision/google/cloud/vision/safe_search.py b/vision/google/cloud/vision/safe_search.py
--- a/vision/google/cloud/vision/safe_search.py
+++ b/vision/google/cloud/vision/safe_search.py
@@ -40,7 +40,7 @@
violence_likelihood):
self._adult_likelihood = adult_likelihood
self._spoof_likelihood = spoof_likelihood
- self._medical_likeliehood = medical_likelihood
+ self._medical_likelihood = medical_likelihood
self._violence_likelihood = violence_likelihood
@classmethod
@@ -104,7 +104,7 @@
:rtype: :class:`~google.cloud.vision.likelihood.Likelihood`
:returns: The ``Likelihood`` that the image is medical in origin.
"""
- return self._medical_likeliehood
+ return self._medical_likelihood
@property
def violence(self):
| {"golden_diff": "diff --git a/vision/google/cloud/vision/safe_search.py b/vision/google/cloud/vision/safe_search.py\n--- a/vision/google/cloud/vision/safe_search.py\n+++ b/vision/google/cloud/vision/safe_search.py\n@@ -40,7 +40,7 @@\n violence_likelihood):\n self._adult_likelihood = adult_likelihood\n self._spoof_likelihood = spoof_likelihood\n- self._medical_likeliehood = medical_likelihood\n+ self._medical_likelihood = medical_likelihood\n self._violence_likelihood = violence_likelihood\n \n @classmethod\n@@ -104,7 +104,7 @@\n :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`\n :returns: The ``Likelihood`` that the image is medical in origin.\n \"\"\"\n- return self._medical_likeliehood\n+ return self._medical_likelihood\n \n @property\n def violence(self):\n", "issue": "Spelling Error (_medical_likeliehood)\n_medical_likeliehood -> _medical_likelihood\r\n\r\nhttps://github.com/GoogleCloudPlatform/google-cloud-python/blob/b28a4eb667ae08c3f4dcf9af891ed4931884989c/vision/google/cloud/vision/safe_search.py#L43\n", "before_files": [{"content": "# Copyright 2017 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Safe search class for information returned from annotating an image.\"\"\"\n\nfrom google.cloud.vision.likelihood import _get_pb_likelihood\nfrom google.cloud.vision.likelihood import Likelihood\n\n\nclass SafeSearchAnnotation(object):\n \"\"\"Representation of a SafeSearchAnnotation.\n\n :type adult_likelihood: :class:`~google.cloud.vision.likelihood.Likelihood`\n :param adult_likelihood: Likelihood that image contains adult material.\n\n :type spoof_likelihood: :class:`~google.cloud.vision.likelihood.Likelihood`\n :param spoof_likelihood: Likelihood that image is a spoof.\n\n :type medical_likelihood:\n :class:`~google.cloud.vision.likelihood.Likelihood`\n :param medical_likelihood: Likelihood that image contains medical material.\n\n :type violence_likelihood:\n :class:`~google.cloud.vision.likelihood.Likelihood`\n :param violence_likelihood: Likelihood that image contains violence.\n \"\"\"\n\n def __init__(self, adult_likelihood, spoof_likelihood, medical_likelihood,\n violence_likelihood):\n self._adult_likelihood = adult_likelihood\n self._spoof_likelihood = spoof_likelihood\n self._medical_likeliehood = medical_likelihood\n self._violence_likelihood = violence_likelihood\n\n @classmethod\n def from_api_repr(cls, response):\n \"\"\"Factory: construct SafeSearchAnnotation from Vision API response.\n\n :type response: dict\n :param response: Dictionary response from Vision API with safe search\n data.\n\n :rtype: :class:`~google.cloud.vision.safe_search.SafeSearchAnnotation`\n :returns: Instance of ``SafeSearchAnnotation``.\n \"\"\"\n adult_likelihood = Likelihood[response['adult']]\n spoof_likelihood = Likelihood[response['spoof']]\n medical_likelihood = Likelihood[response['medical']]\n violence_likelihood = Likelihood[response['violence']]\n\n return cls(adult_likelihood, spoof_likelihood, medical_likelihood,\n violence_likelihood)\n\n 
@classmethod\n def from_pb(cls, image):\n \"\"\"Factory: construct SafeSearchAnnotation from Vision API response.\n\n :type image: :class:`~google.cloud.vision_v1.proto.\\\n image_annotator_pb2.SafeSearchAnnotation`\n :param image: Protobuf response from Vision API with safe search data.\n\n :rtype: :class:`~google.cloud.vision.safe_search.SafeSearchAnnotation`\n :returns: Instance of ``SafeSearchAnnotation``.\n \"\"\"\n values = [image.adult, image.spoof, image.medical, image.violence]\n classifications = map(_get_pb_likelihood, values)\n return cls(*classifications)\n\n @property\n def adult(self):\n \"\"\"Represents the adult contents likelihood for the image.\n\n :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`\n :returns: ``Likelihood`` of the image containing adult content.\n \"\"\"\n return self._adult_likelihood\n\n @property\n def spoof(self):\n \"\"\"The likelihood that an obvious modification was made to the image.\n\n :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`\n :returns: The ``Likelihood`` that an obvious modification was made to\n the image's canonical version to make it appear funny or\n offensive.\n \"\"\"\n return self._spoof_likelihood\n\n @property\n def medical(self):\n \"\"\"Likelihood this is a medical image.\n\n :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`\n :returns: The ``Likelihood`` that the image is medical in origin.\n \"\"\"\n return self._medical_likeliehood\n\n @property\n def violence(self):\n \"\"\"Likeliehood that this image contains violence.\n\n :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`\n :returns: The ``Likelihood`` that the image contains violence.\n \"\"\"\n return self._violence_likelihood\n", "path": "vision/google/cloud/vision/safe_search.py"}], "after_files": [{"content": "# Copyright 2017 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Safe search class for information returned from annotating an image.\"\"\"\n\nfrom google.cloud.vision.likelihood import _get_pb_likelihood\nfrom google.cloud.vision.likelihood import Likelihood\n\n\nclass SafeSearchAnnotation(object):\n \"\"\"Representation of a SafeSearchAnnotation.\n\n :type adult_likelihood: :class:`~google.cloud.vision.likelihood.Likelihood`\n :param adult_likelihood: Likelihood that image contains adult material.\n\n :type spoof_likelihood: :class:`~google.cloud.vision.likelihood.Likelihood`\n :param spoof_likelihood: Likelihood that image is a spoof.\n\n :type medical_likelihood:\n :class:`~google.cloud.vision.likelihood.Likelihood`\n :param medical_likelihood: Likelihood that image contains medical material.\n\n :type violence_likelihood:\n :class:`~google.cloud.vision.likelihood.Likelihood`\n :param violence_likelihood: Likelihood that image contains violence.\n \"\"\"\n\n def __init__(self, adult_likelihood, spoof_likelihood, medical_likelihood,\n violence_likelihood):\n self._adult_likelihood = adult_likelihood\n self._spoof_likelihood = spoof_likelihood\n self._medical_likelihood = medical_likelihood\n 
self._violence_likelihood = violence_likelihood\n\n @classmethod\n def from_api_repr(cls, response):\n \"\"\"Factory: construct SafeSearchAnnotation from Vision API response.\n\n :type response: dict\n :param response: Dictionary response from Vision API with safe search\n data.\n\n :rtype: :class:`~google.cloud.vision.safe_search.SafeSearchAnnotation`\n :returns: Instance of ``SafeSearchAnnotation``.\n \"\"\"\n adult_likelihood = Likelihood[response['adult']]\n spoof_likelihood = Likelihood[response['spoof']]\n medical_likelihood = Likelihood[response['medical']]\n violence_likelihood = Likelihood[response['violence']]\n\n return cls(adult_likelihood, spoof_likelihood, medical_likelihood,\n violence_likelihood)\n\n @classmethod\n def from_pb(cls, image):\n \"\"\"Factory: construct SafeSearchAnnotation from Vision API response.\n\n :type image: :class:`~google.cloud.vision_v1.proto.\\\n image_annotator_pb2.SafeSearchAnnotation`\n :param image: Protobuf response from Vision API with safe search data.\n\n :rtype: :class:`~google.cloud.vision.safe_search.SafeSearchAnnotation`\n :returns: Instance of ``SafeSearchAnnotation``.\n \"\"\"\n values = [image.adult, image.spoof, image.medical, image.violence]\n classifications = map(_get_pb_likelihood, values)\n return cls(*classifications)\n\n @property\n def adult(self):\n \"\"\"Represents the adult contents likelihood for the image.\n\n :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`\n :returns: ``Likelihood`` of the image containing adult content.\n \"\"\"\n return self._adult_likelihood\n\n @property\n def spoof(self):\n \"\"\"The likelihood that an obvious modification was made to the image.\n\n :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`\n :returns: The ``Likelihood`` that an obvious modification was made to\n the image's canonical version to make it appear funny or\n offensive.\n \"\"\"\n return self._spoof_likelihood\n\n @property\n def medical(self):\n \"\"\"Likelihood this is a medical image.\n\n :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`\n :returns: The ``Likelihood`` that the image is medical in origin.\n \"\"\"\n return self._medical_likelihood\n\n @property\n def violence(self):\n \"\"\"Likeliehood that this image contains violence.\n\n :rtype: :class:`~google.cloud.vision.likelihood.Likelihood`\n :returns: The ``Likelihood`` that the image contains violence.\n \"\"\"\n return self._violence_likelihood\n", "path": "vision/google/cloud/vision/safe_search.py"}]} | 1,552 | 204 |
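Aside: the rename is purely internal — the public `medical` property keeps its name, so callers need no changes. A hypothetical usage sketch against the patched module (the response dict is made up for illustration; real values come from the Vision API):

```python
from google.cloud.vision.safe_search import SafeSearchAnnotation

response = {
    "adult": "VERY_UNLIKELY",
    "spoof": "UNLIKELY",
    "medical": "POSSIBLE",
    "violence": "VERY_UNLIKELY",
}
annotation = SafeSearchAnnotation.from_api_repr(response)
print(annotation.medical)  # Likelihood.POSSIBLE
```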
gh_patches_debug_64529 | rasdani/github-patches | git_diff | kartoza__prj.app-293 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
We need to support animated GIF's
Using licecap or silentcast it is easy to make animated GIFs. When images are uploaded to Django, though, they are resized and converted to PNG. We need to update the logic so thumbs etc. can be created for animated GIFs without losing the animation.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django_project/base/templatetags/custom_markup.py`
Content:
```
1 import markdown
2 from django import template
3 from django.template.defaultfilters import stringfilter
4 from django.utils.encoding import force_unicode
5 from django.utils.safestring import mark_safe
6
7 register = template.Library()
8
9
10 @register.filter(name='base_markdown', is_safe=True)
11 @stringfilter
12 def base_markdown(value):
13 extensions = ["nl2br", ]
14
15 return mark_safe(markdown.markdown(force_unicode(value),
16 extensions,
17 safe_mode=True,
18 enable_attributes=False))
19
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/django_project/base/templatetags/custom_markup.py b/django_project/base/templatetags/custom_markup.py
--- a/django_project/base/templatetags/custom_markup.py
+++ b/django_project/base/templatetags/custom_markup.py
@@ -16,3 +16,9 @@
extensions,
safe_mode=True,
enable_attributes=False))
+
+
[email protected](name='is_gif', is_safe=True)
+@stringfilter
+def is_gif(value):
+ return value[-4:] == '.gif'
| {"golden_diff": "diff --git a/django_project/base/templatetags/custom_markup.py b/django_project/base/templatetags/custom_markup.py\n--- a/django_project/base/templatetags/custom_markup.py\n+++ b/django_project/base/templatetags/custom_markup.py\n@@ -16,3 +16,9 @@\n extensions,\n safe_mode=True,\n enable_attributes=False))\n+\n+\[email protected](name='is_gif', is_safe=True)\n+@stringfilter\n+def is_gif(value):\n+ return value[-4:] == '.gif'\n", "issue": "We need to support animated GIF's\nUsing licecap or silentcast it is easy to make animated GIF's. When images are uploaded to django though they are resized and converted to PNG. We need to update the logic so thumbs etc. can be created for animate GIF's without losing the animation. \n\n", "before_files": [{"content": "import markdown\nfrom django import template\nfrom django.template.defaultfilters import stringfilter\nfrom django.utils.encoding import force_unicode\nfrom django.utils.safestring import mark_safe\n\nregister = template.Library()\n\n\[email protected](name='base_markdown', is_safe=True)\n@stringfilter\ndef base_markdown(value):\n extensions = [\"nl2br\", ]\n\n return mark_safe(markdown.markdown(force_unicode(value),\n extensions,\n safe_mode=True,\n enable_attributes=False))\n", "path": "django_project/base/templatetags/custom_markup.py"}], "after_files": [{"content": "import markdown\nfrom django import template\nfrom django.template.defaultfilters import stringfilter\nfrom django.utils.encoding import force_unicode\nfrom django.utils.safestring import mark_safe\n\nregister = template.Library()\n\n\[email protected](name='base_markdown', is_safe=True)\n@stringfilter\ndef base_markdown(value):\n extensions = [\"nl2br\", ]\n\n return mark_safe(markdown.markdown(force_unicode(value),\n extensions,\n safe_mode=True,\n enable_attributes=False))\n\n\[email protected](name='is_gif', is_safe=True)\n@stringfilter\ndef is_gif(value):\n return value[-4:] == '.gif'\n", "path": "django_project/base/templatetags/custom_markup.py"}]} | 457 | 124 |
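Aside: the new `is_gif` filter only inspects the file extension; the intent in the issue is that templates can branch on it (for example, serving the original upload rather than a resized PNG thumbnail when the path ends in `.gif`). A quick behavioural check, calling the filter as a plain function — the import path assumes the project root is on `PYTHONPATH` and the project's Django/markdown dependencies are installed:

```python
from django_project.base.templatetags.custom_markup import is_gif

print(is_gif("uploads/screen-capture.gif"))  # True
print(is_gif("uploads/screen-capture.png"))  # False
```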
gh_patches_debug_43425 | rasdani/github-patches | git_diff | cisagov__manage.get.gov-1759 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Increase reliability on OIDC connection
### Issue description
While investigating #1726 we realized there are some areas for improvement in our handling of the connection to OIDC, so that all scenarios of failure show a properly formatted 500 error page
### Acceptance criteria
- [ ] Make sure all login.gov/identity sandbox connection issues result in the usual 500 error
- [ ] refactor the connection set up as needed
### Additional context
_No response_
### Links to other issues
relates to: #1726
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/djangooidc/views.py`
Content:
```
1 # coding: utf-8
2
3 import logging
4
5 from django.conf import settings
6 from django.contrib.auth import logout as auth_logout
7 from django.contrib.auth import authenticate, login
8 from django.http import HttpResponseRedirect
9 from django.shortcuts import redirect, render
10 from urllib.parse import parse_qs, urlencode
11
12 from djangooidc.oidc import Client
13 from djangooidc import exceptions as o_e
14 from registrar.models import User
15
16 logger = logging.getLogger(__name__)
17
18 try:
19 # Initialize provider using pyOICD
20 OP = getattr(settings, "OIDC_ACTIVE_PROVIDER")
21 CLIENT = Client(OP)
22 logger.debug("client initialized %s" % CLIENT)
23 except Exception as err:
24 CLIENT = None # type: ignore
25 logger.warning(err)
26 logger.warning("Unable to configure OpenID Connect provider. Users cannot log in.")
27
28
29 def error_page(request, error):
30 """Display a sensible message and log the error."""
31 logger.error(error)
32 if isinstance(error, o_e.AuthenticationFailed):
33 return render(
34 request,
35 "401.html",
36 context={
37 "friendly_message": error.friendly_message,
38 "log_identifier": error.locator,
39 },
40 status=401,
41 )
42 if isinstance(error, o_e.InternalError):
43 return render(
44 request,
45 "500.html",
46 context={
47 "friendly_message": error.friendly_message,
48 "log_identifier": error.locator,
49 },
50 status=500,
51 )
52 if isinstance(error, Exception):
53 return render(request, "500.html", status=500)
54
55
56 def openid(request):
57 """Redirect the user to an authentication provider (OP)."""
58 # If the session reset because of a server restart, attempt to login again
59 request.session["acr_value"] = CLIENT.get_default_acr_value()
60
61 request.session["next"] = request.GET.get("next", "/")
62
63 try:
64 return CLIENT.create_authn_request(request.session)
65 except Exception as err:
66 return error_page(request, err)
67
68
69 def login_callback(request):
70 """Analyze the token returned by the authentication provider (OP)."""
71 try:
72 query = parse_qs(request.GET.urlencode())
73 userinfo = CLIENT.callback(query, request.session)
74 # test for need for identity verification and if it is satisfied
75 # if not satisfied, redirect user to login with stepped up acr_value
76 if requires_step_up_auth(userinfo):
77 # add acr_value to request.session
78 request.session["acr_value"] = CLIENT.get_step_up_acr_value()
79 return CLIENT.create_authn_request(request.session)
80 user = authenticate(request=request, **userinfo)
81 if user:
82 login(request, user)
83 logger.info("Successfully logged in user %s" % user)
84 # Double login bug (1507)?
85 return redirect(request.session.get("next", "/"))
86 else:
87 raise o_e.BannedUser()
88 except o_e.NoStateDefined as nsd_err:
89 logger.warning(f"No State Defined: {nsd_err}")
90 return redirect(request.session.get("next", "/"))
91 except Exception as err:
92 return error_page(request, err)
93
94
95 def requires_step_up_auth(userinfo):
96 """if User.needs_identity_verification and step_up_acr_value not in
97 ial returned from callback, return True"""
98 step_up_acr_value = CLIENT.get_step_up_acr_value()
99 acr_value = userinfo.get("ial", "")
100 uuid = userinfo.get("sub", "")
101 email = userinfo.get("email", "")
102 if acr_value != step_up_acr_value:
103 # The acr of this attempt is not at the highest level
104 # so check if the user needs the higher level
105 return User.needs_identity_verification(email, uuid)
106 else:
107 # This attempt already came back at the highest level
108 # so does not require step up
109 return False
110
111
112 def logout(request, next_page=None):
113 """Redirect the user to the authentication provider (OP) logout page."""
114 try:
115 user = request.user
116 request_args = {
117 "client_id": CLIENT.client_id,
118 "state": request.session["state"],
119 }
120 if (
121 "post_logout_redirect_uris" in CLIENT.registration_response.keys()
122 and len(CLIENT.registration_response["post_logout_redirect_uris"]) > 0
123 ):
124 request_args.update(
125 {"post_logout_redirect_uri": CLIENT.registration_response["post_logout_redirect_uris"][0]}
126 )
127 url = CLIENT.provider_info["end_session_endpoint"]
128 url += "?" + urlencode(request_args)
129 return HttpResponseRedirect(url)
130 except Exception as err:
131 return error_page(request, err)
132 finally:
133 # Always remove Django session stuff - even if not logged out from OP.
134 # Don't wait for the callback as it may never come.
135 auth_logout(request)
136 logger.info("Successfully logged out user %s" % user)
137 next_page = getattr(settings, "LOGOUT_REDIRECT_URL", None)
138 if next_page:
139 request.session["next"] = next_page
140
141
142 def logout_callback(request):
143 """Simple redirection view: after logout, redirect to `next`."""
144 next = request.session.get("next", "/")
145 return redirect(next)
146
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/djangooidc/views.py b/src/djangooidc/views.py
--- a/src/djangooidc/views.py
+++ b/src/djangooidc/views.py
@@ -15,15 +15,34 @@
logger = logging.getLogger(__name__)
-try:
+CLIENT = None
+
+
+def _initialize_client():
+ """Initialize the OIDC client. Exceptions are allowed to raise
+ and will need to be caught."""
+ global CLIENT
# Initialize provider using pyOICD
OP = getattr(settings, "OIDC_ACTIVE_PROVIDER")
CLIENT = Client(OP)
- logger.debug("client initialized %s" % CLIENT)
+ logger.debug("Client initialized: %s" % CLIENT)
+
+
+def _client_is_none():
+ """Return if the CLIENT is currently None."""
+ global CLIENT
+ return CLIENT is None
+
+
+# Initialize CLIENT
+try:
+ _initialize_client()
except Exception as err:
- CLIENT = None # type: ignore
- logger.warning(err)
- logger.warning("Unable to configure OpenID Connect provider. Users cannot log in.")
+ # In the event of an exception, log the error and allow the app load to continue
+ # without the OIDC Client. Subsequent login attempts will attempt to initialize
+ # again if Client is None
+ logger.error(err)
+ logger.error("Unable to configure OpenID Connect provider. Users cannot log in.")
def error_page(request, error):
@@ -55,12 +74,15 @@
def openid(request):
"""Redirect the user to an authentication provider (OP)."""
- # If the session reset because of a server restart, attempt to login again
- request.session["acr_value"] = CLIENT.get_default_acr_value()
-
- request.session["next"] = request.GET.get("next", "/")
-
+ global CLIENT
try:
+ # If the CLIENT is none, attempt to reinitialize before handling the request
+ if _client_is_none():
+ logger.debug("OIDC client is None, attempting to initialize")
+ _initialize_client()
+ request.session["acr_value"] = CLIENT.get_default_acr_value()
+ request.session["next"] = request.GET.get("next", "/")
+ # Create the authentication request
return CLIENT.create_authn_request(request.session)
except Exception as err:
return error_page(request, err)
@@ -68,12 +90,17 @@
def login_callback(request):
"""Analyze the token returned by the authentication provider (OP)."""
+ global CLIENT
try:
+ # If the CLIENT is none, attempt to reinitialize before handling the request
+ if _client_is_none():
+ logger.debug("OIDC client is None, attempting to initialize")
+ _initialize_client()
query = parse_qs(request.GET.urlencode())
userinfo = CLIENT.callback(query, request.session)
# test for need for identity verification and if it is satisfied
# if not satisfied, redirect user to login with stepped up acr_value
- if requires_step_up_auth(userinfo):
+ if _requires_step_up_auth(userinfo):
# add acr_value to request.session
request.session["acr_value"] = CLIENT.get_step_up_acr_value()
return CLIENT.create_authn_request(request.session)
@@ -86,13 +113,16 @@
else:
raise o_e.BannedUser()
except o_e.NoStateDefined as nsd_err:
+ # In the event that a user is in the middle of a login when the app is restarted,
+ # their session state will no longer be available, so redirect the user to the
+ # beginning of login process without raising an error to the user.
logger.warning(f"No State Defined: {nsd_err}")
return redirect(request.session.get("next", "/"))
except Exception as err:
return error_page(request, err)
-def requires_step_up_auth(userinfo):
+def _requires_step_up_auth(userinfo):
"""if User.needs_identity_verification and step_up_acr_value not in
ial returned from callback, return True"""
step_up_acr_value = CLIENT.get_step_up_acr_value()
| {"golden_diff": "diff --git a/src/djangooidc/views.py b/src/djangooidc/views.py\n--- a/src/djangooidc/views.py\n+++ b/src/djangooidc/views.py\n@@ -15,15 +15,34 @@\n \n logger = logging.getLogger(__name__)\n \n-try:\n+CLIENT = None\n+\n+\n+def _initialize_client():\n+ \"\"\"Initialize the OIDC client. Exceptions are allowed to raise\n+ and will need to be caught.\"\"\"\n+ global CLIENT\n # Initialize provider using pyOICD\n OP = getattr(settings, \"OIDC_ACTIVE_PROVIDER\")\n CLIENT = Client(OP)\n- logger.debug(\"client initialized %s\" % CLIENT)\n+ logger.debug(\"Client initialized: %s\" % CLIENT)\n+\n+\n+def _client_is_none():\n+ \"\"\"Return if the CLIENT is currently None.\"\"\"\n+ global CLIENT\n+ return CLIENT is None\n+\n+\n+# Initialize CLIENT\n+try:\n+ _initialize_client()\n except Exception as err:\n- CLIENT = None # type: ignore\n- logger.warning(err)\n- logger.warning(\"Unable to configure OpenID Connect provider. Users cannot log in.\")\n+ # In the event of an exception, log the error and allow the app load to continue\n+ # without the OIDC Client. Subsequent login attempts will attempt to initialize\n+ # again if Client is None\n+ logger.error(err)\n+ logger.error(\"Unable to configure OpenID Connect provider. Users cannot log in.\")\n \n \n def error_page(request, error):\n@@ -55,12 +74,15 @@\n \n def openid(request):\n \"\"\"Redirect the user to an authentication provider (OP).\"\"\"\n- # If the session reset because of a server restart, attempt to login again\n- request.session[\"acr_value\"] = CLIENT.get_default_acr_value()\n-\n- request.session[\"next\"] = request.GET.get(\"next\", \"/\")\n-\n+ global CLIENT\n try:\n+ # If the CLIENT is none, attempt to reinitialize before handling the request\n+ if _client_is_none():\n+ logger.debug(\"OIDC client is None, attempting to initialize\")\n+ _initialize_client()\n+ request.session[\"acr_value\"] = CLIENT.get_default_acr_value()\n+ request.session[\"next\"] = request.GET.get(\"next\", \"/\")\n+ # Create the authentication request\n return CLIENT.create_authn_request(request.session)\n except Exception as err:\n return error_page(request, err)\n@@ -68,12 +90,17 @@\n \n def login_callback(request):\n \"\"\"Analyze the token returned by the authentication provider (OP).\"\"\"\n+ global CLIENT\n try:\n+ # If the CLIENT is none, attempt to reinitialize before handling the request\n+ if _client_is_none():\n+ logger.debug(\"OIDC client is None, attempting to initialize\")\n+ _initialize_client()\n query = parse_qs(request.GET.urlencode())\n userinfo = CLIENT.callback(query, request.session)\n # test for need for identity verification and if it is satisfied\n # if not satisfied, redirect user to login with stepped up acr_value\n- if requires_step_up_auth(userinfo):\n+ if _requires_step_up_auth(userinfo):\n # add acr_value to request.session\n request.session[\"acr_value\"] = CLIENT.get_step_up_acr_value()\n return CLIENT.create_authn_request(request.session)\n@@ -86,13 +113,16 @@\n else:\n raise o_e.BannedUser()\n except o_e.NoStateDefined as nsd_err:\n+ # In the event that a user is in the middle of a login when the app is restarted,\n+ # their session state will no longer be available, so redirect the user to the\n+ # beginning of login process without raising an error to the user.\n logger.warning(f\"No State Defined: {nsd_err}\")\n return redirect(request.session.get(\"next\", \"/\"))\n except Exception as err:\n return error_page(request, err)\n \n \n-def requires_step_up_auth(userinfo):\n+def _requires_step_up_auth(userinfo):\n 
\"\"\"if User.needs_identity_verification and step_up_acr_value not in\n ial returned from callback, return True\"\"\"\n step_up_acr_value = CLIENT.get_step_up_acr_value()\n", "issue": "Increase reliability on OIDC connection\n### Issue description\n\nWhile investigating #1726 we realized there is some areas for improvement in our handling of the connection to OIDC so that all scenarios of failure show a properly formatted 500 error page\n\n### Acceptance criteria\n\n- [ ] Make sure all login.gov/identity sandbox connection issues result in the usual 500 error \r\n- [ ] refactor the connection set up as needed\n\n### Additional context\n\n_No response_\n\n### Links to other issues\n\nrelates to: #1726\n", "before_files": [{"content": "# coding: utf-8\n\nimport logging\n\nfrom django.conf import settings\nfrom django.contrib.auth import logout as auth_logout\nfrom django.contrib.auth import authenticate, login\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import redirect, render\nfrom urllib.parse import parse_qs, urlencode\n\nfrom djangooidc.oidc import Client\nfrom djangooidc import exceptions as o_e\nfrom registrar.models import User\n\nlogger = logging.getLogger(__name__)\n\ntry:\n # Initialize provider using pyOICD\n OP = getattr(settings, \"OIDC_ACTIVE_PROVIDER\")\n CLIENT = Client(OP)\n logger.debug(\"client initialized %s\" % CLIENT)\nexcept Exception as err:\n CLIENT = None # type: ignore\n logger.warning(err)\n logger.warning(\"Unable to configure OpenID Connect provider. Users cannot log in.\")\n\n\ndef error_page(request, error):\n \"\"\"Display a sensible message and log the error.\"\"\"\n logger.error(error)\n if isinstance(error, o_e.AuthenticationFailed):\n return render(\n request,\n \"401.html\",\n context={\n \"friendly_message\": error.friendly_message,\n \"log_identifier\": error.locator,\n },\n status=401,\n )\n if isinstance(error, o_e.InternalError):\n return render(\n request,\n \"500.html\",\n context={\n \"friendly_message\": error.friendly_message,\n \"log_identifier\": error.locator,\n },\n status=500,\n )\n if isinstance(error, Exception):\n return render(request, \"500.html\", status=500)\n\n\ndef openid(request):\n \"\"\"Redirect the user to an authentication provider (OP).\"\"\"\n # If the session reset because of a server restart, attempt to login again\n request.session[\"acr_value\"] = CLIENT.get_default_acr_value()\n\n request.session[\"next\"] = request.GET.get(\"next\", \"/\")\n\n try:\n return CLIENT.create_authn_request(request.session)\n except Exception as err:\n return error_page(request, err)\n\n\ndef login_callback(request):\n \"\"\"Analyze the token returned by the authentication provider (OP).\"\"\"\n try:\n query = parse_qs(request.GET.urlencode())\n userinfo = CLIENT.callback(query, request.session)\n # test for need for identity verification and if it is satisfied\n # if not satisfied, redirect user to login with stepped up acr_value\n if requires_step_up_auth(userinfo):\n # add acr_value to request.session\n request.session[\"acr_value\"] = CLIENT.get_step_up_acr_value()\n return CLIENT.create_authn_request(request.session)\n user = authenticate(request=request, **userinfo)\n if user:\n login(request, user)\n logger.info(\"Successfully logged in user %s\" % user)\n # Double login bug (1507)?\n return redirect(request.session.get(\"next\", \"/\"))\n else:\n raise o_e.BannedUser()\n except o_e.NoStateDefined as nsd_err:\n logger.warning(f\"No State Defined: {nsd_err}\")\n return redirect(request.session.get(\"next\", 
\"/\"))\n except Exception as err:\n return error_page(request, err)\n\n\ndef requires_step_up_auth(userinfo):\n \"\"\"if User.needs_identity_verification and step_up_acr_value not in\n ial returned from callback, return True\"\"\"\n step_up_acr_value = CLIENT.get_step_up_acr_value()\n acr_value = userinfo.get(\"ial\", \"\")\n uuid = userinfo.get(\"sub\", \"\")\n email = userinfo.get(\"email\", \"\")\n if acr_value != step_up_acr_value:\n # The acr of this attempt is not at the highest level\n # so check if the user needs the higher level\n return User.needs_identity_verification(email, uuid)\n else:\n # This attempt already came back at the highest level\n # so does not require step up\n return False\n\n\ndef logout(request, next_page=None):\n \"\"\"Redirect the user to the authentication provider (OP) logout page.\"\"\"\n try:\n user = request.user\n request_args = {\n \"client_id\": CLIENT.client_id,\n \"state\": request.session[\"state\"],\n }\n if (\n \"post_logout_redirect_uris\" in CLIENT.registration_response.keys()\n and len(CLIENT.registration_response[\"post_logout_redirect_uris\"]) > 0\n ):\n request_args.update(\n {\"post_logout_redirect_uri\": CLIENT.registration_response[\"post_logout_redirect_uris\"][0]}\n )\n url = CLIENT.provider_info[\"end_session_endpoint\"]\n url += \"?\" + urlencode(request_args)\n return HttpResponseRedirect(url)\n except Exception as err:\n return error_page(request, err)\n finally:\n # Always remove Django session stuff - even if not logged out from OP.\n # Don't wait for the callback as it may never come.\n auth_logout(request)\n logger.info(\"Successfully logged out user %s\" % user)\n next_page = getattr(settings, \"LOGOUT_REDIRECT_URL\", None)\n if next_page:\n request.session[\"next\"] = next_page\n\n\ndef logout_callback(request):\n \"\"\"Simple redirection view: after logout, redirect to `next`.\"\"\"\n next = request.session.get(\"next\", \"/\")\n return redirect(next)\n", "path": "src/djangooidc/views.py"}], "after_files": [{"content": "# coding: utf-8\n\nimport logging\n\nfrom django.conf import settings\nfrom django.contrib.auth import logout as auth_logout\nfrom django.contrib.auth import authenticate, login\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import redirect, render\nfrom urllib.parse import parse_qs, urlencode\n\nfrom djangooidc.oidc import Client\nfrom djangooidc import exceptions as o_e\nfrom registrar.models import User\n\nlogger = logging.getLogger(__name__)\n\nCLIENT = None\n\n\ndef _initialize_client():\n \"\"\"Initialize the OIDC client. Exceptions are allowed to raise\n and will need to be caught.\"\"\"\n global CLIENT\n # Initialize provider using pyOICD\n OP = getattr(settings, \"OIDC_ACTIVE_PROVIDER\")\n CLIENT = Client(OP)\n logger.debug(\"Client initialized: %s\" % CLIENT)\n\n\ndef _client_is_none():\n \"\"\"Return if the CLIENT is currently None.\"\"\"\n global CLIENT\n return CLIENT is None\n\n\n# Initialize CLIENT\ntry:\n _initialize_client()\nexcept Exception as err:\n # In the event of an exception, log the error and allow the app load to continue\n # without the OIDC Client. Subsequent login attempts will attempt to initialize\n # again if Client is None\n logger.error(err)\n logger.error(\"Unable to configure OpenID Connect provider. 
Users cannot log in.\")\n\n\ndef error_page(request, error):\n \"\"\"Display a sensible message and log the error.\"\"\"\n logger.error(error)\n if isinstance(error, o_e.AuthenticationFailed):\n return render(\n request,\n \"401.html\",\n context={\n \"friendly_message\": error.friendly_message,\n \"log_identifier\": error.locator,\n },\n status=401,\n )\n if isinstance(error, o_e.InternalError):\n return render(\n request,\n \"500.html\",\n context={\n \"friendly_message\": error.friendly_message,\n \"log_identifier\": error.locator,\n },\n status=500,\n )\n if isinstance(error, Exception):\n return render(request, \"500.html\", status=500)\n\n\ndef openid(request):\n \"\"\"Redirect the user to an authentication provider (OP).\"\"\"\n global CLIENT\n try:\n # If the CLIENT is none, attempt to reinitialize before handling the request\n if _client_is_none():\n logger.debug(\"OIDC client is None, attempting to initialize\")\n _initialize_client()\n request.session[\"acr_value\"] = CLIENT.get_default_acr_value()\n request.session[\"next\"] = request.GET.get(\"next\", \"/\")\n # Create the authentication request\n return CLIENT.create_authn_request(request.session)\n except Exception as err:\n return error_page(request, err)\n\n\ndef login_callback(request):\n \"\"\"Analyze the token returned by the authentication provider (OP).\"\"\"\n global CLIENT\n try:\n # If the CLIENT is none, attempt to reinitialize before handling the request\n if _client_is_none():\n logger.debug(\"OIDC client is None, attempting to initialize\")\n _initialize_client()\n query = parse_qs(request.GET.urlencode())\n userinfo = CLIENT.callback(query, request.session)\n # test for need for identity verification and if it is satisfied\n # if not satisfied, redirect user to login with stepped up acr_value\n if _requires_step_up_auth(userinfo):\n # add acr_value to request.session\n request.session[\"acr_value\"] = CLIENT.get_step_up_acr_value()\n return CLIENT.create_authn_request(request.session)\n user = authenticate(request=request, **userinfo)\n if user:\n login(request, user)\n logger.info(\"Successfully logged in user %s\" % user)\n # Double login bug (1507)?\n return redirect(request.session.get(\"next\", \"/\"))\n else:\n raise o_e.BannedUser()\n except o_e.NoStateDefined as nsd_err:\n # In the event that a user is in the middle of a login when the app is restarted,\n # their session state will no longer be available, so redirect the user to the\n # beginning of login process without raising an error to the user.\n logger.warning(f\"No State Defined: {nsd_err}\")\n return redirect(request.session.get(\"next\", \"/\"))\n except Exception as err:\n return error_page(request, err)\n\n\ndef _requires_step_up_auth(userinfo):\n \"\"\"if User.needs_identity_verification and step_up_acr_value not in\n ial returned from callback, return True\"\"\"\n step_up_acr_value = CLIENT.get_step_up_acr_value()\n acr_value = userinfo.get(\"ial\", \"\")\n uuid = userinfo.get(\"sub\", \"\")\n email = userinfo.get(\"email\", \"\")\n if acr_value != step_up_acr_value:\n # The acr of this attempt is not at the highest level\n # so check if the user needs the higher level\n return User.needs_identity_verification(email, uuid)\n else:\n # This attempt already came back at the highest level\n # so does not require step up\n return False\n\n\ndef logout(request, next_page=None):\n \"\"\"Redirect the user to the authentication provider (OP) logout page.\"\"\"\n try:\n user = request.user\n request_args = {\n \"client_id\": CLIENT.client_id,\n 
\"state\": request.session[\"state\"],\n }\n if (\n \"post_logout_redirect_uris\" in CLIENT.registration_response.keys()\n and len(CLIENT.registration_response[\"post_logout_redirect_uris\"]) > 0\n ):\n request_args.update(\n {\"post_logout_redirect_uri\": CLIENT.registration_response[\"post_logout_redirect_uris\"][0]}\n )\n url = CLIENT.provider_info[\"end_session_endpoint\"]\n url += \"?\" + urlencode(request_args)\n return HttpResponseRedirect(url)\n except Exception as err:\n return error_page(request, err)\n finally:\n # Always remove Django session stuff - even if not logged out from OP.\n # Don't wait for the callback as it may never come.\n auth_logout(request)\n logger.info(\"Successfully logged out user %s\" % user)\n next_page = getattr(settings, \"LOGOUT_REDIRECT_URL\", None)\n if next_page:\n request.session[\"next\"] = next_page\n\n\ndef logout_callback(request):\n \"\"\"Simple redirection view: after logout, redirect to `next`.\"\"\"\n next = request.session.get(\"next\", \"/\")\n return redirect(next)\n", "path": "src/djangooidc/views.py"}]} | 1,817 | 924 |
gh_patches_debug_13360 | rasdani/github-patches | git_diff | urllib3__urllib3-60 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
nosetests crashes under IPv4 (error: getsockaddrarg: bad family)
Turns out tornado is really eager to use IPv6. Unless you expressly hand the server the address, it doesn't even check for socket IPv6 support. I'll submit a pull request for the one-line fix in dummyserver/server.py momentarily.
Source: https://groups.google.com/group/python-tornado/browse_thread/thread/3ec04536e57a2833?pli=1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dummyserver/server.py`
Content:
```
1 #!/usr/bin/env python
2
3 """
4 Dummy server used for unit testing.
5 """
6 from __future__ import print_function
7
8 import logging
9 import os
10 import sys
11 import threading
12 import socket
13
14 import tornado.wsgi
15 import tornado.httpserver
16 import tornado.ioloop
17
18 from dummyserver.handlers import TestingApp
19
20
21 log = logging.getLogger(__name__)
22
23 CERTS_PATH = os.path.join(os.path.dirname(__file__), 'certs')
24 DEFAULT_CERTS = {
25 'certfile': os.path.join(CERTS_PATH, 'server.crt'),
26 'keyfile': os.path.join(CERTS_PATH, 'server.key'),
27 }
28 DEFAULT_CA = os.path.join(CERTS_PATH, 'cacert.pem')
29 DEFAULT_CA_BAD = os.path.join(CERTS_PATH, 'client_bad.pem')
30
31
32 # Different types of servers we have:
33
34
35 class SocketServerThread(threading.Thread):
36 """
37 :param socket_handler: Callable which receives a socket argument for one
38 request.
39 :param ready_lock: Lock which gets released when the socket handler is
40 ready to receive requests.
41 """
42 def __init__(self, socket_handler, host='localhost', port=8081,
43 ready_lock=None):
44 threading.Thread.__init__(self)
45
46 self.socket_handler = socket_handler
47 self.host = host
48 self.port = port
49 self.ready_lock = ready_lock
50
51 def _start_server(self):
52 sock = socket.socket()
53 sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
54 sock.bind((self.host, self.port))
55
56 # Once listen() returns, the server socket is ready
57 sock.listen(1)
58
59 if self.ready_lock:
60 self.ready_lock.release()
61
62 self.socket_handler(sock)
63
64 def run(self):
65 self.server = self._start_server()
66
67
68 class TornadoServerThread(threading.Thread):
69 def __init__(self, host='localhost', port=8081, scheme='http', certs=None):
70 threading.Thread.__init__(self)
71
72 self.host = host
73 self.port = port
74 self.scheme = scheme
75 self.certs = certs
76
77 def _start_server(self):
78 container = tornado.wsgi.WSGIContainer(TestingApp())
79
80 if self.scheme == 'https':
81 http_server = tornado.httpserver.HTTPServer(container,
82 ssl_options=self.certs)
83 else:
84 http_server = tornado.httpserver.HTTPServer(container)
85
86 http_server.listen(self.port)
87 return http_server
88
89 def run(self):
90 self.server = self._start_server()
91 self.ioloop = tornado.ioloop.IOLoop.instance()
92 self.ioloop.start()
93
94 def stop(self):
95 self.server.stop()
96 self.ioloop.stop()
97
98
99 if __name__ == '__main__':
100 log.setLevel(logging.DEBUG)
101 log.addHandler(logging.StreamHandler(sys.stderr))
102
103 from urllib3 import get_host
104
105 url = "http://localhost:8081"
106 if len(sys.argv) > 1:
107 url = sys.argv[1]
108
109 print("Starting WGI server at: %s" % url)
110
111 scheme, host, port = get_host(url)
112 t = TornadoServerThread(scheme=scheme, host=host, port=port)
113 t.start()
114
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dummyserver/server.py b/dummyserver/server.py
--- a/dummyserver/server.py
+++ b/dummyserver/server.py
@@ -83,7 +83,7 @@
else:
http_server = tornado.httpserver.HTTPServer(container)
- http_server.listen(self.port)
+ http_server.listen(self.port, address=self.host)
return http_server
def run(self):
@@ -106,7 +106,7 @@
if len(sys.argv) > 1:
url = sys.argv[1]
- print("Starting WGI server at: %s" % url)
+ print("Starting WSGI server at: %s" % url)
scheme, host, port = get_host(url)
t = TornadoServerThread(scheme=scheme, host=host, port=port)
| {"golden_diff": "diff --git a/dummyserver/server.py b/dummyserver/server.py\n--- a/dummyserver/server.py\n+++ b/dummyserver/server.py\n@@ -83,7 +83,7 @@\n else:\n http_server = tornado.httpserver.HTTPServer(container)\n \n- http_server.listen(self.port)\n+ http_server.listen(self.port, address=self.host)\n return http_server\n \n def run(self):\n@@ -106,7 +106,7 @@\n if len(sys.argv) > 1:\n url = sys.argv[1]\n \n- print(\"Starting WGI server at: %s\" % url)\n+ print(\"Starting WSGI server at: %s\" % url)\n \n scheme, host, port = get_host(url)\n t = TornadoServerThread(scheme=scheme, host=host, port=port)\n", "issue": "nosetests crashes under IPv4 (error: getsockaddrarg: bad family)\nTurns out tornado is really eager to use IPv6. Unless you expressly hand the server the address, it doesn't even check for socket IPv6 support. I'll submit a pull request for the one-line fix in dummyserver/server.py momentarily.\n\nSource: https://groups.google.com/group/python-tornado/browse_thread/thread/3ec04536e57a2833?pli=1\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\n\"\"\"\nDummy server used for unit testing.\n\"\"\"\nfrom __future__ import print_function\n\nimport logging\nimport os\nimport sys\nimport threading\nimport socket\n\nimport tornado.wsgi\nimport tornado.httpserver\nimport tornado.ioloop\n\nfrom dummyserver.handlers import TestingApp\n\n\nlog = logging.getLogger(__name__)\n\nCERTS_PATH = os.path.join(os.path.dirname(__file__), 'certs')\nDEFAULT_CERTS = {\n 'certfile': os.path.join(CERTS_PATH, 'server.crt'),\n 'keyfile': os.path.join(CERTS_PATH, 'server.key'),\n}\nDEFAULT_CA = os.path.join(CERTS_PATH, 'cacert.pem')\nDEFAULT_CA_BAD = os.path.join(CERTS_PATH, 'client_bad.pem')\n\n\n# Different types of servers we have:\n\n\nclass SocketServerThread(threading.Thread):\n \"\"\"\n :param socket_handler: Callable which receives a socket argument for one\n request.\n :param ready_lock: Lock which gets released when the socket handler is\n ready to receive requests.\n \"\"\"\n def __init__(self, socket_handler, host='localhost', port=8081,\n ready_lock=None):\n threading.Thread.__init__(self)\n\n self.socket_handler = socket_handler\n self.host = host\n self.port = port\n self.ready_lock = ready_lock\n\n def _start_server(self):\n sock = socket.socket()\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n sock.bind((self.host, self.port))\n\n # Once listen() returns, the server socket is ready\n sock.listen(1)\n\n if self.ready_lock:\n self.ready_lock.release()\n\n self.socket_handler(sock)\n\n def run(self):\n self.server = self._start_server()\n\n\nclass TornadoServerThread(threading.Thread):\n def __init__(self, host='localhost', port=8081, scheme='http', certs=None):\n threading.Thread.__init__(self)\n\n self.host = host\n self.port = port\n self.scheme = scheme\n self.certs = certs\n\n def _start_server(self):\n container = tornado.wsgi.WSGIContainer(TestingApp())\n\n if self.scheme == 'https':\n http_server = tornado.httpserver.HTTPServer(container,\n ssl_options=self.certs)\n else:\n http_server = tornado.httpserver.HTTPServer(container)\n\n http_server.listen(self.port)\n return http_server\n\n def run(self):\n self.server = self._start_server()\n self.ioloop = tornado.ioloop.IOLoop.instance()\n self.ioloop.start()\n\n def stop(self):\n self.server.stop()\n self.ioloop.stop()\n\n\nif __name__ == '__main__':\n log.setLevel(logging.DEBUG)\n log.addHandler(logging.StreamHandler(sys.stderr))\n\n from urllib3 import get_host\n\n url = \"http://localhost:8081\"\n if 
len(sys.argv) > 1:\n url = sys.argv[1]\n\n print(\"Starting WGI server at: %s\" % url)\n\n scheme, host, port = get_host(url)\n t = TornadoServerThread(scheme=scheme, host=host, port=port)\n t.start()\n", "path": "dummyserver/server.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n\"\"\"\nDummy server used for unit testing.\n\"\"\"\nfrom __future__ import print_function\n\nimport logging\nimport os\nimport sys\nimport threading\nimport socket\n\nimport tornado.wsgi\nimport tornado.httpserver\nimport tornado.ioloop\n\nfrom dummyserver.handlers import TestingApp\n\n\nlog = logging.getLogger(__name__)\n\nCERTS_PATH = os.path.join(os.path.dirname(__file__), 'certs')\nDEFAULT_CERTS = {\n 'certfile': os.path.join(CERTS_PATH, 'server.crt'),\n 'keyfile': os.path.join(CERTS_PATH, 'server.key'),\n}\nDEFAULT_CA = os.path.join(CERTS_PATH, 'cacert.pem')\nDEFAULT_CA_BAD = os.path.join(CERTS_PATH, 'client_bad.pem')\n\n\n# Different types of servers we have:\n\n\nclass SocketServerThread(threading.Thread):\n \"\"\"\n :param socket_handler: Callable which receives a socket argument for one\n request.\n :param ready_lock: Lock which gets released when the socket handler is\n ready to receive requests.\n \"\"\"\n def __init__(self, socket_handler, host='localhost', port=8081,\n ready_lock=None):\n threading.Thread.__init__(self)\n\n self.socket_handler = socket_handler\n self.host = host\n self.port = port\n self.ready_lock = ready_lock\n\n def _start_server(self):\n sock = socket.socket()\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n sock.bind((self.host, self.port))\n\n # Once listen() returns, the server socket is ready\n sock.listen(1)\n\n if self.ready_lock:\n self.ready_lock.release()\n\n self.socket_handler(sock)\n\n def run(self):\n self.server = self._start_server()\n\n\nclass TornadoServerThread(threading.Thread):\n def __init__(self, host='localhost', port=8081, scheme='http', certs=None):\n threading.Thread.__init__(self)\n\n self.host = host\n self.port = port\n self.scheme = scheme\n self.certs = certs\n\n def _start_server(self):\n container = tornado.wsgi.WSGIContainer(TestingApp())\n\n if self.scheme == 'https':\n http_server = tornado.httpserver.HTTPServer(container,\n ssl_options=self.certs)\n else:\n http_server = tornado.httpserver.HTTPServer(container)\n\n http_server.listen(self.port, address=self.host)\n return http_server\n\n def run(self):\n self.server = self._start_server()\n self.ioloop = tornado.ioloop.IOLoop.instance()\n self.ioloop.start()\n\n def stop(self):\n self.server.stop()\n self.ioloop.stop()\n\n\nif __name__ == '__main__':\n log.setLevel(logging.DEBUG)\n log.addHandler(logging.StreamHandler(sys.stderr))\n\n from urllib3 import get_host\n\n url = \"http://localhost:8081\"\n if len(sys.argv) > 1:\n url = sys.argv[1]\n\n print(\"Starting WSGI server at: %s\" % url)\n\n scheme, host, port = get_host(url)\n t = TornadoServerThread(scheme=scheme, host=host, port=port)\n t.start()\n", "path": "dummyserver/server.py"}]} | 1,305 | 187 |
gh_patches_debug_22208 | rasdani/github-patches | git_diff | wagtail__wagtail-1576 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Project template needs updating for search promotions changes
The provided view still references the EditorsPick model: https://github.com/torchbox/wagtail/blob/master/wagtail/project_template/search/views.py#L5
Shall we update this to use the new contrib module or remove it completely?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wagtail/project_template/search/views.py`
Content:
```
1 from django.shortcuts import render
2 from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
3
4 from wagtail.wagtailcore.models import Page
5 from wagtail.wagtailsearch.models import Query, EditorsPick
6
7
8 def search(request):
9 search_query = request.GET.get('query', None)
10 page = request.GET.get('page', 1)
11
12 # Search
13 if search_query:
14 search_results = Page.objects.live().search(search_query)
15 query = Query.get(search_query)
16
17 # Record hit
18 query.add_hit()
19
20 # Get search picks
21 search_picks = query.editors_picks.all()
22 else:
23 search_results = Page.objects.none()
24 search_picks = EditorsPick.objects.none()
25
26 # Pagination
27 paginator = Paginator(search_results, 10)
28 try:
29 search_results = paginator.page(page)
30 except PageNotAnInteger:
31 search_results = paginator.page(1)
32 except EmptyPage:
33 search_results = paginator.page(paginator.num_pages)
34
35 return render(request, 'search/search.html', {
36 'search_query': search_query,
37 'search_results': search_results,
38 'search_picks': search_picks,
39 })
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/wagtail/project_template/search/views.py b/wagtail/project_template/search/views.py
--- a/wagtail/project_template/search/views.py
+++ b/wagtail/project_template/search/views.py
@@ -2,7 +2,7 @@
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from wagtail.wagtailcore.models import Page
-from wagtail.wagtailsearch.models import Query, EditorsPick
+from wagtail.wagtailsearch.models import Query
def search(request):
@@ -16,12 +16,8 @@
# Record hit
query.add_hit()
-
- # Get search picks
- search_picks = query.editors_picks.all()
else:
search_results = Page.objects.none()
- search_picks = EditorsPick.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
@@ -35,5 +31,4 @@
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
- 'search_picks': search_picks,
})
| {"golden_diff": "diff --git a/wagtail/project_template/search/views.py b/wagtail/project_template/search/views.py\n--- a/wagtail/project_template/search/views.py\n+++ b/wagtail/project_template/search/views.py\n@@ -2,7 +2,7 @@\n from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n \n from wagtail.wagtailcore.models import Page\n-from wagtail.wagtailsearch.models import Query, EditorsPick\n+from wagtail.wagtailsearch.models import Query\n \n \n def search(request):\n@@ -16,12 +16,8 @@\n \n # Record hit\n query.add_hit()\n-\n- # Get search picks\n- search_picks = query.editors_picks.all()\n else:\n search_results = Page.objects.none()\n- search_picks = EditorsPick.objects.none()\n \n # Pagination\n paginator = Paginator(search_results, 10)\n@@ -35,5 +31,4 @@\n return render(request, 'search/search.html', {\n 'search_query': search_query,\n 'search_results': search_results,\n- 'search_picks': search_picks,\n })\n", "issue": "Project template needs updating for search promotions changes\nThe provided view still references the EditorsPick model: https://github.com/torchbox/wagtail/blob/master/wagtail/project_template/search/views.py#L5\n\nShall we update this to use the new contrib module or remove it completely?\n\n", "before_files": [{"content": "from django.shortcuts import render\nfrom django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n\nfrom wagtail.wagtailcore.models import Page\nfrom wagtail.wagtailsearch.models import Query, EditorsPick\n\n\ndef search(request):\n search_query = request.GET.get('query', None)\n page = request.GET.get('page', 1)\n\n # Search\n if search_query:\n search_results = Page.objects.live().search(search_query)\n query = Query.get(search_query)\n\n # Record hit\n query.add_hit()\n\n # Get search picks\n search_picks = query.editors_picks.all()\n else:\n search_results = Page.objects.none()\n search_picks = EditorsPick.objects.none()\n\n # Pagination\n paginator = Paginator(search_results, 10)\n try:\n search_results = paginator.page(page)\n except PageNotAnInteger:\n search_results = paginator.page(1)\n except EmptyPage:\n search_results = paginator.page(paginator.num_pages)\n\n return render(request, 'search/search.html', {\n 'search_query': search_query,\n 'search_results': search_results,\n 'search_picks': search_picks,\n })\n", "path": "wagtail/project_template/search/views.py"}], "after_files": [{"content": "from django.shortcuts import render\nfrom django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n\nfrom wagtail.wagtailcore.models import Page\nfrom wagtail.wagtailsearch.models import Query\n\n\ndef search(request):\n search_query = request.GET.get('query', None)\n page = request.GET.get('page', 1)\n\n # Search\n if search_query:\n search_results = Page.objects.live().search(search_query)\n query = Query.get(search_query)\n\n # Record hit\n query.add_hit()\n else:\n search_results = Page.objects.none()\n\n # Pagination\n paginator = Paginator(search_results, 10)\n try:\n search_results = paginator.page(page)\n except PageNotAnInteger:\n search_results = paginator.page(1)\n except EmptyPage:\n search_results = paginator.page(paginator.num_pages)\n\n return render(request, 'search/search.html', {\n 'search_query': search_query,\n 'search_results': search_results,\n })\n", "path": "wagtail/project_template/search/views.py"}]} | 649 | 246 |
gh_patches_debug_23718 | rasdani/github-patches | git_diff | GeotrekCE__Geotrek-admin-1398 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
SIGNAGES module / Type filter disappeared
I think TYPE filter was available before.
Crucial filter of course.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `geotrek/infrastructure/filters.py`
Content:
```
1 from django.utils.translation import ugettext_lazy as _
2
3 from geotrek.common.filters import StructureRelatedFilterSet, YearFilter
4 from geotrek.maintenance.filters import InterventionYearSelect
5
6 from .models import INFRASTRUCTURE_TYPES, Infrastructure, Signage
7
8
9 class InfrastructureYearSelect(InterventionYearSelect):
10 label = _(u"Intervention year")
11
12
13 class InfrastructureFilterSet(StructureRelatedFilterSet):
14 intervention_year = YearFilter(name='interventions_set__date',
15 widget=InfrastructureYearSelect,
16 label=_(u"Intervention year"))
17
18 def __init__(self, *args, **kwargs):
19 super(InfrastructureFilterSet, self).__init__(*args, **kwargs)
20 field = self.form.fields['type']
21 field.queryset = field.queryset.exclude(type=INFRASTRUCTURE_TYPES.SIGNAGE)
22
23 field = self.form.fields['type__type']
24 all_choices = field.widget.choices
25 all_choices = [c for c in all_choices if c[0] != INFRASTRUCTURE_TYPES.SIGNAGE]
26 field.widget.choices = [('', _(u"Category"))] + all_choices
27
28 class Meta(StructureRelatedFilterSet.Meta):
29 model = Infrastructure
30 fields = StructureRelatedFilterSet.Meta.fields + ['type__type', 'type']
31
32
33 class SignageFilterSet(StructureRelatedFilterSet):
34 intervention_year = YearFilter(name='interventions_set__date',
35 widget=InfrastructureYearSelect)
36
37 class Meta(StructureRelatedFilterSet.Meta):
38 model = Signage
39 fields = StructureRelatedFilterSet.Meta.fields
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/geotrek/infrastructure/filters.py b/geotrek/infrastructure/filters.py
--- a/geotrek/infrastructure/filters.py
+++ b/geotrek/infrastructure/filters.py
@@ -12,8 +12,7 @@
class InfrastructureFilterSet(StructureRelatedFilterSet):
intervention_year = YearFilter(name='interventions_set__date',
- widget=InfrastructureYearSelect,
- label=_(u"Intervention year"))
+ widget=InfrastructureYearSelect)
def __init__(self, *args, **kwargs):
super(InfrastructureFilterSet, self).__init__(*args, **kwargs)
@@ -34,6 +33,11 @@
intervention_year = YearFilter(name='interventions_set__date',
widget=InfrastructureYearSelect)
+ def __init__(self, *args, **kwargs):
+ super(SignageFilterSet, self).__init__(*args, **kwargs)
+ field = self.form.fields['type']
+ field.queryset = field.queryset.filter(type=INFRASTRUCTURE_TYPES.SIGNAGE)
+
class Meta(StructureRelatedFilterSet.Meta):
model = Signage
- fields = StructureRelatedFilterSet.Meta.fields
+ fields = StructureRelatedFilterSet.Meta.fields + ['type']
| {"golden_diff": "diff --git a/geotrek/infrastructure/filters.py b/geotrek/infrastructure/filters.py\n--- a/geotrek/infrastructure/filters.py\n+++ b/geotrek/infrastructure/filters.py\n@@ -12,8 +12,7 @@\n \n class InfrastructureFilterSet(StructureRelatedFilterSet):\n intervention_year = YearFilter(name='interventions_set__date',\n- widget=InfrastructureYearSelect,\n- label=_(u\"Intervention year\"))\n+ widget=InfrastructureYearSelect)\n \n def __init__(self, *args, **kwargs):\n super(InfrastructureFilterSet, self).__init__(*args, **kwargs)\n@@ -34,6 +33,11 @@\n intervention_year = YearFilter(name='interventions_set__date',\n widget=InfrastructureYearSelect)\n \n+ def __init__(self, *args, **kwargs):\n+ super(SignageFilterSet, self).__init__(*args, **kwargs)\n+ field = self.form.fields['type']\n+ field.queryset = field.queryset.filter(type=INFRASTRUCTURE_TYPES.SIGNAGE)\n+\n class Meta(StructureRelatedFilterSet.Meta):\n model = Signage\n- fields = StructureRelatedFilterSet.Meta.fields\n+ fields = StructureRelatedFilterSet.Meta.fields + ['type']\n", "issue": "SIGNAGES module / Type filter disappeared\nI think TYPE filter was available before.\nCrucial filter of course.\n\n", "before_files": [{"content": "from django.utils.translation import ugettext_lazy as _\n\nfrom geotrek.common.filters import StructureRelatedFilterSet, YearFilter\nfrom geotrek.maintenance.filters import InterventionYearSelect\n\nfrom .models import INFRASTRUCTURE_TYPES, Infrastructure, Signage\n\n\nclass InfrastructureYearSelect(InterventionYearSelect):\n label = _(u\"Intervention year\")\n\n\nclass InfrastructureFilterSet(StructureRelatedFilterSet):\n intervention_year = YearFilter(name='interventions_set__date',\n widget=InfrastructureYearSelect,\n label=_(u\"Intervention year\"))\n\n def __init__(self, *args, **kwargs):\n super(InfrastructureFilterSet, self).__init__(*args, **kwargs)\n field = self.form.fields['type']\n field.queryset = field.queryset.exclude(type=INFRASTRUCTURE_TYPES.SIGNAGE)\n\n field = self.form.fields['type__type']\n all_choices = field.widget.choices\n all_choices = [c for c in all_choices if c[0] != INFRASTRUCTURE_TYPES.SIGNAGE]\n field.widget.choices = [('', _(u\"Category\"))] + all_choices\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Infrastructure\n fields = StructureRelatedFilterSet.Meta.fields + ['type__type', 'type']\n\n\nclass SignageFilterSet(StructureRelatedFilterSet):\n intervention_year = YearFilter(name='interventions_set__date',\n widget=InfrastructureYearSelect)\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Signage\n fields = StructureRelatedFilterSet.Meta.fields\n", "path": "geotrek/infrastructure/filters.py"}], "after_files": [{"content": "from django.utils.translation import ugettext_lazy as _\n\nfrom geotrek.common.filters import StructureRelatedFilterSet, YearFilter\nfrom geotrek.maintenance.filters import InterventionYearSelect\n\nfrom .models import INFRASTRUCTURE_TYPES, Infrastructure, Signage\n\n\nclass InfrastructureYearSelect(InterventionYearSelect):\n label = _(u\"Intervention year\")\n\n\nclass InfrastructureFilterSet(StructureRelatedFilterSet):\n intervention_year = YearFilter(name='interventions_set__date',\n widget=InfrastructureYearSelect)\n\n def __init__(self, *args, **kwargs):\n super(InfrastructureFilterSet, self).__init__(*args, **kwargs)\n field = self.form.fields['type']\n field.queryset = field.queryset.exclude(type=INFRASTRUCTURE_TYPES.SIGNAGE)\n\n field = self.form.fields['type__type']\n all_choices = field.widget.choices\n 
all_choices = [c for c in all_choices if c[0] != INFRASTRUCTURE_TYPES.SIGNAGE]\n field.widget.choices = [('', _(u\"Category\"))] + all_choices\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Infrastructure\n fields = StructureRelatedFilterSet.Meta.fields + ['type__type', 'type']\n\n\nclass SignageFilterSet(StructureRelatedFilterSet):\n intervention_year = YearFilter(name='interventions_set__date',\n widget=InfrastructureYearSelect)\n\n def __init__(self, *args, **kwargs):\n super(SignageFilterSet, self).__init__(*args, **kwargs)\n field = self.form.fields['type']\n field.queryset = field.queryset.filter(type=INFRASTRUCTURE_TYPES.SIGNAGE)\n\n class Meta(StructureRelatedFilterSet.Meta):\n model = Signage\n fields = StructureRelatedFilterSet.Meta.fields + ['type']\n", "path": "geotrek/infrastructure/filters.py"}]} | 694 | 284 |
gh_patches_debug_61517 | rasdani/github-patches | git_diff | open-mmlab__mmpose-271 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Pylint: R1710
```bash
mmpose/apis/test.py:142:0: R1710: Either all return statements in a function should return an expression, or none of them should. (inconsistent-return-statements)
mmpose/datasets/datasets/mesh/mesh_mix_dataset.py:38:4: R1710: Either all return statements in a function should return an expression, or none of them should. (inconsistent-return-statements)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mmpose/datasets/datasets/mesh/mesh_mix_dataset.py`
Content:
```
1 from abc import ABCMeta
2
3 import numpy as np
4 from torch.utils.data import Dataset
5
6 from mmpose.datasets.builder import DATASETS
7 from .mesh_base_dataset import MeshBaseDataset
8
9
10 @DATASETS.register_module()
11 class MeshMixDataset(Dataset, metaclass=ABCMeta):
12 """Mix Dataset for 3D human mesh estimation.
13
14 The dataset combines data from multiple datasets (MeshBaseDataset) and
15 sample the data from different datasets with the provided proportions.
16 The dataset loads raw features and apply specified transforms
17 to return a dict containing the image tensors and other information.
18
19 Args:
20 configs (list): List of configs for multiple datasets.
21 partition (list): Sample proportion of multiple datasets.
22 The the elements of it should be non-negative and the
23 sum of it should be 1.
24 """
25
26 def __init__(self, configs, partition):
27 """Load data from multiple datasets."""
28 assert min(partition) >= 0
29 assert sum(partition) == 1
30 self.partition = np.array(partition).cumsum()
31 self.datasets = [MeshBaseDataset(**cfg) for cfg in configs]
32 self.length = max(len(ds) for ds in self.datasets)
33
34 def __len__(self):
35 """Get the size of the dataset."""
36 return self.length
37
38 def __getitem__(self, idx):
39 """Given index, sample the data from multiple datasets with the given
40 proportion."""
41 p = np.random.rand()
42 for i in range(len(self.datasets)):
43 if p <= self.partition[i]:
44 index_new = (idx + np.random.rand()) * len(
45 self.datasets[i]) / self.length
46 index_new = int(np.round(index_new)) % (len(self.datasets[i]))
47 return self.datasets[i][index_new]
48
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mmpose/datasets/datasets/mesh/mesh_mix_dataset.py b/mmpose/datasets/datasets/mesh/mesh_mix_dataset.py
--- a/mmpose/datasets/datasets/mesh/mesh_mix_dataset.py
+++ b/mmpose/datasets/datasets/mesh/mesh_mix_dataset.py
@@ -45,3 +45,4 @@
self.datasets[i]) / self.length
index_new = int(np.round(index_new)) % (len(self.datasets[i]))
return self.datasets[i][index_new]
+ return None
| {"golden_diff": "diff --git a/mmpose/datasets/datasets/mesh/mesh_mix_dataset.py b/mmpose/datasets/datasets/mesh/mesh_mix_dataset.py\n--- a/mmpose/datasets/datasets/mesh/mesh_mix_dataset.py\n+++ b/mmpose/datasets/datasets/mesh/mesh_mix_dataset.py\n@@ -45,3 +45,4 @@\n self.datasets[i]) / self.length\n index_new = int(np.round(index_new)) % (len(self.datasets[i]))\n return self.datasets[i][index_new]\n+ return None\n", "issue": "Pylint: R1710\n```bash\r\nmmpose/apis/test.py:142:0: R1710: Either all return statements in a function should return an expression, or none of them should. (inconsistent-return-statements)\r\nmmpose/datasets/datasets/mesh/mesh_mix_dataset.py:38:4: R1710: Either all return statements in a function should return an expression, or none of them should. (inconsistent-return-statements)\r\n```\n", "before_files": [{"content": "from abc import ABCMeta\n\nimport numpy as np\nfrom torch.utils.data import Dataset\n\nfrom mmpose.datasets.builder import DATASETS\nfrom .mesh_base_dataset import MeshBaseDataset\n\n\[email protected]_module()\nclass MeshMixDataset(Dataset, metaclass=ABCMeta):\n \"\"\"Mix Dataset for 3D human mesh estimation.\n\n The dataset combines data from multiple datasets (MeshBaseDataset) and\n sample the data from different datasets with the provided proportions.\n The dataset loads raw features and apply specified transforms\n to return a dict containing the image tensors and other information.\n\n Args:\n configs (list): List of configs for multiple datasets.\n partition (list): Sample proportion of multiple datasets.\n The the elements of it should be non-negative and the\n sum of it should be 1.\n \"\"\"\n\n def __init__(self, configs, partition):\n \"\"\"Load data from multiple datasets.\"\"\"\n assert min(partition) >= 0\n assert sum(partition) == 1\n self.partition = np.array(partition).cumsum()\n self.datasets = [MeshBaseDataset(**cfg) for cfg in configs]\n self.length = max(len(ds) for ds in self.datasets)\n\n def __len__(self):\n \"\"\"Get the size of the dataset.\"\"\"\n return self.length\n\n def __getitem__(self, idx):\n \"\"\"Given index, sample the data from multiple datasets with the given\n proportion.\"\"\"\n p = np.random.rand()\n for i in range(len(self.datasets)):\n if p <= self.partition[i]:\n index_new = (idx + np.random.rand()) * len(\n self.datasets[i]) / self.length\n index_new = int(np.round(index_new)) % (len(self.datasets[i]))\n return self.datasets[i][index_new]\n", "path": "mmpose/datasets/datasets/mesh/mesh_mix_dataset.py"}], "after_files": [{"content": "from abc import ABCMeta\n\nimport numpy as np\nfrom torch.utils.data import Dataset\n\nfrom mmpose.datasets.builder import DATASETS\nfrom .mesh_base_dataset import MeshBaseDataset\n\n\[email protected]_module()\nclass MeshMixDataset(Dataset, metaclass=ABCMeta):\n \"\"\"Mix Dataset for 3D human mesh estimation.\n\n The dataset combines data from multiple datasets (MeshBaseDataset) and\n sample the data from different datasets with the provided proportions.\n The dataset loads raw features and apply specified transforms\n to return a dict containing the image tensors and other information.\n\n Args:\n configs (list): List of configs for multiple datasets.\n partition (list): Sample proportion of multiple datasets.\n The the elements of it should be non-negative and the\n sum of it should be 1.\n \"\"\"\n\n def __init__(self, configs, partition):\n \"\"\"Load data from multiple datasets.\"\"\"\n assert min(partition) >= 0\n assert sum(partition) == 1\n self.partition = 
np.array(partition).cumsum()\n self.datasets = [MeshBaseDataset(**cfg) for cfg in configs]\n self.length = max(len(ds) for ds in self.datasets)\n\n def __len__(self):\n \"\"\"Get the size of the dataset.\"\"\"\n return self.length\n\n def __getitem__(self, idx):\n \"\"\"Given index, sample the data from multiple datasets with the given\n proportion.\"\"\"\n p = np.random.rand()\n for i in range(len(self.datasets)):\n if p <= self.partition[i]:\n index_new = (idx + np.random.rand()) * len(\n self.datasets[i]) / self.length\n index_new = int(np.round(index_new)) % (len(self.datasets[i]))\n return self.datasets[i][index_new]\n return None\n", "path": "mmpose/datasets/datasets/mesh/mesh_mix_dataset.py"}]} | 849 | 120 |
gh_patches_debug_16387 | rasdani/github-patches | git_diff | python__python-docs-es-106 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Review the `:role:!key` occurrences
When we did migration #27, we accepted `:role:!key` the same as `:role:key`.
The only difference between them is that the one with `!` does not create a link to the reference.
We have to review them so that they are consistent again.
--- END ISSUE ---
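For the review itself, a minimal sketch that lists every `!`-prefixed role left in the translated `.po` files; the search root, glob pattern, and regex are assumptions about the repository layout, not something the issue specifies:

```python
import pathlib
import re

# Matches roles written as :role:`!target` (the "no-link" form).
# Pattern and search root are illustrative assumptions.
NO_LINK_ROLE = re.compile(r":[\w.+-]+(?::[\w.+-]+)?:`![^`]+`")

for po_file in pathlib.Path(".").rglob("*.po"):
    text = po_file.read_text(encoding="utf-8")
    for lineno, line in enumerate(text.splitlines(), start=1):
        for match in NO_LINK_ROLE.finditer(line):
            print(f"{po_file}:{lineno}: {match.group(0)}")
```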
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conf.py`
Content:
```
1 # Sphinx configuration file.
2 #
3 # - import original configurations from cpython/Doc/conf.py
4 # - append the path considering the cpython submodule is at ./cpython
5 # - create the symbolic links under ./cpython/locale/es/LC_MESSAGES
6 # - make the build to work under Read the Docs
7 #
8 # The git submodule was created using this Stack Overflow answer
9 # to fetch only the commit that I needed and avoid clonning the whole history
10 # https://stackoverflow.com/a/27445058
11 #
12 # This can be built locally using `sphinx-build` by running
13 #
14 # $ sphinx-build -b html -n -d _build/doctrees -D language=es . _build/html
15
16 import sys, os, time
17 sys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))
18 sys.path.append(os.path.abspath('cpython/Doc/includes'))
19
20 # Import all the Sphinx settings from cpython
21 sys.path.append(os.path.abspath('cpython/Doc'))
22 from conf import *
23
24 # Call patchlevel with the proper path to get the version from
25 # instead of hardcoding it
26 import patchlevel
27 version, release = patchlevel.get_header_version_info(os.path.abspath('cpython/Doc'))
28
29 project = 'Python en Español'
30 copyright = '2001-%s, Python Software Foundation' % time.strftime('%Y')
31
32 html_theme_path = ['cpython/Doc/tools']
33 templates_path = ['cpython/Doc/tools/templates']
34 html_static_path = ['cpython/Doc/tools/static']
35
36 os.system('mkdir -p cpython/locales/es/')
37 os.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')
38
39
40 if not os.environ.get('SPHINX_GETTEXT') == 'True':
41 # Override all the files from ``.overrides`` directory
42 import glob
43 for root, dirs, files in os.walk('.overrides'):
44 for fname in files:
45 if fname == 'README.rst' and root == '.overrides':
46 continue
47 destroot = root.replace('.overrides', '').lstrip('/')
48 outputdir = os.path.join(
49 'cpython',
50 'Doc',
51 destroot,
52 fname,
53 )
54 os.system(f'ln -nfs `pwd`/{root}/{fname} {outputdir}')
55
56 gettext_compact = False
57 locale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir
58
59
60 # NOTE: Read the Docs does not support "multi document output".
61 # So, we put all the documentation as a single file for now.
62 _stdauthor = r'Guido van Rossum\\and the Python development team'
63 latex_documents = [
64 ('contents', 'python-docs-es.tex', u'Documentación de Python en Español',
65 _stdauthor, 'manual'),
66 ]
67
68 def setup(app):
69
70 def add_contributing_banner(app, doctree):
71 """
72 Insert a banner at the top of the index.
73
74 This way, we can easily communicate people to help with the translation,
75 pointing them to different resources.
76 """
77
78 if app.builder.format != 'html':
79 # Do not include the banner when building with other formats
80 # (this is useful when using -b gettext)
81 return
82
83 from docutils import nodes, core
84
85 message = '¡Ayúdanos a traducir la documentación oficial de Python al Español! ' \
86 f'Puedes encontrar más información en `Como contribuir </es/{version}/CONTRIBUTING.html>`_. ' \
87 'Ayuda a acercar Python a más personas de habla hispana.'
88
89 paragraph = core.publish_doctree(message)[0]
90 banner = nodes.warning(ids=['contributing-banner'])
91 banner.append(paragraph)
92
93 for document in doctree.traverse(nodes.document):
94 document.insert(0, banner)
95
96 # Change the sourcedir programmatically because Read the Docs always call it with `.`
97 app.srcdir = 'cpython/Doc'
98
99 app.connect('doctree-read', add_contributing_banner)
100
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conf.py b/conf.py
--- a/conf.py
+++ b/conf.py
@@ -11,7 +11,7 @@
#
# This can be built locally using `sphinx-build` by running
#
-# $ sphinx-build -b html -n -d _build/doctrees -D language=es . _build/html
+# $ sphinx-build -b html -d _build/doctrees -D language=es . _build/html
import sys, os, time
sys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))
@@ -37,6 +37,12 @@
os.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')
+exclude_patterns = [
+ # This file is not included and it not marked as :orphan:
+ 'distutils/_setuptools_disclaimer.rst',
+ 'README.rst',
+]
+
if not os.environ.get('SPHINX_GETTEXT') == 'True':
# Override all the files from ``.overrides`` directory
import glob
| {"golden_diff": "diff --git a/conf.py b/conf.py\n--- a/conf.py\n+++ b/conf.py\n@@ -11,7 +11,7 @@\n #\n # This can be built locally using `sphinx-build` by running\n #\n-# $ sphinx-build -b html -n -d _build/doctrees -D language=es . _build/html\n+# $ sphinx-build -b html -d _build/doctrees -D language=es . _build/html\n \n import sys, os, time\n sys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))\n@@ -37,6 +37,12 @@\n os.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')\n \n \n+exclude_patterns = [\n+ # This file is not included and it not marked as :orphan:\n+ 'distutils/_setuptools_disclaimer.rst',\n+ 'README.rst',\n+]\n+\n if not os.environ.get('SPHINX_GETTEXT') == 'True':\n # Override all the files from ``.overrides`` directory\n import glob\n", "issue": "Revisar los `:role:!key`\nCuando hicimos la migraci\u00f3n #27 aceptamos `:role:!key` como `:role:key`.\r\n\r\nLa \u00fanica diferencia entre ellos es que el que tiene `!` no hace un link a la referencia.\r\n\r\nTenemos que revisar que queden consistentes nuevamente.\n", "before_files": [{"content": "# Sphinx configuration file.\n#\n# - import original configurations from cpython/Doc/conf.py\n# - append the path considering the cpython submodule is at ./cpython\n# - create the symbolic links under ./cpython/locale/es/LC_MESSAGES\n# - make the build to work under Read the Docs\n#\n# The git submodule was created using this Stack Overflow answer\n# to fetch only the commit that I needed and avoid clonning the whole history\n# https://stackoverflow.com/a/27445058\n#\n# This can be built locally using `sphinx-build` by running\n#\n# $ sphinx-build -b html -n -d _build/doctrees -D language=es . _build/html\n\nimport sys, os, time\nsys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))\nsys.path.append(os.path.abspath('cpython/Doc/includes'))\n\n# Import all the Sphinx settings from cpython\nsys.path.append(os.path.abspath('cpython/Doc'))\nfrom conf import *\n\n# Call patchlevel with the proper path to get the version from\n# instead of hardcoding it\nimport patchlevel\nversion, release = patchlevel.get_header_version_info(os.path.abspath('cpython/Doc'))\n\nproject = 'Python en Espa\u00f1ol'\ncopyright = '2001-%s, Python Software Foundation' % time.strftime('%Y')\n\nhtml_theme_path = ['cpython/Doc/tools']\ntemplates_path = ['cpython/Doc/tools/templates']\nhtml_static_path = ['cpython/Doc/tools/static']\n\nos.system('mkdir -p cpython/locales/es/')\nos.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')\n\n\nif not os.environ.get('SPHINX_GETTEXT') == 'True':\n # Override all the files from ``.overrides`` directory\n import glob\n for root, dirs, files in os.walk('.overrides'):\n for fname in files:\n if fname == 'README.rst' and root == '.overrides':\n continue\n destroot = root.replace('.overrides', '').lstrip('/')\n outputdir = os.path.join(\n 'cpython',\n 'Doc',\n destroot,\n fname,\n )\n os.system(f'ln -nfs `pwd`/{root}/{fname} {outputdir}')\n\ngettext_compact = False\nlocale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir\n\n\n# NOTE: Read the Docs does not support \"multi document output\".\n# So, we put all the documentation as a single file for now.\n_stdauthor = r'Guido van Rossum\\\\and the Python development team'\nlatex_documents = [\n ('contents', 'python-docs-es.tex', u'Documentaci\u00f3n de Python en Espa\u00f1ol',\n _stdauthor, 'manual'),\n]\n\ndef setup(app):\n\n def add_contributing_banner(app, doctree):\n \"\"\"\n Insert a banner at the top of the index.\n\n This way, we 
can easily communicate people to help with the translation,\n pointing them to different resources.\n \"\"\"\n\n if app.builder.format != 'html':\n # Do not include the banner when building with other formats\n # (this is useful when using -b gettext)\n return\n\n from docutils import nodes, core\n\n message = '\u00a1Ay\u00fadanos a traducir la documentaci\u00f3n oficial de Python al Espa\u00f1ol! ' \\\n f'Puedes encontrar m\u00e1s informaci\u00f3n en `Como contribuir </es/{version}/CONTRIBUTING.html>`_. ' \\\n 'Ayuda a acercar Python a m\u00e1s personas de habla hispana.'\n\n paragraph = core.publish_doctree(message)[0]\n banner = nodes.warning(ids=['contributing-banner'])\n banner.append(paragraph)\n\n for document in doctree.traverse(nodes.document):\n document.insert(0, banner)\n\n # Change the sourcedir programmatically because Read the Docs always call it with `.`\n app.srcdir = 'cpython/Doc'\n\n app.connect('doctree-read', add_contributing_banner)\n", "path": "conf.py"}], "after_files": [{"content": "# Sphinx configuration file.\n#\n# - import original configurations from cpython/Doc/conf.py\n# - append the path considering the cpython submodule is at ./cpython\n# - create the symbolic links under ./cpython/locale/es/LC_MESSAGES\n# - make the build to work under Read the Docs\n#\n# The git submodule was created using this Stack Overflow answer\n# to fetch only the commit that I needed and avoid clonning the whole history\n# https://stackoverflow.com/a/27445058\n#\n# This can be built locally using `sphinx-build` by running\n#\n# $ sphinx-build -b html -d _build/doctrees -D language=es . _build/html\n\nimport sys, os, time\nsys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))\nsys.path.append(os.path.abspath('cpython/Doc/includes'))\n\n# Import all the Sphinx settings from cpython\nsys.path.append(os.path.abspath('cpython/Doc'))\nfrom conf import *\n\n# Call patchlevel with the proper path to get the version from\n# instead of hardcoding it\nimport patchlevel\nversion, release = patchlevel.get_header_version_info(os.path.abspath('cpython/Doc'))\n\nproject = 'Python en Espa\u00f1ol'\ncopyright = '2001-%s, Python Software Foundation' % time.strftime('%Y')\n\nhtml_theme_path = ['cpython/Doc/tools']\ntemplates_path = ['cpython/Doc/tools/templates']\nhtml_static_path = ['cpython/Doc/tools/static']\n\nos.system('mkdir -p cpython/locales/es/')\nos.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')\n\n\nexclude_patterns = [\n # This file is not included and it not marked as :orphan:\n 'distutils/_setuptools_disclaimer.rst',\n 'README.rst',\n]\n\nif not os.environ.get('SPHINX_GETTEXT') == 'True':\n # Override all the files from ``.overrides`` directory\n import glob\n for root, dirs, files in os.walk('.overrides'):\n for fname in files:\n if fname == 'README.rst' and root == '.overrides':\n continue\n destroot = root.replace('.overrides', '').lstrip('/')\n outputdir = os.path.join(\n 'cpython',\n 'Doc',\n destroot,\n fname,\n )\n os.system(f'ln -nfs `pwd`/{root}/{fname} {outputdir}')\n\ngettext_compact = False\nlocale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir\n\n\n# NOTE: Read the Docs does not support \"multi document output\".\n# So, we put all the documentation as a single file for now.\n_stdauthor = r'Guido van Rossum\\\\and the Python development team'\nlatex_documents = [\n ('contents', 'python-docs-es.tex', u'Documentaci\u00f3n de Python en Espa\u00f1ol',\n _stdauthor, 'manual'),\n]\n\ndef setup(app):\n\n def add_contributing_banner(app, 
doctree):\n \"\"\"\n Insert a banner at the top of the index.\n\n This way, we can easily communicate people to help with the translation,\n pointing them to different resources.\n \"\"\"\n\n if app.builder.format != 'html':\n # Do not include the banner when building with other formats\n # (this is useful when using -b gettext)\n return\n\n from docutils import nodes, core\n\n message = '\u00a1Ay\u00fadanos a traducir la documentaci\u00f3n oficial de Python al Espa\u00f1ol! ' \\\n f'Puedes encontrar m\u00e1s informaci\u00f3n en `Como contribuir </es/{version}/CONTRIBUTING.html>`_. ' \\\n 'Ayuda a acercar Python a m\u00e1s personas de habla hispana.'\n\n paragraph = core.publish_doctree(message)[0]\n banner = nodes.warning(ids=['contributing-banner'])\n banner.append(paragraph)\n\n for document in doctree.traverse(nodes.document):\n document.insert(0, banner)\n\n # Change the sourcedir programmatically because Read the Docs always call it with `.`\n app.srcdir = 'cpython/Doc'\n\n app.connect('doctree-read', add_contributing_banner)\n", "path": "conf.py"}]} | 1,395 | 237 |
gh_patches_debug_30419 | rasdani/github-patches | git_diff | uccser__cs-unplugged-731 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Implement automatic update of .po file
The .po file needs to be updated with all static strings for translation by running `python manage.py makemessages`. This needs to be run upon any change to templates or database content.
This process should be automated on Travis to run when any such files are updated on `develop`.
--- END ISSUE ---
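A rough sketch of what such an automated step could look like; the diff range and the changed-file suffixes checked here are assumptions for illustration, and the real trigger would need to match the project's layout:

```python
import subprocess

# Regenerate the .po file when translatable sources changed in the last
# commit -- roughly what a CI job on the develop branch might do.
changed = subprocess.run(
    ["git", "diff", "--name-only", "HEAD~1", "HEAD"],
    capture_output=True, text=True, check=True,
).stdout.splitlines()

if any(path.endswith((".html", ".yaml", ".md")) for path in changed):
    subprocess.run(["python", "manage.py", "makemessages", "--all"], check=True)
```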
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py`
Content:
```
1 """Script to print list of file paths of all completely translated files for a given language."""
2
3 import os
4 import argparse
5
6 from crowdin_bot import api
7
8 SOURCE_LANGUAGE = "en"
9
10 def get_language_info(language):
11 """Get xml tree from language info api call.
12
13 Args:
14 language: (str) crowdin language code
15
16 Returns:
17 lxml.etree object
18 """
19 return api.api_call_xml(
20 "language-status",
21 language=language
22 )
23
24 def process_item(item, parent_path=None, csu_language_code=None):
25 """Return list of completely translated file paths in a given directory tree node.
26
27 Args:
28 item: (etree.Element): itemm node in language-status xml tree
29 (see https://support.crowdin.com/api/language-status/)
30 parent_path: (str) path to the translated file node (None if the current item is
31 the root of the directory tree).
32 csu_language_code: (str) Language code (in locale format) on CSU end
33 (may differ from crowdin language code according to language mapping
34 in yaml file)
35
36 Returns:
37 (list) list of file paths that are completely translated
38 """
39 if item.find("node_type").text == "file":
40 filename = item.find("name").text
41 if parent_path:
42 path = os.path.join(parent_path, filename)
43 else:
44 path = filename
45
46 # Skip full translated check for *.po - they can always be included
47 if filename.endswith(".po"):
48 return [path]
49
50 if item.find("phrases").text == item.find("approved").text:
51 return [path]
52 else:
53 return []
54
55 else:
56 inner_nodes = item.find("files")
57 dirname = item.find("name").text
58 if dirname == SOURCE_LANGUAGE:
59 dirname = csu_language_code
60 if parent_path:
61 path = os.path.join(parent_path, dirname)
62 else:
63 path = dirname
64 completed = []
65 for inner_node in inner_nodes:
66 completed += process_item(inner_node, parent_path=path, csu_language_code=csu_language_code)
67 return completed
68
69
70 if __name__ == "__main__":
71 parser = argparse.ArgumentParser()
72 parser.add_argument('--crowdin-code', required=True,
73 help='Crowdin language code for target language')
74 parser.add_argument('--csu-code', required=True,
75 help='CSU language code for target language')
76 args = parser.parse_args()
77 lang_info = get_language_info(args.crowdin_code)
78 files = lang_info.find("files")
79 completed = []
80 for item in files:
81 completed += process_item(item, csu_language_code=args.csu_code)
82 print('\n'.join(completed))
83
```
Path: `infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py`
Content:
```
1 """Script to print list of all crowdin language codes for project."""
2
3 from crowdin_bot import api
4
5 NS_DICT = {
6 'ns': "urn:oasis:names:tc:xliff:document:1.2"
7 }
8
9 def get_project_languages():
10 """Get list of crowdin language codes.
11
12 Returns:
13 (list) list of project crowdin language codes
14 """
15 info_xml = api.api_call_xml("info")
16 languages = info_xml.find('languages')
17 translatable_languages = []
18 for language in languages:
19 # Check it's not the incontext pseudo language
20 if language.find("can_translate").text == "1":
21 translatable_languages.append(language.find('code').text)
22 return translatable_languages
23
24 if __name__ == "__main__":
25 print('\n'.join(get_project_languages()))
26
```
--- END FILES ---
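To make the tree walk in `process_item` concrete, a small hand-built example; the XML shape is inferred from the fields the code reads, and the import assumes the `crowdin_bot` package is importable in your environment:

```python
from lxml import etree

from crowdin_bot.get_complete_translations import process_item

# Minimal stand-in for one directory node of the language-status response.
item = etree.fromstring(
    "<item><node_type>directory</node_type><name>en</name><files>"
    "<item><node_type>file</node_type><name>topics.yaml</name>"
    "<phrases>10</phrases><approved>10</approved></item>"
    "<item><node_type>file</node_type><name>other.yaml</name>"
    "<phrases>10</phrases><approved>3</approved></item>"
    "</files></item>"
)

# Only the fully approved file is returned, under the CSU locale directory.
print(process_item(item, csu_language_code="de"))  # ['de/topics.yaml']
```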
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py b/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py
--- a/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py
+++ b/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py
@@ -43,9 +43,9 @@
else:
path = filename
- # Skip full translated check for *.po - they can always be included
+ # Skip *.po - they are handled separately
if filename.endswith(".po"):
- return [path]
+ return []
if item.find("phrases").text == item.find("approved").text:
return [path]
@@ -79,4 +79,5 @@
completed = []
for item in files:
completed += process_item(item, csu_language_code=args.csu_code)
- print('\n'.join(completed))
+ for path in completed:
+ print(path)
diff --git a/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py b/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py
--- a/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py
+++ b/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py
@@ -12,14 +12,14 @@
Returns:
(list) list of project crowdin language codes
"""
- info_xml = api.api_call_xml("info")
- languages = info_xml.find('languages')
- translatable_languages = []
- for language in languages:
- # Check it's not the incontext pseudo language
- if language.find("can_translate").text == "1":
- translatable_languages.append(language.find('code').text)
- return translatable_languages
+ active_languages = []
+ trans_status = api.api_call_json("status")
+ for language in trans_status:
+ # Check language has actually had some translation done
+ if int(language["words_approved"]) > 0:
+ active_languages.append(language["code"])
+ return active_languages
if __name__ == "__main__":
- print('\n'.join(get_project_languages()))
+ for language in get_project_languages():
+ print(language)
| {"golden_diff": "diff --git a/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py b/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py\n--- a/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py\n+++ b/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py\n@@ -43,9 +43,9 @@\n else:\n path = filename\n \n- # Skip full translated check for *.po - they can always be included\n+ # Skip *.po - they are handled separately\n if filename.endswith(\".po\"):\n- return [path]\n+ return []\n \n if item.find(\"phrases\").text == item.find(\"approved\").text:\n return [path]\n@@ -79,4 +79,5 @@\n completed = []\n for item in files:\n completed += process_item(item, csu_language_code=args.csu_code)\n- print('\\n'.join(completed))\n+ for path in completed:\n+ print(path)\ndiff --git a/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py b/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py\n--- a/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py\n+++ b/infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py\n@@ -12,14 +12,14 @@\n Returns:\n (list) list of project crowdin language codes\n \"\"\"\n- info_xml = api.api_call_xml(\"info\")\n- languages = info_xml.find('languages')\n- translatable_languages = []\n- for language in languages:\n- # Check it's not the incontext pseudo language\n- if language.find(\"can_translate\").text == \"1\":\n- translatable_languages.append(language.find('code').text)\n- return translatable_languages\n+ active_languages = []\n+ trans_status = api.api_call_json(\"status\")\n+ for language in trans_status:\n+ # Check language has actually had some translation done\n+ if int(language[\"words_approved\"]) > 0:\n+ active_languages.append(language[\"code\"])\n+ return active_languages\n \n if __name__ == \"__main__\":\n- print('\\n'.join(get_project_languages()))\n+ for language in get_project_languages():\n+ print(language)\n", "issue": "Implement automatic update of .po file \nThe .po file needs to be updated with all static strings for translation, by running the `python manage.py makemessages`. 
This needs to be run upon any change to templates or database content.\r\n\r\nThis process should be automated on Travis to run when any such files are updated on `develop`.\n", "before_files": [{"content": "\"\"\"Script to print list of file paths of all completely translated files for a given language.\"\"\"\n\nimport os\nimport argparse\n\nfrom crowdin_bot import api\n\nSOURCE_LANGUAGE = \"en\"\n\ndef get_language_info(language):\n \"\"\"Get xml tree from language info api call.\n\n Args:\n language: (str) crowdin language code\n\n Returns:\n lxml.etree object\n \"\"\"\n return api.api_call_xml(\n \"language-status\",\n language=language\n )\n\ndef process_item(item, parent_path=None, csu_language_code=None):\n \"\"\"Return list of completely translated file paths in a given directory tree node.\n\n Args:\n item: (etree.Element): itemm node in language-status xml tree\n (see https://support.crowdin.com/api/language-status/)\n parent_path: (str) path to the translated file node (None if the current item is\n the root of the directory tree).\n csu_language_code: (str) Language code (in locale format) on CSU end\n (may differ from crowdin language code according to language mapping\n in yaml file)\n\n Returns:\n (list) list of file paths that are completely translated\n \"\"\"\n if item.find(\"node_type\").text == \"file\":\n filename = item.find(\"name\").text\n if parent_path:\n path = os.path.join(parent_path, filename)\n else:\n path = filename\n\n # Skip full translated check for *.po - they can always be included\n if filename.endswith(\".po\"):\n return [path]\n\n if item.find(\"phrases\").text == item.find(\"approved\").text:\n return [path]\n else:\n return []\n\n else:\n inner_nodes = item.find(\"files\")\n dirname = item.find(\"name\").text\n if dirname == SOURCE_LANGUAGE:\n dirname = csu_language_code\n if parent_path:\n path = os.path.join(parent_path, dirname)\n else:\n path = dirname\n completed = []\n for inner_node in inner_nodes:\n completed += process_item(inner_node, parent_path=path, csu_language_code=csu_language_code)\n return completed\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument('--crowdin-code', required=True,\n help='Crowdin language code for target language')\n parser.add_argument('--csu-code', required=True,\n help='CSU language code for target language')\n args = parser.parse_args()\n lang_info = get_language_info(args.crowdin_code)\n files = lang_info.find(\"files\")\n completed = []\n for item in files:\n completed += process_item(item, csu_language_code=args.csu_code)\n print('\\n'.join(completed))\n", "path": "infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py"}, {"content": "\"\"\"Script to print list of all crowdin language codes for project.\"\"\"\n\nfrom crowdin_bot import api\n\nNS_DICT = {\n 'ns': \"urn:oasis:names:tc:xliff:document:1.2\"\n}\n\ndef get_project_languages():\n \"\"\"Get list of crowdin language codes.\n\n Returns:\n (list) list of project crowdin language codes\n \"\"\"\n info_xml = api.api_call_xml(\"info\")\n languages = info_xml.find('languages')\n translatable_languages = []\n for language in languages:\n # Check it's not the incontext pseudo language\n if language.find(\"can_translate\").text == \"1\":\n translatable_languages.append(language.find('code').text)\n return translatable_languages\n\nif __name__ == \"__main__\":\n print('\\n'.join(get_project_languages()))\n", "path": 
"infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py"}], "after_files": [{"content": "\"\"\"Script to print list of file paths of all completely translated files for a given language.\"\"\"\n\nimport os\nimport argparse\n\nfrom crowdin_bot import api\n\nSOURCE_LANGUAGE = \"en\"\n\ndef get_language_info(language):\n \"\"\"Get xml tree from language info api call.\n\n Args:\n language: (str) crowdin language code\n\n Returns:\n lxml.etree object\n \"\"\"\n return api.api_call_xml(\n \"language-status\",\n language=language\n )\n\ndef process_item(item, parent_path=None, csu_language_code=None):\n \"\"\"Return list of completely translated file paths in a given directory tree node.\n\n Args:\n item: (etree.Element): itemm node in language-status xml tree\n (see https://support.crowdin.com/api/language-status/)\n parent_path: (str) path to the translated file node (None if the current item is\n the root of the directory tree).\n csu_language_code: (str) Language code (in locale format) on CSU end\n (may differ from crowdin language code according to language mapping\n in yaml file)\n\n Returns:\n (list) list of file paths that are completely translated\n \"\"\"\n if item.find(\"node_type\").text == \"file\":\n filename = item.find(\"name\").text\n if parent_path:\n path = os.path.join(parent_path, filename)\n else:\n path = filename\n\n # Skip *.po - they are handled separately\n if filename.endswith(\".po\"):\n return []\n\n if item.find(\"phrases\").text == item.find(\"approved\").text:\n return [path]\n else:\n return []\n\n else:\n inner_nodes = item.find(\"files\")\n dirname = item.find(\"name\").text\n if dirname == SOURCE_LANGUAGE:\n dirname = csu_language_code\n if parent_path:\n path = os.path.join(parent_path, dirname)\n else:\n path = dirname\n completed = []\n for inner_node in inner_nodes:\n completed += process_item(inner_node, parent_path=path, csu_language_code=csu_language_code)\n return completed\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument('--crowdin-code', required=True,\n help='Crowdin language code for target language')\n parser.add_argument('--csu-code', required=True,\n help='CSU language code for target language')\n args = parser.parse_args()\n lang_info = get_language_info(args.crowdin_code)\n files = lang_info.find(\"files\")\n completed = []\n for item in files:\n completed += process_item(item, csu_language_code=args.csu_code)\n for path in completed:\n print(path)\n", "path": "infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_complete_translations.py"}, {"content": "\"\"\"Script to print list of all crowdin language codes for project.\"\"\"\n\nfrom crowdin_bot import api\n\nNS_DICT = {\n 'ns': \"urn:oasis:names:tc:xliff:document:1.2\"\n}\n\ndef get_project_languages():\n \"\"\"Get list of crowdin language codes.\n\n Returns:\n (list) list of project crowdin language codes\n \"\"\"\n active_languages = []\n trans_status = api.api_call_json(\"status\")\n for language in trans_status:\n # Check language has actually had some translation done\n if int(language[\"words_approved\"]) > 0:\n active_languages.append(language[\"code\"])\n return active_languages\n\nif __name__ == \"__main__\":\n for language in get_project_languages():\n print(language)\n", "path": "infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py"}]} | 1,341 | 558 |
gh_patches_debug_3174 | rasdani/github-patches | git_diff | pre-commit__pre-commit-1614 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Can't use ruby 2.7.1 on MacOS
Hi,
Bumping my Ruby hooks to version 2.7.1 worked fine for me on Ubuntu, but it doesn't work for my colleagues using macOS. Is there something to do about bumping the rbenv archives?
Thanks
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/make_archives.py`
Content:
```
1 import argparse
2 import os.path
3 import tarfile
4 from typing import Optional
5 from typing import Sequence
6
7 from pre_commit import output
8 from pre_commit.util import cmd_output_b
9 from pre_commit.util import rmtree
10 from pre_commit.util import tmpdir
11
12
13 # This is a script for generating the tarred resources for git repo
14 # dependencies. Currently it's just for "vendoring" ruby support packages.
15
16
17 REPOS = (
18 ('rbenv', 'git://github.com/rbenv/rbenv', 'a3fa9b7'),
19 ('ruby-build', 'git://github.com/rbenv/ruby-build', '1a902f3'),
20 (
21 'ruby-download',
22 'git://github.com/garnieretienne/rvm-download',
23 '09bd7c6',
24 ),
25 )
26
27
28 def make_archive(name: str, repo: str, ref: str, destdir: str) -> str:
29 """Makes an archive of a repository in the given destdir.
30
31 :param text name: Name to give the archive. For instance foo. The file
32 that is created will be called foo.tar.gz.
33 :param text repo: Repository to clone.
34 :param text ref: Tag/SHA/branch to check out.
35 :param text destdir: Directory to place archives in.
36 """
37 output_path = os.path.join(destdir, f'{name}.tar.gz')
38 with tmpdir() as tempdir:
39 # Clone the repository to the temporary directory
40 cmd_output_b('git', 'clone', repo, tempdir)
41 cmd_output_b('git', 'checkout', ref, cwd=tempdir)
42
43 # We don't want the '.git' directory
44 # It adds a bunch of size to the archive and we don't use it at
45 # runtime
46 rmtree(os.path.join(tempdir, '.git'))
47
48 with tarfile.open(output_path, 'w|gz') as tf:
49 tf.add(tempdir, name)
50
51 return output_path
52
53
54 def main(argv: Optional[Sequence[str]] = None) -> int:
55 parser = argparse.ArgumentParser()
56 parser.add_argument('--dest', default='pre_commit/resources')
57 args = parser.parse_args(argv)
58 for archive_name, repo, ref in REPOS:
59 output.write_line(f'Making {archive_name}.tar.gz for {repo}@{ref}')
60 make_archive(archive_name, repo, ref, args.dest)
61 return 0
62
63
64 if __name__ == '__main__':
65 exit(main())
66
```
--- END FILES ---
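For reference, a hedged usage sketch of the helper above, regenerating a single vendored archive by hand; the ref shown is illustrative and should be whichever rbenv commit ships the Ruby definitions you need:

```python
from pre_commit.make_archives import make_archive

# Rebuild the vendored rbenv tarball from a newer upstream commit.
path = make_archive(
    name="rbenv",
    repo="git://github.com/rbenv/rbenv",
    ref="0843745",          # illustrative ref -- pick the commit you need
    destdir="pre_commit/resources",
)
print(path)  # pre_commit/resources/rbenv.tar.gz
```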
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pre_commit/make_archives.py b/pre_commit/make_archives.py
--- a/pre_commit/make_archives.py
+++ b/pre_commit/make_archives.py
@@ -15,8 +15,8 @@
REPOS = (
- ('rbenv', 'git://github.com/rbenv/rbenv', 'a3fa9b7'),
- ('ruby-build', 'git://github.com/rbenv/ruby-build', '1a902f3'),
+ ('rbenv', 'git://github.com/rbenv/rbenv', '0843745'),
+ ('ruby-build', 'git://github.com/rbenv/ruby-build', '258455e'),
(
'ruby-download',
'git://github.com/garnieretienne/rvm-download',
| {"golden_diff": "diff --git a/pre_commit/make_archives.py b/pre_commit/make_archives.py\n--- a/pre_commit/make_archives.py\n+++ b/pre_commit/make_archives.py\n@@ -15,8 +15,8 @@\n \n \n REPOS = (\n- ('rbenv', 'git://github.com/rbenv/rbenv', 'a3fa9b7'),\n- ('ruby-build', 'git://github.com/rbenv/ruby-build', '1a902f3'),\n+ ('rbenv', 'git://github.com/rbenv/rbenv', '0843745'),\n+ ('ruby-build', 'git://github.com/rbenv/ruby-build', '258455e'),\n (\n 'ruby-download',\n 'git://github.com/garnieretienne/rvm-download',\n", "issue": "Can't use ruby 2.7.1 on MacOS\nHi, \r\n\r\nBumping my ruby hooks to version 2.7.1 worked fine for me on Ubuntu but doesn't work for my colleagues using MacOS, is there something to do about bumping rbenv archives ? \r\n\r\nThanks\n", "before_files": [{"content": "import argparse\nimport os.path\nimport tarfile\nfrom typing import Optional\nfrom typing import Sequence\n\nfrom pre_commit import output\nfrom pre_commit.util import cmd_output_b\nfrom pre_commit.util import rmtree\nfrom pre_commit.util import tmpdir\n\n\n# This is a script for generating the tarred resources for git repo\n# dependencies. Currently it's just for \"vendoring\" ruby support packages.\n\n\nREPOS = (\n ('rbenv', 'git://github.com/rbenv/rbenv', 'a3fa9b7'),\n ('ruby-build', 'git://github.com/rbenv/ruby-build', '1a902f3'),\n (\n 'ruby-download',\n 'git://github.com/garnieretienne/rvm-download',\n '09bd7c6',\n ),\n)\n\n\ndef make_archive(name: str, repo: str, ref: str, destdir: str) -> str:\n \"\"\"Makes an archive of a repository in the given destdir.\n\n :param text name: Name to give the archive. For instance foo. The file\n that is created will be called foo.tar.gz.\n :param text repo: Repository to clone.\n :param text ref: Tag/SHA/branch to check out.\n :param text destdir: Directory to place archives in.\n \"\"\"\n output_path = os.path.join(destdir, f'{name}.tar.gz')\n with tmpdir() as tempdir:\n # Clone the repository to the temporary directory\n cmd_output_b('git', 'clone', repo, tempdir)\n cmd_output_b('git', 'checkout', ref, cwd=tempdir)\n\n # We don't want the '.git' directory\n # It adds a bunch of size to the archive and we don't use it at\n # runtime\n rmtree(os.path.join(tempdir, '.git'))\n\n with tarfile.open(output_path, 'w|gz') as tf:\n tf.add(tempdir, name)\n\n return output_path\n\n\ndef main(argv: Optional[Sequence[str]] = None) -> int:\n parser = argparse.ArgumentParser()\n parser.add_argument('--dest', default='pre_commit/resources')\n args = parser.parse_args(argv)\n for archive_name, repo, ref in REPOS:\n output.write_line(f'Making {archive_name}.tar.gz for {repo}@{ref}')\n make_archive(archive_name, repo, ref, args.dest)\n return 0\n\n\nif __name__ == '__main__':\n exit(main())\n", "path": "pre_commit/make_archives.py"}], "after_files": [{"content": "import argparse\nimport os.path\nimport tarfile\nfrom typing import Optional\nfrom typing import Sequence\n\nfrom pre_commit import output\nfrom pre_commit.util import cmd_output_b\nfrom pre_commit.util import rmtree\nfrom pre_commit.util import tmpdir\n\n\n# This is a script for generating the tarred resources for git repo\n# dependencies. 
Currently it's just for \"vendoring\" ruby support packages.\n\n\nREPOS = (\n ('rbenv', 'git://github.com/rbenv/rbenv', '0843745'),\n ('ruby-build', 'git://github.com/rbenv/ruby-build', '258455e'),\n (\n 'ruby-download',\n 'git://github.com/garnieretienne/rvm-download',\n '09bd7c6',\n ),\n)\n\n\ndef make_archive(name: str, repo: str, ref: str, destdir: str) -> str:\n \"\"\"Makes an archive of a repository in the given destdir.\n\n :param text name: Name to give the archive. For instance foo. The file\n that is created will be called foo.tar.gz.\n :param text repo: Repository to clone.\n :param text ref: Tag/SHA/branch to check out.\n :param text destdir: Directory to place archives in.\n \"\"\"\n output_path = os.path.join(destdir, f'{name}.tar.gz')\n with tmpdir() as tempdir:\n # Clone the repository to the temporary directory\n cmd_output_b('git', 'clone', repo, tempdir)\n cmd_output_b('git', 'checkout', ref, cwd=tempdir)\n\n # We don't want the '.git' directory\n # It adds a bunch of size to the archive and we don't use it at\n # runtime\n rmtree(os.path.join(tempdir, '.git'))\n\n with tarfile.open(output_path, 'w|gz') as tf:\n tf.add(tempdir, name)\n\n return output_path\n\n\ndef main(argv: Optional[Sequence[str]] = None) -> int:\n parser = argparse.ArgumentParser()\n parser.add_argument('--dest', default='pre_commit/resources')\n args = parser.parse_args(argv)\n for archive_name, repo, ref in REPOS:\n output.write_line(f'Making {archive_name}.tar.gz for {repo}@{ref}')\n make_archive(archive_name, repo, ref, args.dest)\n return 0\n\n\nif __name__ == '__main__':\n exit(main())\n", "path": "pre_commit/make_archives.py"}]} | 994 | 186 |
gh_patches_debug_61831 | rasdani/github-patches | git_diff | pulp__pulpcore-3411 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
0077_move_remote_url_credentials.py fails on Remotes that have @ in path, not netloc
**Version**
3.18.10
**Describe the bug**
Migration 0077 fails when you have a remote that has an @ somewhere in the path
```
Applying core.0077_move_remote_url_credentials...Traceback (most recent call last):
File "/usr/bin/pulpcore-manager", line 33, in <module>
sys.exit(load_entry_point('pulpcore==3.18.10', 'console_scripts', 'pulpcore-manager')())
File "/usr/lib/python3.9/site-packages/pulpcore/app/manage.py", line 11, in manage
execute_from_command_line(sys.argv)
File "/usr/lib/python3.9/site-packages/django/core/management/__init__.py", line 419, in execute_from_command_line
utility.execute()
File "/usr/lib/python3.9/site-packages/django/core/management/__init__.py", line 413, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/lib/python3.9/site-packages/django/core/management/base.py", line 354, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/lib/python3.9/site-packages/django/core/management/base.py", line 398, in execute
output = self.handle(*args, **options)
File "/usr/lib/python3.9/site-packages/django/core/management/base.py", line 89, in wrapped
res = handle_func(*args, **kwargs)
File "/usr/lib/python3.9/site-packages/django/core/management/commands/migrate.py", line 244, in handle
post_migrate_state = executor.migrate(
File "/usr/lib/python3.9/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/usr/lib/python3.9/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/usr/lib/python3.9/site-packages/django/db/migrations/executor.py", line 227, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/lib/python3.9/site-packages/django/db/migrations/migration.py", line 126, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/lib/python3.9/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/usr/lib/python3.9/site-packages/pulpcore/app/migrations/0077_move_remote_url_credentials.py", line 19, in move_remote_url_credentials
_, url_split = url.netloc.rsplit("@", maxsplit=1)
ValueError: not enough values to unpack (expected 2, got 1)
```
**To Reproduce**
Steps to reproduce the behavior:
* Have a remote `https://download.copr.fedorainfracloud.org/results/@caddy/caddy/epel-8-x86_64/`
* Try to migrate 0077
**Expected behavior**
migration applies
**Additional context**
https://community.theforeman.org/t/foreman-3-3-katello-4-5-upgrade-failed-pulpcore-manager-migrate-noinput/31088
--- END ISSUE ---
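The failure mode is easy to reproduce with `urlparse` alone: for a URL whose only `@` is in the path, `netloc` contains no `@`, so the migration's two-value unpacking fails. A minimal illustration, independent of pulpcore:

```python
from urllib.parse import urlparse

url = urlparse(
    "https://download.copr.fedorainfracloud.org/results/@caddy/caddy/epel-8-x86_64/"
)
print(url.netloc)    # download.copr.fedorainfracloud.org  (no "@")
print(url.username)  # None -- the "@" is part of the path, not credentials

try:
    _, netloc_without_creds = url.netloc.rsplit("@", maxsplit=1)
except ValueError as exc:
    print(exc)  # not enough values to unpack (expected 2, got 1)
```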
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pulpcore/app/migrations/0077_move_remote_url_credentials.py`
Content:
```
1 # Generated by Django 3.2.6 on 2021-09-29 14:00
2
3 from urllib.parse import urlparse, urlunparse
4
5 from django.db import migrations
6
7
8 def move_remote_url_credentials(apps, schema_editor):
9 Remote = apps.get_model("core", "Remote")
10
11 for remote in Remote.objects.filter(url__contains="@").iterator():
12 url = urlparse(remote.url)
13
14 if not remote.username:
15 remote.username = url.username
16 if not remote.password:
17 remote.password = url.password
18
19 _, url_split = url.netloc.rsplit("@", maxsplit=1)
20 remote.url = urlunparse(url._replace(netloc=url_split))
21 remote.save()
22
23
24 class Migration(migrations.Migration):
25
26 dependencies = [
27 ('core', '0076_remove_reserved_resource'),
28 ]
29
30 operations = [
31 migrations.RunPython(
32 code=move_remote_url_credentials,
33 reverse_code=migrations.RunPython.noop,
34 elidable=True,
35 )
36 ]
37
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pulpcore/app/migrations/0077_move_remote_url_credentials.py b/pulpcore/app/migrations/0077_move_remote_url_credentials.py
--- a/pulpcore/app/migrations/0077_move_remote_url_credentials.py
+++ b/pulpcore/app/migrations/0077_move_remote_url_credentials.py
@@ -11,6 +11,11 @@
for remote in Remote.objects.filter(url__contains="@").iterator():
url = urlparse(remote.url)
+ if '@' not in url.netloc:
+ # URLs can have an @ in other places than the netloc,
+ # but those do not indicate credentials
+ continue
+
if not remote.username:
remote.username = url.username
if not remote.password:
| {"golden_diff": "diff --git a/pulpcore/app/migrations/0077_move_remote_url_credentials.py b/pulpcore/app/migrations/0077_move_remote_url_credentials.py\n--- a/pulpcore/app/migrations/0077_move_remote_url_credentials.py\n+++ b/pulpcore/app/migrations/0077_move_remote_url_credentials.py\n@@ -11,6 +11,11 @@\n for remote in Remote.objects.filter(url__contains=\"@\").iterator():\n url = urlparse(remote.url)\n \n+ if '@' not in url.netloc:\n+ # URLs can have an @ in other places than the netloc,\n+ # but those do not indicate credentials\n+ continue\n+\n if not remote.username:\n remote.username = url.username\n if not remote.password:\n", "issue": "0077_move_remote_url_credentials.py fails on Remotes that have @ in path, not netloc\n**Version**\r\n3.18.10\r\n\r\n**Describe the bug**\r\nMigration 0077 fails when you have a remote that has an @ somewhere in the path\r\n\r\n```\r\n Applying core.0077_move_remote_url_credentials...Traceback (most recent call last):\r\n File \"/usr/bin/pulpcore-manager\", line 33, in <module>\r\n sys.exit(load_entry_point('pulpcore==3.18.10', 'console_scripts', 'pulpcore-manager')())\r\n File \"/usr/lib/python3.9/site-packages/pulpcore/app/manage.py\", line 11, in manage\r\n execute_from_command_line(sys.argv)\r\n File \"/usr/lib/python3.9/site-packages/django/core/management/__init__.py\", line 419, in execute_from_command_line\r\n utility.execute()\r\n File \"/usr/lib/python3.9/site-packages/django/core/management/__init__.py\", line 413, in execute\r\n self.fetch_command(subcommand).run_from_argv(self.argv)\r\n File \"/usr/lib/python3.9/site-packages/django/core/management/base.py\", line 354, in run_from_argv\r\n self.execute(*args, **cmd_options)\r\n File \"/usr/lib/python3.9/site-packages/django/core/management/base.py\", line 398, in execute\r\n output = self.handle(*args, **options)\r\n File \"/usr/lib/python3.9/site-packages/django/core/management/base.py\", line 89, in wrapped\r\n res = handle_func(*args, **kwargs)\r\n File \"/usr/lib/python3.9/site-packages/django/core/management/commands/migrate.py\", line 244, in handle\r\n post_migrate_state = executor.migrate(\r\n File \"/usr/lib/python3.9/site-packages/django/db/migrations/executor.py\", line 117, in migrate\r\n state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)\r\n File \"/usr/lib/python3.9/site-packages/django/db/migrations/executor.py\", line 147, in _migrate_all_forwards\r\n state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)\r\n File \"/usr/lib/python3.9/site-packages/django/db/migrations/executor.py\", line 227, in apply_migration\r\n state = migration.apply(state, schema_editor)\r\n File \"/usr/lib/python3.9/site-packages/django/db/migrations/migration.py\", line 126, in apply\r\n operation.database_forwards(self.app_label, schema_editor, old_state, project_state)\r\n File \"/usr/lib/python3.9/site-packages/django/db/migrations/operations/special.py\", line 190, in database_forwards\r\n self.code(from_state.apps, schema_editor)\r\n File \"/usr/lib/python3.9/site-packages/pulpcore/app/migrations/0077_move_remote_url_credentials.py\", line 19, in move_remote_url_credentials\r\n _, url_split = url.netloc.rsplit(\"@\", maxsplit=1)\r\nValueError: not enough values to unpack (expected 2, got 1)\r\n```\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n* Have a remote `https://download.copr.fedorainfracloud.org/results/@caddy/caddy/epel-8-x86_64/`\r\n* Try to migrate 0077\r\n\r\n**Expected 
behavior**\r\nmigration aplies\r\n\r\n**Additional context**\r\nhttps://community.theforeman.org/t/foreman-3-3-katello-4-5-upgrade-failed-pulpcore-manager-migrate-noinput/31088\r\n\n", "before_files": [{"content": "# Generated by Django 3.2.6 on 2021-09-29 14:00\n\nfrom urllib.parse import urlparse, urlunparse\n\nfrom django.db import migrations\n\n\ndef move_remote_url_credentials(apps, schema_editor):\n Remote = apps.get_model(\"core\", \"Remote\")\n\n for remote in Remote.objects.filter(url__contains=\"@\").iterator():\n url = urlparse(remote.url)\n\n if not remote.username:\n remote.username = url.username\n if not remote.password:\n remote.password = url.password\n\n _, url_split = url.netloc.rsplit(\"@\", maxsplit=1)\n remote.url = urlunparse(url._replace(netloc=url_split))\n remote.save()\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('core', '0076_remove_reserved_resource'),\n ]\n\n operations = [\n migrations.RunPython(\n code=move_remote_url_credentials,\n reverse_code=migrations.RunPython.noop,\n elidable=True,\n )\n ]\n", "path": "pulpcore/app/migrations/0077_move_remote_url_credentials.py"}], "after_files": [{"content": "# Generated by Django 3.2.6 on 2021-09-29 14:00\n\nfrom urllib.parse import urlparse, urlunparse\n\nfrom django.db import migrations\n\n\ndef move_remote_url_credentials(apps, schema_editor):\n Remote = apps.get_model(\"core\", \"Remote\")\n\n for remote in Remote.objects.filter(url__contains=\"@\").iterator():\n url = urlparse(remote.url)\n\n if '@' not in url.netloc:\n # URLs can have an @ in other places than the netloc,\n # but those do not indicate credentials\n continue\n\n if not remote.username:\n remote.username = url.username\n if not remote.password:\n remote.password = url.password\n\n _, url_split = url.netloc.rsplit(\"@\", maxsplit=1)\n remote.url = urlunparse(url._replace(netloc=url_split))\n remote.save()\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('core', '0076_remove_reserved_resource'),\n ]\n\n operations = [\n migrations.RunPython(\n code=move_remote_url_credentials,\n reverse_code=migrations.RunPython.noop,\n elidable=True,\n )\n ]\n", "path": "pulpcore/app/migrations/0077_move_remote_url_credentials.py"}]} | 1,388 | 172 |
gh_patches_debug_3983 | rasdani/github-patches | git_diff | pwndbg__pwndbg-642 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
nextcall with symbol bug
### Description
After running `nextcall` with an unknown symbol/address (for example `nextcall lol`), gdb won't run again.
### Steps to reproduce
```
gdb whatever
> start
> nextcall lol
> start
> continue
Warning:
Cannot insert breakpoint -46.
Cannot access memory at address 0x7ffff7a6f916
Command aborted.
```
### My setup
GNU gdb (Debian 7.12-6) 7.12.0.20161007
--- END ISSUE ---
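The leftover state can be cleared by hand through gdb's Python API (which the file below already relies on); a minimal sketch of the cleanup a fix would need to perform automatically:

```python
import gdb

# Delete stale temporary breakpoints left behind by an aborted
# break_next_* helper, so a later `start`/`continue` is not rejected.
for bp in gdb.breakpoints() or []:   # returns None on older gdb when empty
    if bp.temporary:
        bp.delete()
```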
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pwndbg/next.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 """
4 Commands for setting temporary breakpoints on the next
5 instruction of some type (call, branch, etc.)
6 """
7 from __future__ import absolute_import
8 from __future__ import division
9 from __future__ import print_function
10 from __future__ import unicode_literals
11
12 import re
13
14 import capstone
15 import gdb
16
17 import pwndbg.disasm
18 import pwndbg.regs
19 from pwndbg.color import message
20
21 jumps = set((
22 capstone.CS_GRP_CALL,
23 capstone.CS_GRP_JUMP,
24 capstone.CS_GRP_RET,
25 capstone.CS_GRP_IRET
26 ))
27
28 interrupts = set((capstone.CS_GRP_INT,))
29
30
31 def next_int(address=None):
32 """
33 If there is a syscall in the current basic black,
34 return the instruction of the one closest to $PC.
35
36 Otherwise, return None.
37 """
38 if address is None:
39 ins = pwndbg.disasm.one(pwndbg.regs.pc)
40 if not ins:
41 return None
42 address = ins.next
43
44 ins = pwndbg.disasm.one(address)
45 while ins:
46 if set(ins.groups) & jumps:
47 return None
48 if set(ins.groups) & interrupts:
49 return ins
50 ins = pwndbg.disasm.one(ins.next)
51
52 return None
53
54
55 def next_branch(address=None):
56 if address is None:
57 ins = pwndbg.disasm.one(pwndbg.regs.pc)
58 if not ins:
59 return None
60 address = ins.next
61
62 ins = pwndbg.disasm.one(address)
63 while ins:
64 if set(ins.groups) & jumps:
65 return ins
66 ins = pwndbg.disasm.one(ins.next)
67
68 return None
69
70
71 def break_next_branch(address=None):
72 ins = next_branch(address)
73
74 if ins:
75 gdb.Breakpoint("*%#x" % ins.address, internal=True, temporary=True)
76 gdb.execute('continue', from_tty=False, to_string=True)
77 return ins
78
79
80 def break_next_interrupt(address=None):
81 ins = next_int(address)
82
83 if ins:
84 gdb.Breakpoint("*%#x" % ins.address, internal=True, temporary=True)
85 gdb.execute('continue', from_tty=False, to_string=True)
86 return ins
87
88
89 def break_next_call(symbol_regex=None):
90 while pwndbg.proc.alive:
91 ins = break_next_branch()
92
93 if not ins:
94 break
95
96 # continue if not a call
97 if capstone.CS_GRP_CALL not in ins.groups:
98 continue
99
100 # return call if we don't search for a symbol
101 if not symbol_regex:
102 return ins
103
104 # return call if we match target address
105 if ins.target_const and re.match('%s$' % symbol_regex, hex(ins.target)):
106 return ins
107
108 # return call if we match symbol name
109 if ins.symbol and re.match('%s$' % symbol_regex, ins.symbol):
110 return ins
111
112
113 def break_next_ret(address=None):
114 while pwndbg.proc.alive:
115 ins = break_next_branch(address)
116
117 if not ins:
118 break
119
120 if capstone.CS_GRP_RET in ins.groups:
121 return ins
122
123
124 def break_on_program_code():
125 """
126 Breaks on next instruction that belongs to process' objfile code.
127 :return: True for success, False when process ended or when pc is at the code.
128 """
129 mp = pwndbg.proc.mem_page
130 start = mp.start
131 end = mp.end
132
133 if start <= pwndbg.regs.pc < end:
134 print(message.error('The pc is already at the binary objfile code. Not stepping.'))
135 return False
136
137 while pwndbg.proc.alive:
138 gdb.execute('si', from_tty=False, to_string=False)
139
140 addr = pwndbg.regs.pc
141 if start <= addr < end:
142 return True
143
144 return False
145
146
147 def break_on_next(address=None):
148 address = address or pwndbg.regs.pc
149 ins = pwndbg.disasm.one(address)
150
151 gdb.Breakpoint("*%#x" % (ins.address + ins.size), temporary=True)
152 gdb.execute('continue', from_tty=False, to_string=True)
153
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pwndbg/next.py b/pwndbg/next.py
--- a/pwndbg/next.py
+++ b/pwndbg/next.py
@@ -27,6 +27,14 @@
interrupts = set((capstone.CS_GRP_INT,))
[email protected]
+def clear_temp_breaks():
+ if not pwndbg.proc.alive:
+ breakpoints = gdb.breakpoints()
+ if breakpoints:
+ for bp in breakpoints:
+ if bp.temporary and not bp.visible: #visible is used instead of internal because older gdb's don't support internal
+ bp.delete()
def next_int(address=None):
"""
| {"golden_diff": "diff --git a/pwndbg/next.py b/pwndbg/next.py\n--- a/pwndbg/next.py\n+++ b/pwndbg/next.py\n@@ -27,6 +27,14 @@\n \n interrupts = set((capstone.CS_GRP_INT,))\n \[email protected]\n+def clear_temp_breaks():\n+ if not pwndbg.proc.alive:\n+ breakpoints = gdb.breakpoints()\n+ if breakpoints:\n+ for bp in breakpoints:\n+ if bp.temporary and not bp.visible: #visible is used instead of internal because older gdb's don't support internal \n+ bp.delete()\n \n def next_int(address=None):\n \"\"\"\n", "issue": "nextcall with symbol bug\n### Description\r\n\r\nafter nextcall with unknown symbol/address (like nextcall lol) gdb won't run again\r\n\r\n### Steps to reproduce\r\n```\r\ngdb whatever\r\n> start\r\n> nextcall lol\r\n> start\r\n> continue\r\nWarning:\r\nCannot insert breakpoint -46.\r\nCannot access memory at address 0x7ffff7a6f916\r\n\r\nCommand aborted.\r\n```\r\n\r\n### My setup\r\n\r\nGNU gdb (Debian 7.12-6) 7.12.0.20161007\nnextcall with symbol bug\n### Description\r\n\r\nafter nextcall with unknown symbol/address (like nextcall lol) gdb won't run again\r\n\r\n### Steps to reproduce\r\n```\r\ngdb whatever\r\n> start\r\n> nextcall lol\r\n> start\r\n> continue\r\nWarning:\r\nCannot insert breakpoint -46.\r\nCannot access memory at address 0x7ffff7a6f916\r\n\r\nCommand aborted.\r\n```\r\n\r\n### My setup\r\n\r\nGNU gdb (Debian 7.12-6) 7.12.0.20161007\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nCommands for setting temporary breakpoints on the next\ninstruction of some type (call, branch, etc.)\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport re\n\nimport capstone\nimport gdb\n\nimport pwndbg.disasm\nimport pwndbg.regs\nfrom pwndbg.color import message\n\njumps = set((\n capstone.CS_GRP_CALL,\n capstone.CS_GRP_JUMP,\n capstone.CS_GRP_RET,\n capstone.CS_GRP_IRET\n))\n\ninterrupts = set((capstone.CS_GRP_INT,))\n\n\ndef next_int(address=None):\n \"\"\"\n If there is a syscall in the current basic black,\n return the instruction of the one closest to $PC.\n\n Otherwise, return None.\n \"\"\"\n if address is None:\n ins = pwndbg.disasm.one(pwndbg.regs.pc)\n if not ins:\n return None\n address = ins.next\n\n ins = pwndbg.disasm.one(address)\n while ins:\n if set(ins.groups) & jumps:\n return None\n if set(ins.groups) & interrupts:\n return ins\n ins = pwndbg.disasm.one(ins.next)\n\n return None\n\n\ndef next_branch(address=None):\n if address is None:\n ins = pwndbg.disasm.one(pwndbg.regs.pc)\n if not ins:\n return None\n address = ins.next\n\n ins = pwndbg.disasm.one(address)\n while ins:\n if set(ins.groups) & jumps:\n return ins\n ins = pwndbg.disasm.one(ins.next)\n\n return None\n\n\ndef break_next_branch(address=None):\n ins = next_branch(address)\n\n if ins:\n gdb.Breakpoint(\"*%#x\" % ins.address, internal=True, temporary=True)\n gdb.execute('continue', from_tty=False, to_string=True)\n return ins\n\n\ndef break_next_interrupt(address=None):\n ins = next_int(address)\n\n if ins:\n gdb.Breakpoint(\"*%#x\" % ins.address, internal=True, temporary=True)\n gdb.execute('continue', from_tty=False, to_string=True)\n return ins\n\n\ndef break_next_call(symbol_regex=None):\n while pwndbg.proc.alive:\n ins = break_next_branch()\n\n if not ins:\n break\n\n # continue if not a call\n if capstone.CS_GRP_CALL not in ins.groups:\n continue\n\n # return call if we don't search for a symbol\n if not symbol_regex:\n return 
ins\n\n # return call if we match target address\n if ins.target_const and re.match('%s$' % symbol_regex, hex(ins.target)):\n return ins\n\n # return call if we match symbol name\n if ins.symbol and re.match('%s$' % symbol_regex, ins.symbol):\n return ins\n\n\ndef break_next_ret(address=None):\n while pwndbg.proc.alive:\n ins = break_next_branch(address)\n\n if not ins:\n break\n\n if capstone.CS_GRP_RET in ins.groups:\n return ins\n\n\ndef break_on_program_code():\n \"\"\"\n Breaks on next instruction that belongs to process' objfile code.\n :return: True for success, False when process ended or when pc is at the code.\n \"\"\"\n mp = pwndbg.proc.mem_page\n start = mp.start\n end = mp.end\n\n if start <= pwndbg.regs.pc < end:\n print(message.error('The pc is already at the binary objfile code. Not stepping.'))\n return False\n\n while pwndbg.proc.alive:\n gdb.execute('si', from_tty=False, to_string=False)\n\n addr = pwndbg.regs.pc\n if start <= addr < end:\n return True\n\n return False\n\n\ndef break_on_next(address=None):\n address = address or pwndbg.regs.pc\n ins = pwndbg.disasm.one(address)\n\n gdb.Breakpoint(\"*%#x\" % (ins.address + ins.size), temporary=True)\n gdb.execute('continue', from_tty=False, to_string=True)\n", "path": "pwndbg/next.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nCommands for setting temporary breakpoints on the next\ninstruction of some type (call, branch, etc.)\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport re\n\nimport capstone\nimport gdb\n\nimport pwndbg.disasm\nimport pwndbg.regs\nfrom pwndbg.color import message\n\njumps = set((\n capstone.CS_GRP_CALL,\n capstone.CS_GRP_JUMP,\n capstone.CS_GRP_RET,\n capstone.CS_GRP_IRET\n))\n\ninterrupts = set((capstone.CS_GRP_INT,))\n\[email protected]\ndef clear_temp_breaks():\n if not pwndbg.proc.alive:\n breakpoints = gdb.breakpoints()\n if breakpoints:\n for bp in breakpoints:\n if bp.temporary and not bp.visible: #visible is used instead of internal because older gdb's don't support internal \n bp.delete()\n\ndef next_int(address=None):\n \"\"\"\n If there is a syscall in the current basic black,\n return the instruction of the one closest to $PC.\n\n Otherwise, return None.\n \"\"\"\n if address is None:\n ins = pwndbg.disasm.one(pwndbg.regs.pc)\n if not ins:\n return None\n address = ins.next\n\n ins = pwndbg.disasm.one(address)\n while ins:\n if set(ins.groups) & jumps:\n return None\n if set(ins.groups) & interrupts:\n return ins\n ins = pwndbg.disasm.one(ins.next)\n\n return None\n\n\ndef next_branch(address=None):\n if address is None:\n ins = pwndbg.disasm.one(pwndbg.regs.pc)\n if not ins:\n return None\n address = ins.next\n\n ins = pwndbg.disasm.one(address)\n while ins:\n if set(ins.groups) & jumps:\n return ins\n ins = pwndbg.disasm.one(ins.next)\n\n return None\n\n\ndef break_next_branch(address=None):\n ins = next_branch(address)\n\n if ins:\n gdb.Breakpoint(\"*%#x\" % ins.address, internal=True, temporary=True)\n gdb.execute('continue', from_tty=False, to_string=True)\n return ins\n\n\ndef break_next_interrupt(address=None):\n ins = next_int(address)\n\n if ins:\n gdb.Breakpoint(\"*%#x\" % ins.address, internal=True, temporary=True)\n gdb.execute('continue', from_tty=False, to_string=True)\n return ins\n\n\ndef break_next_call(symbol_regex=None):\n while pwndbg.proc.alive:\n ins = break_next_branch()\n\n if not ins:\n break\n\n # 
continue if not a call\n if capstone.CS_GRP_CALL not in ins.groups:\n continue\n\n # return call if we don't search for a symbol\n if not symbol_regex:\n return ins\n\n # return call if we match target address\n if ins.target_const and re.match('%s$' % symbol_regex, hex(ins.target)):\n return ins\n\n # return call if we match symbol name\n if ins.symbol and re.match('%s$' % symbol_regex, ins.symbol):\n return ins\n\n\ndef break_next_ret(address=None):\n while pwndbg.proc.alive:\n ins = break_next_branch(address)\n\n if not ins:\n break\n\n if capstone.CS_GRP_RET in ins.groups:\n return ins\n\n\ndef break_on_program_code():\n \"\"\"\n Breaks on next instruction that belongs to process' objfile code.\n :return: True for success, False when process ended or when pc is at the code.\n \"\"\"\n mp = pwndbg.proc.mem_page\n start = mp.start\n end = mp.end\n\n if start <= pwndbg.regs.pc < end:\n print(message.error('The pc is already at the binary objfile code. Not stepping.'))\n return False\n\n while pwndbg.proc.alive:\n gdb.execute('si', from_tty=False, to_string=False)\n\n addr = pwndbg.regs.pc\n if start <= addr < end:\n return True\n\n return False\n\n\ndef break_on_next(address=None):\n address = address or pwndbg.regs.pc\n ins = pwndbg.disasm.one(address)\n\n gdb.Breakpoint(\"*%#x\" % (ins.address + ins.size), temporary=True)\n gdb.execute('continue', from_tty=False, to_string=True)\n", "path": "pwndbg/next.py"}]} | 1,787 | 148 |
gh_patches_debug_28439 | rasdani/github-patches | git_diff | iterative__dvc-10423 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Feature proposal: `dvc artifacts get --show-url`
DVC currently supports `dvc get --show-url` as a way to retrieve just the URL of a DVC-versioned object as opposed to the object itself.
However, there is no equivalent for `dvc artifacts get`. This came as a customer request (to allow easier sharing of results even to people who are not DVC/DVC Studio users). It also has advantages e.g. in model deployment to Sagemaker (which requires the artifact URL on S3).
--- END ISSUE ---
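For illustration, a rough sketch of what such a flag could do, expressed with DVC's Python API; `dvc.api.artifacts_show` and `dvc.api.get_url` exist in recent DVC releases, but the exact signatures, the repository URL, and the artifact name below are assumptions for illustration, not the project's actual implementation:
```python
# Sketch only: resolve an artifact to its storage URL instead of downloading it.
# Repository URL, artifact name, and version are placeholders.
from dvc.api import artifacts_show, get_url

repo = "https://github.com/example/model-registry"
artifact = artifacts_show("mymodel", version="v1.2.0", repo=repo)

url = get_url(artifact["path"], repo=repo, rev=artifact["rev"])
print(url)  # e.g. an s3:// or gs:// location that can be shared without DVC
```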
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dvc/commands/artifacts.py`
Content:
```
1 from dvc.cli import completion, formatter
2 from dvc.cli.command import CmdBaseNoRepo
3 from dvc.cli.utils import DictAction, append_doc_link
4 from dvc.exceptions import DvcException
5 from dvc.log import logger
6
7 logger = logger.getChild(__name__)
8
9
10 class CmdArtifactsGet(CmdBaseNoRepo):
11 def run(self):
12 from dvc.repo.artifacts import Artifacts
13 from dvc.scm import CloneError
14 from dvc.ui import ui
15
16 try:
17 count, out = Artifacts.get(
18 self.args.url,
19 name=self.args.name,
20 version=self.args.rev,
21 stage=self.args.stage,
22 force=self.args.force,
23 config=self.args.config,
24 remote=self.args.remote,
25 remote_config=self.args.remote_config,
26 out=self.args.out,
27 )
28 ui.write(f"Downloaded {count} file(s) to '{out}'")
29 return 0
30 except CloneError:
31 logger.exception("failed to get '%s'", self.args.name)
32 return 1
33 except DvcException:
34 logger.exception(
35 "failed to get '%s' from '%s'", self.args.name, self.args.url
36 )
37 return 1
38
39
40 def add_parser(subparsers, parent_parser):
41 ARTIFACTS_HELP = "DVC model registry artifact commands."
42
43 artifacts_parser = subparsers.add_parser(
44 "artifacts",
45 parents=[parent_parser],
46 description=append_doc_link(ARTIFACTS_HELP, "artifacts"),
47 help=ARTIFACTS_HELP,
48 formatter_class=formatter.RawDescriptionHelpFormatter,
49 )
50 artifacts_subparsers = artifacts_parser.add_subparsers(
51 dest="cmd",
52 help="Use `dvc artifacts CMD --help` to display command-specific help.",
53 required=True,
54 )
55
56 ARTIFACTS_GET_HELP = "Download an artifact from a DVC project."
57 get_parser = artifacts_subparsers.add_parser(
58 "get",
59 parents=[parent_parser],
60 description=append_doc_link(ARTIFACTS_GET_HELP, "artifacts/get"),
61 help=ARTIFACTS_HELP,
62 formatter_class=formatter.RawDescriptionHelpFormatter,
63 )
64 get_parser.add_argument("url", help="Location of DVC repository to download from")
65 get_parser.add_argument(
66 "name", help="Name of artifact in the repository"
67 ).complete = completion.FILE
68 get_parser.add_argument(
69 "--rev",
70 nargs="?",
71 help="Artifact version",
72 metavar="<version>",
73 )
74 get_parser.add_argument(
75 "--stage",
76 nargs="?",
77 help="Artifact stage",
78 metavar="<stage>",
79 )
80 get_parser.add_argument(
81 "-o",
82 "--out",
83 nargs="?",
84 help="Destination path to download artifact to",
85 metavar="<path>",
86 ).complete = completion.DIR
87 get_parser.add_argument(
88 "-j",
89 "--jobs",
90 type=int,
91 help=(
92 "Number of jobs to run simultaneously. "
93 "The default value is 4 * cpu_count(). "
94 ),
95 metavar="<number>",
96 )
97 get_parser.add_argument(
98 "-f",
99 "--force",
100 action="store_true",
101 default=False,
102 help="Override local file or folder if exists.",
103 )
104 get_parser.add_argument(
105 "--config",
106 type=str,
107 help=(
108 "Path to a config file that will be merged with the config "
109 "in the target repository."
110 ),
111 )
112 get_parser.add_argument(
113 "--remote",
114 type=str,
115 help=(
116 "Remote name to set as a default in the target repository "
117 "(only applicable when downloading from DVC remote)."
118 ),
119 )
120 get_parser.add_argument(
121 "--remote-config",
122 type=str,
123 nargs="*",
124 action=DictAction,
125 help=(
126 "Remote config options to merge with a remote's config (default or one "
127 "specified by '--remote') in the target repository (only applicable "
128 "when downloading from DVC remote)."
129 ),
130 )
131 get_parser.set_defaults(func=CmdArtifactsGet)
132
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dvc/commands/artifacts.py b/dvc/commands/artifacts.py
--- a/dvc/commands/artifacts.py
+++ b/dvc/commands/artifacts.py
@@ -13,6 +13,9 @@
from dvc.scm import CloneError
from dvc.ui import ui
+ if self.args.show_url:
+ return self._show_url()
+
try:
count, out = Artifacts.get(
self.args.url,
@@ -36,6 +39,28 @@
)
return 1
+ def _show_url(self):
+ from dvc.api import artifacts_show, get_url
+ from dvc.ui import ui
+
+ artifact = artifacts_show(
+ self.args.name,
+ version=self.args.rev,
+ stage=self.args.stage,
+ repo=self.args.url,
+ )
+
+ url = get_url(
+ artifact["path"],
+ repo=self.args.url,
+ rev=artifact["rev"],
+ remote=self.args.remote,
+ remote_config=self.args.remote_config,
+ )
+ ui.write(url, force=True)
+
+ return 0
+
def add_parser(subparsers, parent_parser):
ARTIFACTS_HELP = "DVC model registry artifact commands."
@@ -84,6 +109,14 @@
help="Destination path to download artifact to",
metavar="<path>",
).complete = completion.DIR
+ get_parser.add_argument(
+ "--show-url",
+ action="store_true",
+ help=(
+ "Print the storage location (URL) the target data would be "
+ "downloaded from, and exit."
+ ),
+ )
get_parser.add_argument(
"-j",
"--jobs",
| {"golden_diff": "diff --git a/dvc/commands/artifacts.py b/dvc/commands/artifacts.py\n--- a/dvc/commands/artifacts.py\n+++ b/dvc/commands/artifacts.py\n@@ -13,6 +13,9 @@\n from dvc.scm import CloneError\n from dvc.ui import ui\n \n+ if self.args.show_url:\n+ return self._show_url()\n+\n try:\n count, out = Artifacts.get(\n self.args.url,\n@@ -36,6 +39,28 @@\n )\n return 1\n \n+ def _show_url(self):\n+ from dvc.api import artifacts_show, get_url\n+ from dvc.ui import ui\n+\n+ artifact = artifacts_show(\n+ self.args.name,\n+ version=self.args.rev,\n+ stage=self.args.stage,\n+ repo=self.args.url,\n+ )\n+\n+ url = get_url(\n+ artifact[\"path\"],\n+ repo=self.args.url,\n+ rev=artifact[\"rev\"],\n+ remote=self.args.remote,\n+ remote_config=self.args.remote_config,\n+ )\n+ ui.write(url, force=True)\n+\n+ return 0\n+\n \n def add_parser(subparsers, parent_parser):\n ARTIFACTS_HELP = \"DVC model registry artifact commands.\"\n@@ -84,6 +109,14 @@\n help=\"Destination path to download artifact to\",\n metavar=\"<path>\",\n ).complete = completion.DIR\n+ get_parser.add_argument(\n+ \"--show-url\",\n+ action=\"store_true\",\n+ help=(\n+ \"Print the storage location (URL) the target data would be \"\n+ \"downloaded from, and exit.\"\n+ ),\n+ )\n get_parser.add_argument(\n \"-j\",\n \"--jobs\",\n", "issue": "Feature proposal: `dvc artifacts get --show-url`\nDVC currently supports `dvc get --show-url` as a way to retrieve just the URL of a DVC-versioned object as opposed to the object itself.\r\n\r\nHowever, there is no equivalent for `dvc artifacts get`. This came as a customer request (to allow easier sharing of results even to people who are not DVC/DVC Studio users). It also has advantages e.g. in model deployment to Sagemaker (which requires the artifact URL on S3).\n", "before_files": [{"content": "from dvc.cli import completion, formatter\nfrom dvc.cli.command import CmdBaseNoRepo\nfrom dvc.cli.utils import DictAction, append_doc_link\nfrom dvc.exceptions import DvcException\nfrom dvc.log import logger\n\nlogger = logger.getChild(__name__)\n\n\nclass CmdArtifactsGet(CmdBaseNoRepo):\n def run(self):\n from dvc.repo.artifacts import Artifacts\n from dvc.scm import CloneError\n from dvc.ui import ui\n\n try:\n count, out = Artifacts.get(\n self.args.url,\n name=self.args.name,\n version=self.args.rev,\n stage=self.args.stage,\n force=self.args.force,\n config=self.args.config,\n remote=self.args.remote,\n remote_config=self.args.remote_config,\n out=self.args.out,\n )\n ui.write(f\"Downloaded {count} file(s) to '{out}'\")\n return 0\n except CloneError:\n logger.exception(\"failed to get '%s'\", self.args.name)\n return 1\n except DvcException:\n logger.exception(\n \"failed to get '%s' from '%s'\", self.args.name, self.args.url\n )\n return 1\n\n\ndef add_parser(subparsers, parent_parser):\n ARTIFACTS_HELP = \"DVC model registry artifact commands.\"\n\n artifacts_parser = subparsers.add_parser(\n \"artifacts\",\n parents=[parent_parser],\n description=append_doc_link(ARTIFACTS_HELP, \"artifacts\"),\n help=ARTIFACTS_HELP,\n formatter_class=formatter.RawDescriptionHelpFormatter,\n )\n artifacts_subparsers = artifacts_parser.add_subparsers(\n dest=\"cmd\",\n help=\"Use `dvc artifacts CMD --help` to display command-specific help.\",\n required=True,\n )\n\n ARTIFACTS_GET_HELP = \"Download an artifact from a DVC project.\"\n get_parser = artifacts_subparsers.add_parser(\n \"get\",\n parents=[parent_parser],\n description=append_doc_link(ARTIFACTS_GET_HELP, \"artifacts/get\"),\n help=ARTIFACTS_HELP,\n 
formatter_class=formatter.RawDescriptionHelpFormatter,\n )\n get_parser.add_argument(\"url\", help=\"Location of DVC repository to download from\")\n get_parser.add_argument(\n \"name\", help=\"Name of artifact in the repository\"\n ).complete = completion.FILE\n get_parser.add_argument(\n \"--rev\",\n nargs=\"?\",\n help=\"Artifact version\",\n metavar=\"<version>\",\n )\n get_parser.add_argument(\n \"--stage\",\n nargs=\"?\",\n help=\"Artifact stage\",\n metavar=\"<stage>\",\n )\n get_parser.add_argument(\n \"-o\",\n \"--out\",\n nargs=\"?\",\n help=\"Destination path to download artifact to\",\n metavar=\"<path>\",\n ).complete = completion.DIR\n get_parser.add_argument(\n \"-j\",\n \"--jobs\",\n type=int,\n help=(\n \"Number of jobs to run simultaneously. \"\n \"The default value is 4 * cpu_count(). \"\n ),\n metavar=\"<number>\",\n )\n get_parser.add_argument(\n \"-f\",\n \"--force\",\n action=\"store_true\",\n default=False,\n help=\"Override local file or folder if exists.\",\n )\n get_parser.add_argument(\n \"--config\",\n type=str,\n help=(\n \"Path to a config file that will be merged with the config \"\n \"in the target repository.\"\n ),\n )\n get_parser.add_argument(\n \"--remote\",\n type=str,\n help=(\n \"Remote name to set as a default in the target repository \"\n \"(only applicable when downloading from DVC remote).\"\n ),\n )\n get_parser.add_argument(\n \"--remote-config\",\n type=str,\n nargs=\"*\",\n action=DictAction,\n help=(\n \"Remote config options to merge with a remote's config (default or one \"\n \"specified by '--remote') in the target repository (only applicable \"\n \"when downloading from DVC remote).\"\n ),\n )\n get_parser.set_defaults(func=CmdArtifactsGet)\n", "path": "dvc/commands/artifacts.py"}], "after_files": [{"content": "from dvc.cli import completion, formatter\nfrom dvc.cli.command import CmdBaseNoRepo\nfrom dvc.cli.utils import DictAction, append_doc_link\nfrom dvc.exceptions import DvcException\nfrom dvc.log import logger\n\nlogger = logger.getChild(__name__)\n\n\nclass CmdArtifactsGet(CmdBaseNoRepo):\n def run(self):\n from dvc.repo.artifacts import Artifacts\n from dvc.scm import CloneError\n from dvc.ui import ui\n\n if self.args.show_url:\n return self._show_url()\n\n try:\n count, out = Artifacts.get(\n self.args.url,\n name=self.args.name,\n version=self.args.rev,\n stage=self.args.stage,\n force=self.args.force,\n config=self.args.config,\n remote=self.args.remote,\n remote_config=self.args.remote_config,\n out=self.args.out,\n )\n ui.write(f\"Downloaded {count} file(s) to '{out}'\")\n return 0\n except CloneError:\n logger.exception(\"failed to get '%s'\", self.args.name)\n return 1\n except DvcException:\n logger.exception(\n \"failed to get '%s' from '%s'\", self.args.name, self.args.url\n )\n return 1\n\n def _show_url(self):\n from dvc.api import artifacts_show, get_url\n from dvc.ui import ui\n\n artifact = artifacts_show(\n self.args.name,\n version=self.args.rev,\n stage=self.args.stage,\n repo=self.args.url,\n )\n\n url = get_url(\n artifact[\"path\"],\n repo=self.args.url,\n rev=artifact[\"rev\"],\n remote=self.args.remote,\n remote_config=self.args.remote_config,\n )\n ui.write(url, force=True)\n\n return 0\n\n\ndef add_parser(subparsers, parent_parser):\n ARTIFACTS_HELP = \"DVC model registry artifact commands.\"\n\n artifacts_parser = subparsers.add_parser(\n \"artifacts\",\n parents=[parent_parser],\n description=append_doc_link(ARTIFACTS_HELP, \"artifacts\"),\n help=ARTIFACTS_HELP,\n 
formatter_class=formatter.RawDescriptionHelpFormatter,\n )\n artifacts_subparsers = artifacts_parser.add_subparsers(\n dest=\"cmd\",\n help=\"Use `dvc artifacts CMD --help` to display command-specific help.\",\n required=True,\n )\n\n ARTIFACTS_GET_HELP = \"Download an artifact from a DVC project.\"\n get_parser = artifacts_subparsers.add_parser(\n \"get\",\n parents=[parent_parser],\n description=append_doc_link(ARTIFACTS_GET_HELP, \"artifacts/get\"),\n help=ARTIFACTS_HELP,\n formatter_class=formatter.RawDescriptionHelpFormatter,\n )\n get_parser.add_argument(\"url\", help=\"Location of DVC repository to download from\")\n get_parser.add_argument(\n \"name\", help=\"Name of artifact in the repository\"\n ).complete = completion.FILE\n get_parser.add_argument(\n \"--rev\",\n nargs=\"?\",\n help=\"Artifact version\",\n metavar=\"<version>\",\n )\n get_parser.add_argument(\n \"--stage\",\n nargs=\"?\",\n help=\"Artifact stage\",\n metavar=\"<stage>\",\n )\n get_parser.add_argument(\n \"-o\",\n \"--out\",\n nargs=\"?\",\n help=\"Destination path to download artifact to\",\n metavar=\"<path>\",\n ).complete = completion.DIR\n get_parser.add_argument(\n \"--show-url\",\n action=\"store_true\",\n help=(\n \"Print the storage location (URL) the target data would be \"\n \"downloaded from, and exit.\"\n ),\n )\n get_parser.add_argument(\n \"-j\",\n \"--jobs\",\n type=int,\n help=(\n \"Number of jobs to run simultaneously. \"\n \"The default value is 4 * cpu_count(). \"\n ),\n metavar=\"<number>\",\n )\n get_parser.add_argument(\n \"-f\",\n \"--force\",\n action=\"store_true\",\n default=False,\n help=\"Override local file or folder if exists.\",\n )\n get_parser.add_argument(\n \"--config\",\n type=str,\n help=(\n \"Path to a config file that will be merged with the config \"\n \"in the target repository.\"\n ),\n )\n get_parser.add_argument(\n \"--remote\",\n type=str,\n help=(\n \"Remote name to set as a default in the target repository \"\n \"(only applicable when downloading from DVC remote).\"\n ),\n )\n get_parser.add_argument(\n \"--remote-config\",\n type=str,\n nargs=\"*\",\n action=DictAction,\n help=(\n \"Remote config options to merge with a remote's config (default or one \"\n \"specified by '--remote') in the target repository (only applicable \"\n \"when downloading from DVC remote).\"\n ),\n )\n get_parser.set_defaults(func=CmdArtifactsGet)\n", "path": "dvc/commands/artifacts.py"}]} | 1,527 | 389 |
gh_patches_debug_23316 | rasdani/github-patches | git_diff | holoviz__panel-4619 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add ability to open custom notebook in Panelite.
The JupyterLite extension https://github.com/jupyterlab-contrib/jupyterlab-open-url-parameter enables you to open a notebook from a URL in JupyterLite.
This would be really powerful to include in the Panelite build, as we could then start to share links to notebooks that open quickly for the user with a working environment.
--- END ISSUE ---
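For context, a minimal sketch of the kind of shareable link this would enable; `fromURL` is the query parameter documented by the jupyterlab-open-url-parameter extension, and the Panelite and notebook URLs below are placeholders rather than a committed design:
```python
# Sketch only: build a link that makes a JupyterLite deployment fetch and open a notebook.
# Assumes the jupyterlab-open-url-parameter extension is bundled into the Panelite build.
from urllib.parse import urlencode

panelite = "https://panelite.holoviz.org/lab/index.html"
notebook = "https://raw.githubusercontent.com/holoviz/panel/main/examples/reference/widgets/Button.ipynb"

share_link = f"{panelite}?{urlencode({'fromURL': notebook})}"
print(share_link)
```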
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `doc/conf.py`
Content:
```
1 import json
2 import os
3 import pathlib
4
5 import param
6
7 param.parameterized.docstring_signature = False
8 param.parameterized.docstring_describe_params = False
9
10 from nbsite.shared_conf import *
11
12 project = 'Panel'
13 authors = 'Panel contributors'
14 copyright_years['start_year'] = '2019'
15 copyright = copyright_fmt.format(**copyright_years)
16 description = 'High-level dashboarding for python visualization libraries'
17
18 import panel
19
20 from panel.io.convert import BOKEH_VERSION, PY_VERSION
21 from panel.io.resources import CDN_DIST
22
23 PANEL_ROOT = pathlib.Path(panel.__file__).parent
24
25 version = release = base_version(panel.__version__)
26 js_version = json.loads((PANEL_ROOT / 'package.json').read_text())['version']
27
28 # For the interactivity warning box created by nbsite to point to the right
29 # git tag instead of the default i.e. main.
30 os.environ['BRANCH'] = f"v{release}"
31
32 html_static_path += ['_static']
33
34 html_css_files = [
35 'nbsite.css',
36 'css/custom.css',
37 'css/dataframe.css',
38 ]
39
40 html_theme = "pydata_sphinx_theme"
41 html_logo = "_static/logo_horizontal.png"
42 html_favicon = "_static/icons/favicon.ico"
43
44 html_theme_options = {
45 "github_url": "https://github.com/holoviz/panel",
46 "icon_links": [
47 {
48 "name": "Twitter",
49 "url": "https://twitter.com/Panel_Org",
50 "icon": "fab fa-twitter-square",
51 },
52 {
53 "name": "Discourse",
54 "url": "https://discourse.holoviz.org/c/panel/5",
55 "icon": "fab fa-discourse",
56 },
57 ],
58 "footer_items": [
59 "copyright",
60 "last-updated",
61 ],
62 "google_analytics_id": "UA-154795830-2",
63 "pygment_light_style": "material",
64 "pygment_dark_style": "material",
65 "header_links_before_dropdown": 6
66 }
67
68 extensions += [
69 'sphinx.ext.napoleon',
70 'nbsite.gallery',
71 'sphinx_copybutton',
72 'nbsite.pyodide'
73 ]
74 napoleon_numpy_docstring = True
75
76 myst_enable_extensions = ["colon_fence", "deflist"]
77
78 nbsite_gallery_conf = {
79 'github_org': 'holoviz',
80 'github_project': 'panel',
81 'galleries': {
82 'gallery': {
83 'title': 'Gallery',
84 'sections': [
85 {'path': 'demos',
86 'title': 'Demos',
87 'description': 'A set of sophisticated apps built to demonstrate the features of Panel.'},
88 {'path': 'simple',
89 'title': 'Simple Apps',
90 'description': 'Simple example apps meant to provide a quick introduction to Panel.'},
91 {'path': 'layout',
92 'title': 'Layouts',
93 'description': 'How to leverage Panel layout components to achieve complex layouts.'},
94 {'path': 'dynamic',
95 'title': 'Dynamic UIs',
96 'description': ('Examples demonstrating how to build dynamic UIs with components that '
97 'are added or removed interactively.')},
98 {'path': 'streaming',
99 'title': 'Streaming',
100 'description': ('Streaming data to a visual component.')},
101 {'path': 'components',
102 'title': 'Custom components',
103 'description': "Components created using Panel's ReactiveHTML class."},
104 {'path': 'styles',
105 'title': 'Styling & Theming',
106 'description': "Examples demonstrating how to style and theme different components."},
107 {'path': 'external',
108 'title': 'External libraries',
109 'description': 'Wrapping external libraries with Panel.'}
110 ]
111 },
112 'reference': {
113 'title': 'Reference Gallery',
114 'sections': [
115 'panes',
116 'layouts',
117 'templates',
118 'global',
119 'indicators',
120 'widgets',
121 ],
122 'titles': {
123 'Vega': 'Altair & Vega',
124 'DeckGL': 'PyDeck & Deck.gl',
125 'ECharts': 'PyEcharts & ECharts',
126 'IPyWidget': 'ipywidgets'
127 },
128 'normalize_titles': False
129 }
130 },
131 'thumbnail_url': 'https://assets.holoviz.org/panel/thumbnails',
132 'deployment_url': 'https://panel-gallery.pyviz.demo.anaconda.com/'
133 }
134
135 if panel.__version__ != version and (PANEL_ROOT / 'dist' / 'wheels').is_dir():
136 py_version = panel.__version__.replace("-dirty", "")
137 panel_req = f'./wheels/panel-{py_version}-py3-none-any.whl'
138 bokeh_req = f'./wheels/bokeh-{BOKEH_VERSION}-py3-none-any.whl'
139 else:
140 panel_req = f'{CDN_DIST}wheels/panel-{PY_VERSION}-py3-none-any.whl'
141 bokeh_req = f'{CDN_DIST}wheels/bokeh-{BOKEH_VERSION}-py3-none-any.whl'
142
143 nbsite_pyodide_conf = {
144 'requirements': [bokeh_req, panel_req, 'pandas', 'pyodide-http', 'holoviews>=1.16.0a2']
145 }
146
147 templates_path = [
148 '_templates'
149 ]
150
151 html_context.update({
152 "last_release": f"v{release}",
153 "github_user": "holoviz",
154 "github_repo": "panel",
155 "default_mode": "light"
156 })
157
158 nbbuild_patterns_to_take_along = ["simple.html", "*.json", "json_*"]
159
160 # Override the Sphinx default title that appends `documentation`
161 html_title = f'{project} v{version}'
162
163 suppress_warnings = ["myst.header", "ref.myst", "mystnb.unknown_mime_type"]
164
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/doc/conf.py b/doc/conf.py
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -55,10 +55,6 @@
"icon": "fab fa-discourse",
},
],
- "footer_items": [
- "copyright",
- "last-updated",
- ],
"google_analytics_id": "UA-154795830-2",
"pygment_light_style": "material",
"pygment_dark_style": "material",
@@ -111,6 +107,7 @@
},
'reference': {
'title': 'Reference Gallery',
+ 'as_pyodide': True,
'sections': [
'panes',
'layouts',
@@ -129,7 +126,8 @@
}
},
'thumbnail_url': 'https://assets.holoviz.org/panel/thumbnails',
- 'deployment_url': 'https://panel-gallery.pyviz.demo.anaconda.com/'
+ 'deployment_url': 'https://panel-gallery.pyviz.demo.anaconda.com/',
+ 'jupyterlite_url': 'https://panelite.holoviz.org/lab/index.html'
}
if panel.__version__ != version and (PANEL_ROOT / 'dist' / 'wheels').is_dir():
| {"golden_diff": "diff --git a/doc/conf.py b/doc/conf.py\n--- a/doc/conf.py\n+++ b/doc/conf.py\n@@ -55,10 +55,6 @@\n \"icon\": \"fab fa-discourse\",\n },\n ],\n- \"footer_items\": [\n- \"copyright\",\n- \"last-updated\",\n- ],\n \"google_analytics_id\": \"UA-154795830-2\",\n \"pygment_light_style\": \"material\",\n \"pygment_dark_style\": \"material\",\n@@ -111,6 +107,7 @@\n },\n 'reference': {\n 'title': 'Reference Gallery',\n+ 'as_pyodide': True,\n 'sections': [\n 'panes',\n 'layouts',\n@@ -129,7 +126,8 @@\n }\n },\n 'thumbnail_url': 'https://assets.holoviz.org/panel/thumbnails',\n- 'deployment_url': 'https://panel-gallery.pyviz.demo.anaconda.com/'\n+ 'deployment_url': 'https://panel-gallery.pyviz.demo.anaconda.com/',\n+ 'jupyterlite_url': 'https://panelite.holoviz.org/lab/index.html'\n }\n \n if panel.__version__ != version and (PANEL_ROOT / 'dist' / 'wheels').is_dir():\n", "issue": "Add ability to open custom notebook in Panelite.\nThe Jupyter lite extension https://github.com/jupyterlab-contrib/jupyterlab-open-url-parameter enables you to open a notebook from an url in Jupyterlite.\n\nThis would be really powerful to include in the build of Panelite as we can the start to share links to notebooks that opens quickly for the user with a working environment.\n", "before_files": [{"content": "import json\nimport os\nimport pathlib\n\nimport param\n\nparam.parameterized.docstring_signature = False\nparam.parameterized.docstring_describe_params = False\n\nfrom nbsite.shared_conf import *\n\nproject = 'Panel'\nauthors = 'Panel contributors'\ncopyright_years['start_year'] = '2019'\ncopyright = copyright_fmt.format(**copyright_years)\ndescription = 'High-level dashboarding for python visualization libraries'\n\nimport panel\n\nfrom panel.io.convert import BOKEH_VERSION, PY_VERSION\nfrom panel.io.resources import CDN_DIST\n\nPANEL_ROOT = pathlib.Path(panel.__file__).parent\n\nversion = release = base_version(panel.__version__)\njs_version = json.loads((PANEL_ROOT / 'package.json').read_text())['version']\n\n# For the interactivity warning box created by nbsite to point to the right\n# git tag instead of the default i.e. 
main.\nos.environ['BRANCH'] = f\"v{release}\"\n\nhtml_static_path += ['_static']\n\nhtml_css_files = [\n 'nbsite.css',\n 'css/custom.css',\n 'css/dataframe.css',\n]\n\nhtml_theme = \"pydata_sphinx_theme\"\nhtml_logo = \"_static/logo_horizontal.png\"\nhtml_favicon = \"_static/icons/favicon.ico\"\n\nhtml_theme_options = {\n \"github_url\": \"https://github.com/holoviz/panel\",\n \"icon_links\": [\n {\n \"name\": \"Twitter\",\n \"url\": \"https://twitter.com/Panel_Org\",\n \"icon\": \"fab fa-twitter-square\",\n },\n {\n \"name\": \"Discourse\",\n \"url\": \"https://discourse.holoviz.org/c/panel/5\",\n \"icon\": \"fab fa-discourse\",\n },\n ],\n \"footer_items\": [\n \"copyright\",\n \"last-updated\",\n ],\n \"google_analytics_id\": \"UA-154795830-2\",\n \"pygment_light_style\": \"material\",\n \"pygment_dark_style\": \"material\",\n \"header_links_before_dropdown\": 6\n}\n\nextensions += [\n 'sphinx.ext.napoleon',\n 'nbsite.gallery',\n 'sphinx_copybutton',\n 'nbsite.pyodide'\n]\nnapoleon_numpy_docstring = True\n\nmyst_enable_extensions = [\"colon_fence\", \"deflist\"]\n\nnbsite_gallery_conf = {\n 'github_org': 'holoviz',\n 'github_project': 'panel',\n 'galleries': {\n 'gallery': {\n 'title': 'Gallery',\n 'sections': [\n {'path': 'demos',\n 'title': 'Demos',\n 'description': 'A set of sophisticated apps built to demonstrate the features of Panel.'},\n {'path': 'simple',\n 'title': 'Simple Apps',\n 'description': 'Simple example apps meant to provide a quick introduction to Panel.'},\n {'path': 'layout',\n 'title': 'Layouts',\n 'description': 'How to leverage Panel layout components to achieve complex layouts.'},\n {'path': 'dynamic',\n 'title': 'Dynamic UIs',\n 'description': ('Examples demonstrating how to build dynamic UIs with components that '\n 'are added or removed interactively.')},\n {'path': 'streaming',\n 'title': 'Streaming',\n 'description': ('Streaming data to a visual component.')},\n {'path': 'components',\n 'title': 'Custom components',\n 'description': \"Components created using Panel's ReactiveHTML class.\"},\n {'path': 'styles',\n 'title': 'Styling & Theming',\n 'description': \"Examples demonstrating how to style and theme different components.\"},\n {'path': 'external',\n 'title': 'External libraries',\n 'description': 'Wrapping external libraries with Panel.'}\n ]\n },\n 'reference': {\n 'title': 'Reference Gallery',\n 'sections': [\n 'panes',\n 'layouts',\n 'templates',\n 'global',\n 'indicators',\n 'widgets',\n ],\n 'titles': {\n 'Vega': 'Altair & Vega',\n 'DeckGL': 'PyDeck & Deck.gl',\n 'ECharts': 'PyEcharts & ECharts',\n 'IPyWidget': 'ipywidgets'\n },\n 'normalize_titles': False\n }\n },\n 'thumbnail_url': 'https://assets.holoviz.org/panel/thumbnails',\n 'deployment_url': 'https://panel-gallery.pyviz.demo.anaconda.com/'\n}\n\nif panel.__version__ != version and (PANEL_ROOT / 'dist' / 'wheels').is_dir():\n py_version = panel.__version__.replace(\"-dirty\", \"\")\n panel_req = f'./wheels/panel-{py_version}-py3-none-any.whl'\n bokeh_req = f'./wheels/bokeh-{BOKEH_VERSION}-py3-none-any.whl'\nelse:\n panel_req = f'{CDN_DIST}wheels/panel-{PY_VERSION}-py3-none-any.whl'\n bokeh_req = f'{CDN_DIST}wheels/bokeh-{BOKEH_VERSION}-py3-none-any.whl'\n\nnbsite_pyodide_conf = {\n 'requirements': [bokeh_req, panel_req, 'pandas', 'pyodide-http', 'holoviews>=1.16.0a2']\n}\n\ntemplates_path = [\n '_templates'\n]\n\nhtml_context.update({\n \"last_release\": f\"v{release}\",\n \"github_user\": \"holoviz\",\n \"github_repo\": \"panel\",\n \"default_mode\": 
\"light\"\n})\n\nnbbuild_patterns_to_take_along = [\"simple.html\", \"*.json\", \"json_*\"]\n\n# Override the Sphinx default title that appends `documentation`\nhtml_title = f'{project} v{version}'\n\nsuppress_warnings = [\"myst.header\", \"ref.myst\", \"mystnb.unknown_mime_type\"]\n", "path": "doc/conf.py"}], "after_files": [{"content": "import json\nimport os\nimport pathlib\n\nimport param\n\nparam.parameterized.docstring_signature = False\nparam.parameterized.docstring_describe_params = False\n\nfrom nbsite.shared_conf import *\n\nproject = 'Panel'\nauthors = 'Panel contributors'\ncopyright_years['start_year'] = '2019'\ncopyright = copyright_fmt.format(**copyright_years)\ndescription = 'High-level dashboarding for python visualization libraries'\n\nimport panel\n\nfrom panel.io.convert import BOKEH_VERSION, PY_VERSION\nfrom panel.io.resources import CDN_DIST\n\nPANEL_ROOT = pathlib.Path(panel.__file__).parent\n\nversion = release = base_version(panel.__version__)\njs_version = json.loads((PANEL_ROOT / 'package.json').read_text())['version']\n\n# For the interactivity warning box created by nbsite to point to the right\n# git tag instead of the default i.e. main.\nos.environ['BRANCH'] = f\"v{release}\"\n\nhtml_static_path += ['_static']\n\nhtml_css_files = [\n 'nbsite.css',\n 'css/custom.css',\n 'css/dataframe.css',\n]\n\nhtml_theme = \"pydata_sphinx_theme\"\nhtml_logo = \"_static/logo_horizontal.png\"\nhtml_favicon = \"_static/icons/favicon.ico\"\n\nhtml_theme_options = {\n \"github_url\": \"https://github.com/holoviz/panel\",\n \"icon_links\": [\n {\n \"name\": \"Twitter\",\n \"url\": \"https://twitter.com/Panel_Org\",\n \"icon\": \"fab fa-twitter-square\",\n },\n {\n \"name\": \"Discourse\",\n \"url\": \"https://discourse.holoviz.org/c/panel/5\",\n \"icon\": \"fab fa-discourse\",\n },\n ],\n \"google_analytics_id\": \"UA-154795830-2\",\n \"pygment_light_style\": \"material\",\n \"pygment_dark_style\": \"material\",\n \"header_links_before_dropdown\": 6\n}\n\nextensions += [\n 'sphinx.ext.napoleon',\n 'nbsite.gallery',\n 'sphinx_copybutton',\n 'nbsite.pyodide'\n]\nnapoleon_numpy_docstring = True\n\nmyst_enable_extensions = [\"colon_fence\", \"deflist\"]\n\nnbsite_gallery_conf = {\n 'github_org': 'holoviz',\n 'github_project': 'panel',\n 'galleries': {\n 'gallery': {\n 'title': 'Gallery',\n 'sections': [\n {'path': 'demos',\n 'title': 'Demos',\n 'description': 'A set of sophisticated apps built to demonstrate the features of Panel.'},\n {'path': 'simple',\n 'title': 'Simple Apps',\n 'description': 'Simple example apps meant to provide a quick introduction to Panel.'},\n {'path': 'layout',\n 'title': 'Layouts',\n 'description': 'How to leverage Panel layout components to achieve complex layouts.'},\n {'path': 'dynamic',\n 'title': 'Dynamic UIs',\n 'description': ('Examples demonstrating how to build dynamic UIs with components that '\n 'are added or removed interactively.')},\n {'path': 'streaming',\n 'title': 'Streaming',\n 'description': ('Streaming data to a visual component.')},\n {'path': 'components',\n 'title': 'Custom components',\n 'description': \"Components created using Panel's ReactiveHTML class.\"},\n {'path': 'styles',\n 'title': 'Styling & Theming',\n 'description': \"Examples demonstrating how to style and theme different components.\"},\n {'path': 'external',\n 'title': 'External libraries',\n 'description': 'Wrapping external libraries with Panel.'}\n ]\n },\n 'reference': {\n 'title': 'Reference Gallery',\n 'as_pyodide': True,\n 'sections': [\n 'panes',\n 
'layouts',\n 'templates',\n 'global',\n 'indicators',\n 'widgets',\n ],\n 'titles': {\n 'Vega': 'Altair & Vega',\n 'DeckGL': 'PyDeck & Deck.gl',\n 'ECharts': 'PyEcharts & ECharts',\n 'IPyWidget': 'ipywidgets'\n },\n 'normalize_titles': False\n }\n },\n 'thumbnail_url': 'https://assets.holoviz.org/panel/thumbnails',\n 'deployment_url': 'https://panel-gallery.pyviz.demo.anaconda.com/',\n 'jupyterlite_url': 'https://panelite.holoviz.org/lab/index.html'\n}\n\nif panel.__version__ != version and (PANEL_ROOT / 'dist' / 'wheels').is_dir():\n py_version = panel.__version__.replace(\"-dirty\", \"\")\n panel_req = f'./wheels/panel-{py_version}-py3-none-any.whl'\n bokeh_req = f'./wheels/bokeh-{BOKEH_VERSION}-py3-none-any.whl'\nelse:\n panel_req = f'{CDN_DIST}wheels/panel-{PY_VERSION}-py3-none-any.whl'\n bokeh_req = f'{CDN_DIST}wheels/bokeh-{BOKEH_VERSION}-py3-none-any.whl'\n\nnbsite_pyodide_conf = {\n 'requirements': [bokeh_req, panel_req, 'pandas', 'pyodide-http', 'holoviews>=1.16.0a2']\n}\n\ntemplates_path = [\n '_templates'\n]\n\nhtml_context.update({\n \"last_release\": f\"v{release}\",\n \"github_user\": \"holoviz\",\n \"github_repo\": \"panel\",\n \"default_mode\": \"light\"\n})\n\nnbbuild_patterns_to_take_along = [\"simple.html\", \"*.json\", \"json_*\"]\n\n# Override the Sphinx default title that appends `documentation`\nhtml_title = f'{project} v{version}'\n\nsuppress_warnings = [\"myst.header\", \"ref.myst\", \"mystnb.unknown_mime_type\"]\n", "path": "doc/conf.py"}]} | 1,987 | 291 |
gh_patches_debug_53384 | rasdani/github-patches | git_diff | chainer__chainer-271 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
FunctionSet.copy_parameters_from()
Hi all!
The code in 'FunctionSet.copy_parameters_from()' does not work when 'src' and 'dst' are both numpy.ndarrays.
``` python
if isinstance(dst, numpy.ndarray):
if isinstance(src, numpy.ndarray):
dst.copy(src) # this gives a ValueError
```
I think this should read
``` python
if isinstance(dst, numpy.ndarray):
if isinstance(src, numpy.ndarray):
numpy.copyto(dst, src)
```
My numpy.version.full_version is 1.9.2; the 'copyto' method has existed since 1.7.0.
Cheers,
-r
--- END ISSUE ---
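A quick stand-alone illustration of the difference (independent of Chainer): `ndarray.copy()` returns a new copy and only accepts a memory-order flag, so passing another array raises, whereas `numpy.copyto` writes into the destination in place. Minimal sketch, assuming NumPy >= 1.7:
```python
import numpy

dst = numpy.zeros(3)
src = numpy.arange(3, dtype=float)

# dst.copy(src)          # fails: copy() expects an order flag such as 'C', not an array
numpy.copyto(dst, src)   # copies src into dst without reallocating
print(dst)               # [0. 1. 2.]
```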
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `chainer/function_set.py`
Content:
```
1 import numpy
2 import six
3
4 from chainer import cuda
5
6
7 class FunctionSet(object):
8
9 """Set of objects with ``parameters`` and ``gradients`` properties.
10
11 :class:`FunctionSet` is useful to collect parameters and gradients of
12 multiple parameterized :class:`Function` objects. :class:`FunctionSet`
13 itself also implements :attr:`~FunctionSet.parameters` and
14 :attr:`~FunctionSet.gradients`, so it can be nested in another
15 :class:`FunctionSet` object.
16
17 Function registration is done by just adding an attribute to
18 :class:`FunctionSet` object.
19
20 """
21
22 def __init__(self, **functions):
23 """Initializes the function set by given functions.
24
25 Args:
26 **functions: ``dict`` of ``str`` key and :class:`Function` values.
27 The key-value pairs are just set to the :class:`FunctionSet`
28 object as attributes.
29
30 """
31 for name, func in six.iteritems(functions):
32 setattr(self, name, func)
33
34 def collect_parameters(self):
35 """Returns a tuple of parameters and gradients.
36
37 Returns:
38 Tuple (pair) of two tuples. The first element is a tuple of
39 parameter arrays, and the second is a tuple of gradient arrays.
40
41 """
42 return self.parameters, self.gradients
43
44 def to_gpu(self, device=None):
45 """Migrates all parameters and gradients onto GPU.
46
47 This method calls ``to_gpu`` method of each registered object.
48
49 Args:
50 device (int or :class:`pycuda.driver.Device` or ``None``): Device
51 ID of GPU. If ``None`` is given, it uses the current device.
52
53 Returns:
54 self
55
56 """
57 for func in six.itervalues(self.__dict__):
58 func.to_gpu(device=device)
59 return self
60
61 def to_cpu(self):
62 """Migrates all parameters and gradients onto CPU.
63
64 This method calls ``to_cpu`` method of each registered object.
65
66 Returns:
67 self
68
69 """
70 for func in six.itervalues(self.__dict__):
71 func.to_cpu()
72 return self
73
74 def copy_parameters_from(self, params):
75 """Copies parameters from another source without reallocation.
76
77 Args:
78 params (Iterable): Iterable of parameter arrays.
79
80 """
81 for dst, src in zip(self.parameters, params):
82 if isinstance(dst, numpy.ndarray):
83 if isinstance(src, numpy.ndarray):
84 dst.copy(src)
85 else:
86 src.get(dst)
87 elif isinstance(src, numpy.ndarray):
88 dst.set(src)
89 else:
90 cuda.copy(src, out=dst)
91
92 @property
93 def parameters(self):
94 """Tuple of parameter arrays of all registered functions.
95
96 The order of parameters is consistent with :meth:`gradients` property.
97
98 """
99 return sum((func.parameters for _, func in self._get_sorted_funcs()),
100 ())
101
102 @parameters.setter
103 def parameters(self, params):
104 param_iter = iter(params)
105 for _, func in self._get_sorted_funcs():
106 func.parameters = param_iter
107
108 @property
109 def gradients(self):
110 """Tuple of gradient arrays of all registered functions.
111
112 The order of gradients is consistent with :meth:`parameters` property.
113
114 """
115 return sum((func.gradients for _, func in self._get_sorted_funcs()),
116 ())
117
118 @gradients.setter
119 def gradients(self, grads):
120 grad_iter = iter(grads)
121 for _, func in self._get_sorted_funcs():
122 func.gradients = grad_iter
123
124 def _get_sorted_funcs(self):
125 return sorted(six.iteritems(self.__dict__))
126
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/chainer/function_set.py b/chainer/function_set.py
--- a/chainer/function_set.py
+++ b/chainer/function_set.py
@@ -81,7 +81,7 @@
for dst, src in zip(self.parameters, params):
if isinstance(dst, numpy.ndarray):
if isinstance(src, numpy.ndarray):
- dst.copy(src)
+ numpy.copyto(dst, src)
else:
src.get(dst)
elif isinstance(src, numpy.ndarray):
| {"golden_diff": "diff --git a/chainer/function_set.py b/chainer/function_set.py\n--- a/chainer/function_set.py\n+++ b/chainer/function_set.py\n@@ -81,7 +81,7 @@\n for dst, src in zip(self.parameters, params):\n if isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n- dst.copy(src)\n+ numpy.copyto(dst, src)\n else:\n src.get(dst)\n elif isinstance(src, numpy.ndarray):\n", "issue": "FunctionSet.copy_parameters_from()\nHi all!\n\nThe code in 'FunctionSet.copy_parameters_from()' does not work, when 'src' and 'dst' are both numpy.ndarrays?\n\n``` python\nif isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n dst.copy(src) # this gives a ValueError\n```\n\nI think this should read\n\n``` python\nif isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n numpy.copyto(dst, src)\n```\n\nMy numpy.version.full_version is 1.9.2, the 'copyto' method exists since 1.7.0.\n\nCheers,\n-r\n\n", "before_files": [{"content": "import numpy\nimport six\n\nfrom chainer import cuda\n\n\nclass FunctionSet(object):\n\n \"\"\"Set of objects with ``parameters`` and ``gradients`` properties.\n\n :class:`FunctionSet` is useful to collect parameters and gradients of\n multiple parameterized :class:`Function` objects. :class:`FunctionSet`\n itself also implements :attr:`~FunctionSet.parameters` and\n :attr:`~FunctionSet.gradients`, so it can be nested in another\n :class:`FunctionSet` object.\n\n Function registration is done by just adding an attribute to\n :class:`FunctionSet` object.\n\n \"\"\"\n\n def __init__(self, **functions):\n \"\"\"Initializes the function set by given functions.\n\n Args:\n **functions: ``dict`` of ``str`` key and :class:`Function` values.\n The key-value pairs are just set to the :class:`FunctionSet`\n object as attributes.\n\n \"\"\"\n for name, func in six.iteritems(functions):\n setattr(self, name, func)\n\n def collect_parameters(self):\n \"\"\"Returns a tuple of parameters and gradients.\n\n Returns:\n Tuple (pair) of two tuples. The first element is a tuple of\n parameter arrays, and the second is a tuple of gradient arrays.\n\n \"\"\"\n return self.parameters, self.gradients\n\n def to_gpu(self, device=None):\n \"\"\"Migrates all parameters and gradients onto GPU.\n\n This method calls ``to_gpu`` method of each registered object.\n\n Args:\n device (int or :class:`pycuda.driver.Device` or ``None``): Device\n ID of GPU. 
If ``None`` is given, it uses the current device.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n func.to_gpu(device=device)\n return self\n\n def to_cpu(self):\n \"\"\"Migrates all parameters and gradients onto CPU.\n\n This method calls ``to_cpu`` method of each registered object.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n func.to_cpu()\n return self\n\n def copy_parameters_from(self, params):\n \"\"\"Copies parameters from another source without reallocation.\n\n Args:\n params (Iterable): Iterable of parameter arrays.\n\n \"\"\"\n for dst, src in zip(self.parameters, params):\n if isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n dst.copy(src)\n else:\n src.get(dst)\n elif isinstance(src, numpy.ndarray):\n dst.set(src)\n else:\n cuda.copy(src, out=dst)\n\n @property\n def parameters(self):\n \"\"\"Tuple of parameter arrays of all registered functions.\n\n The order of parameters is consistent with :meth:`gradients` property.\n\n \"\"\"\n return sum((func.parameters for _, func in self._get_sorted_funcs()),\n ())\n\n @parameters.setter\n def parameters(self, params):\n param_iter = iter(params)\n for _, func in self._get_sorted_funcs():\n func.parameters = param_iter\n\n @property\n def gradients(self):\n \"\"\"Tuple of gradient arrays of all registered functions.\n\n The order of gradients is consistent with :meth:`parameters` property.\n\n \"\"\"\n return sum((func.gradients for _, func in self._get_sorted_funcs()),\n ())\n\n @gradients.setter\n def gradients(self, grads):\n grad_iter = iter(grads)\n for _, func in self._get_sorted_funcs():\n func.gradients = grad_iter\n\n def _get_sorted_funcs(self):\n return sorted(six.iteritems(self.__dict__))\n", "path": "chainer/function_set.py"}], "after_files": [{"content": "import numpy\nimport six\n\nfrom chainer import cuda\n\n\nclass FunctionSet(object):\n\n \"\"\"Set of objects with ``parameters`` and ``gradients`` properties.\n\n :class:`FunctionSet` is useful to collect parameters and gradients of\n multiple parameterized :class:`Function` objects. :class:`FunctionSet`\n itself also implements :attr:`~FunctionSet.parameters` and\n :attr:`~FunctionSet.gradients`, so it can be nested in another\n :class:`FunctionSet` object.\n\n Function registration is done by just adding an attribute to\n :class:`FunctionSet` object.\n\n \"\"\"\n\n def __init__(self, **functions):\n \"\"\"Initializes the function set by given functions.\n\n Args:\n **functions: ``dict`` of ``str`` key and :class:`Function` values.\n The key-value pairs are just set to the :class:`FunctionSet`\n object as attributes.\n\n \"\"\"\n for name, func in six.iteritems(functions):\n setattr(self, name, func)\n\n def collect_parameters(self):\n \"\"\"Returns a tuple of parameters and gradients.\n\n Returns:\n Tuple (pair) of two tuples. The first element is a tuple of\n parameter arrays, and the second is a tuple of gradient arrays.\n\n \"\"\"\n return self.parameters, self.gradients\n\n def to_gpu(self, device=None):\n \"\"\"Migrates all parameters and gradients onto GPU.\n\n This method calls ``to_gpu`` method of each registered object.\n\n Args:\n device (int or :class:`pycuda.driver.Device` or ``None``): Device\n ID of GPU. 
If ``None`` is given, it uses the current device.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n func.to_gpu(device=device)\n return self\n\n def to_cpu(self):\n \"\"\"Migrates all parameters and gradients onto CPU.\n\n This method calls ``to_cpu`` method of each registered object.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n func.to_cpu()\n return self\n\n def copy_parameters_from(self, params):\n \"\"\"Copies parameters from another source without reallocation.\n\n Args:\n params (Iterable): Iterable of parameter arrays.\n\n \"\"\"\n for dst, src in zip(self.parameters, params):\n if isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n numpy.copyto(dst, src)\n else:\n src.get(dst)\n elif isinstance(src, numpy.ndarray):\n dst.set(src)\n else:\n cuda.copy(src, out=dst)\n\n @property\n def parameters(self):\n \"\"\"Tuple of parameter arrays of all registered functions.\n\n The order of parameters is consistent with :meth:`gradients` property.\n\n \"\"\"\n return sum((func.parameters for _, func in self._get_sorted_funcs()),\n ())\n\n @parameters.setter\n def parameters(self, params):\n param_iter = iter(params)\n for _, func in self._get_sorted_funcs():\n func.parameters = param_iter\n\n @property\n def gradients(self):\n \"\"\"Tuple of gradient arrays of all registered functions.\n\n The order of gradients is consistent with :meth:`parameters` property.\n\n \"\"\"\n return sum((func.gradients for _, func in self._get_sorted_funcs()),\n ())\n\n @gradients.setter\n def gradients(self, grads):\n grad_iter = iter(grads)\n for _, func in self._get_sorted_funcs():\n func.gradients = grad_iter\n\n def _get_sorted_funcs(self):\n return sorted(six.iteritems(self.__dict__))\n", "path": "chainer/function_set.py"}]} | 1,452 | 103 |
gh_patches_debug_55343 | rasdani/github-patches | git_diff | ytdl-org__youtube-dl-16707 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
vidzi.tv doesn't work
$youtube-dl --version
2018.06.04
youtube-dl doesn't work with http://vidzi.tv links
for example:
$youtube-dl http://vidzi.tv/n83vo2mlnpgb
Failed to parse JSON (caused by ValueError("Expecting ',' delimiter: line 12 column 175 (char 771)",)); please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.
$youtube-dl --verbose http://vidzi.tv/n83vo2mlnpgb
[debug] System config: []
[debug] User config: []
[debug] Custom config: []
[debug] Command-line args: [u'--verbose', u'http://vidzi.tv/n83vo2mlnpgb']
[debug] Encodings: locale UTF-8, fs utf-8, out UTF-8, pref UTF-8
[debug] youtube-dl version 2018.06.04
[debug] Python version 2.7.10 (CPython) - Darwin-17.5.0-x86_64-i386-64bit
[debug] exe versions: avconv 12.3, avprobe 12.3, ffmpeg 3.4.2, ffprobe 3.4.2
[debug] Proxy map: {}
[Vidzi] n83vo2mlnpgb: Downloading webpage
ERROR: n83vo2mlnpgb: Failed to parse JSON (caused by ValueError("Expecting ',' delimiter: line 12 column 175 (char 791)",)); please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.
Traceback (most recent call last):
File "/Library/Python/2.7/site-packages/youtube_dl/extractor/common.py", line 774, in _parse_json
return json.loads(json_string)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/__init__.py", line 338, in loads
return _default_decoder.decode(s)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/decoder.py", line 366, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/decoder.py", line 382, in raw_decode
obj, end = self.scan_once(s, idx)
ValueError: Expecting ',' delimiter: line 12 column 175 (char 791)
Traceback (most recent call last):
File "/Library/Python/2.7/site-packages/youtube_dl/YoutubeDL.py", line 792, in extract_info
ie_result = ie.extract(url)
File "/Library/Python/2.7/site-packages/youtube_dl/extractor/common.py", line 500, in extract
ie_result = self._real_extract(url)
File "/Library/Python/2.7/site-packages/youtube_dl/extractor/vidzi.py", line 57, in _real_extract
video_id, transform_source=js_to_json)
File "/Library/Python/2.7/site-packages/youtube_dl/extractor/common.py", line 778, in _parse_json
raise ExtractorError(errmsg, cause=ve)
ExtractorError: n83vo2mlnpgb: Failed to parse JSON (caused by ValueError("Expecting ',' delimiter: line 12 column 175 (char 791)",)); please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.
--- END ISSUE ---
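The traceback points at js_to_json choking on the page's jwplayer `setup(...)` blob, which apparently mixes JSON-like syntax with JavaScript expressions such as string concatenation with `window[...]`. A hedged sketch of one way to pre-clean such a blob before parsing; the blob contents here are invented, and this is not necessarily how the extractor ultimately handles it:
```python
# Sketch only: strip JS concatenation like `+ window["xyz"]` from a jwplayer setup()
# blob so js_to_json can turn it into valid JSON. Example blob is invented.
import json
import re

from youtube_dl.utils import js_to_json  # assumes youtube-dl is installed

raw = '{file: "http://example.com/v.mp4" + window["flvar"], image: "thumb.jpg"}'
cleaned = re.sub(r'\s*\+\s*window\[.+?\]', '', raw)
data = json.loads(js_to_json(cleaned))
print(data["file"])
```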
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `youtube_dl/extractor/vidzi.py`
Content:
```
1 # coding: utf-8
2 from __future__ import unicode_literals
3
4 import re
5
6 from .common import InfoExtractor
7 from ..utils import (
8 decode_packed_codes,
9 js_to_json,
10 NO_DEFAULT,
11 PACKED_CODES_RE,
12 )
13
14
15 class VidziIE(InfoExtractor):
16 _VALID_URL = r'https?://(?:www\.)?vidzi\.(?:tv|cc|si)/(?:embed-)?(?P<id>[0-9a-zA-Z]+)'
17 _TESTS = [{
18 'url': 'http://vidzi.tv/cghql9yq6emu.html',
19 'md5': '4f16c71ca0c8c8635ab6932b5f3f1660',
20 'info_dict': {
21 'id': 'cghql9yq6emu',
22 'ext': 'mp4',
23 'title': 'youtube-dl test video 1\\\\2\'3/4<5\\\\6ä7↭',
24 },
25 'params': {
26 # m3u8 download
27 'skip_download': True,
28 },
29 }, {
30 'url': 'http://vidzi.tv/embed-4z2yb0rzphe9-600x338.html',
31 'only_matching': True,
32 }, {
33 'url': 'http://vidzi.cc/cghql9yq6emu.html',
34 'only_matching': True,
35 }, {
36 'url': 'https://vidzi.si/rph9gztxj1et.html',
37 'only_matching': True,
38 }]
39
40 def _real_extract(self, url):
41 video_id = self._match_id(url)
42
43 webpage = self._download_webpage(
44 'http://vidzi.tv/%s' % video_id, video_id)
45 title = self._html_search_regex(
46 r'(?s)<h2 class="video-title">(.*?)</h2>', webpage, 'title')
47
48 codes = [webpage]
49 codes.extend([
50 decode_packed_codes(mobj.group(0)).replace('\\\'', '\'')
51 for mobj in re.finditer(PACKED_CODES_RE, webpage)])
52 for num, code in enumerate(codes, 1):
53 jwplayer_data = self._parse_json(
54 self._search_regex(
55 r'setup\(([^)]+)\)', code, 'jwplayer data',
56 default=NO_DEFAULT if num == len(codes) else '{}'),
57 video_id, transform_source=js_to_json)
58 if jwplayer_data:
59 break
60
61 info_dict = self._parse_jwplayer_data(jwplayer_data, video_id, require_title=False)
62 info_dict['title'] = title
63
64 return info_dict
65
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/youtube_dl/extractor/vidzi.py b/youtube_dl/extractor/vidzi.py
--- a/youtube_dl/extractor/vidzi.py
+++ b/youtube_dl/extractor/vidzi.py
@@ -54,7 +54,8 @@
self._search_regex(
r'setup\(([^)]+)\)', code, 'jwplayer data',
default=NO_DEFAULT if num == len(codes) else '{}'),
- video_id, transform_source=js_to_json)
+ video_id, transform_source=lambda s: js_to_json(
+ re.sub(r'\s*\+\s*window\[.+?\]', '', s)))
if jwplayer_data:
break
| {"golden_diff": "diff --git a/youtube_dl/extractor/vidzi.py b/youtube_dl/extractor/vidzi.py\n--- a/youtube_dl/extractor/vidzi.py\n+++ b/youtube_dl/extractor/vidzi.py\n@@ -54,7 +54,8 @@\n self._search_regex(\n r'setup\\(([^)]+)\\)', code, 'jwplayer data',\n default=NO_DEFAULT if num == len(codes) else '{}'),\n- video_id, transform_source=js_to_json)\n+ video_id, transform_source=lambda s: js_to_json(\n+ re.sub(r'\\s*\\+\\s*window\\[.+?\\]', '', s)))\n if jwplayer_data:\n break\n", "issue": "vidzi.tv doesn't work\n$youtube-dl --version\r\n2018.06.04\r\n\r\n$youtube-dl and http://vidzi.tv links doesn't work\r\n\r\nfor example:\r\n$youtube-dl http://vidzi.tv/n83vo2mlnpgb\r\n\r\nFailed to parse JSON (caused by ValueError(\"Expecting ',' delimiter: line 12 column 175 (char 771)\",)); please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.\r\n\r\n$youtube-dl --verbose http://vidzi.tv/n83vo2mlnpgb\r\n\r\n[debug] System config: []\r\n[debug] User config: []\r\n[debug] Custom config: []\r\n[debug] Command-line args: [u'--verbose', u'http://vidzi.tv/n83vo2mlnpgb']\r\n[debug] Encodings: locale UTF-8, fs utf-8, out UTF-8, pref UTF-8\r\n[debug] youtube-dl version 2018.06.04\r\n[debug] Python version 2.7.10 (CPython) - Darwin-17.5.0-x86_64-i386-64bit\r\n[debug] exe versions: avconv 12.3, avprobe 12.3, ffmpeg 3.4.2, ffprobe 3.4.2\r\n[debug] Proxy map: {}\r\n[Vidzi] n83vo2mlnpgb: Downloading webpage\r\nERROR: n83vo2mlnpgb: Failed to parse JSON (caused by ValueError(\"Expecting ',' delimiter: line 12 column 175 (char 791)\",)); please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.\r\nTraceback (most recent call last):\r\n File \"/Library/Python/2.7/site-packages/youtube_dl/extractor/common.py\", line 774, in _parse_json\r\n return json.loads(json_string)\r\n File \"/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/__init__.py\", line 338, in loads\r\n return _default_decoder.decode(s)\r\n File \"/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/decoder.py\", line 366, in decode\r\n obj, end = self.raw_decode(s, idx=_w(s, 0).end())\r\n File \"/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/decoder.py\", line 382, in raw_decode\r\n obj, end = self.scan_once(s, idx)\r\nValueError: Expecting ',' delimiter: line 12 column 175 (char 791)\r\nTraceback (most recent call last):\r\n File \"/Library/Python/2.7/site-packages/youtube_dl/YoutubeDL.py\", line 792, in extract_info\r\n ie_result = ie.extract(url)\r\n File \"/Library/Python/2.7/site-packages/youtube_dl/extractor/common.py\", line 500, in extract\r\n ie_result = self._real_extract(url)\r\n File \"/Library/Python/2.7/site-packages/youtube_dl/extractor/vidzi.py\", line 57, in _real_extract\r\n video_id, transform_source=js_to_json)\r\n File \"/Library/Python/2.7/site-packages/youtube_dl/extractor/common.py\", line 778, in _parse_json\r\n raise ExtractorError(errmsg, cause=ve)\r\nExtractorError: n83vo2mlnpgb: Failed to parse JSON (caused by ValueError(\"Expecting ',' delimiter: line 12 column 175 (char 791)\",)); please report this issue on https://yt-dl.org/bug . 
Make sure you are using the latest version; see https://yt-dl.org/update on how to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.\r\n\r\n\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport re\n\nfrom .common import InfoExtractor\nfrom ..utils import (\n decode_packed_codes,\n js_to_json,\n NO_DEFAULT,\n PACKED_CODES_RE,\n)\n\n\nclass VidziIE(InfoExtractor):\n _VALID_URL = r'https?://(?:www\\.)?vidzi\\.(?:tv|cc|si)/(?:embed-)?(?P<id>[0-9a-zA-Z]+)'\n _TESTS = [{\n 'url': 'http://vidzi.tv/cghql9yq6emu.html',\n 'md5': '4f16c71ca0c8c8635ab6932b5f3f1660',\n 'info_dict': {\n 'id': 'cghql9yq6emu',\n 'ext': 'mp4',\n 'title': 'youtube-dl test video 1\\\\\\\\2\\'3/4<5\\\\\\\\6\u00e47\u21ad',\n },\n 'params': {\n # m3u8 download\n 'skip_download': True,\n },\n }, {\n 'url': 'http://vidzi.tv/embed-4z2yb0rzphe9-600x338.html',\n 'only_matching': True,\n }, {\n 'url': 'http://vidzi.cc/cghql9yq6emu.html',\n 'only_matching': True,\n }, {\n 'url': 'https://vidzi.si/rph9gztxj1et.html',\n 'only_matching': True,\n }]\n\n def _real_extract(self, url):\n video_id = self._match_id(url)\n\n webpage = self._download_webpage(\n 'http://vidzi.tv/%s' % video_id, video_id)\n title = self._html_search_regex(\n r'(?s)<h2 class=\"video-title\">(.*?)</h2>', webpage, 'title')\n\n codes = [webpage]\n codes.extend([\n decode_packed_codes(mobj.group(0)).replace('\\\\\\'', '\\'')\n for mobj in re.finditer(PACKED_CODES_RE, webpage)])\n for num, code in enumerate(codes, 1):\n jwplayer_data = self._parse_json(\n self._search_regex(\n r'setup\\(([^)]+)\\)', code, 'jwplayer data',\n default=NO_DEFAULT if num == len(codes) else '{}'),\n video_id, transform_source=js_to_json)\n if jwplayer_data:\n break\n\n info_dict = self._parse_jwplayer_data(jwplayer_data, video_id, require_title=False)\n info_dict['title'] = title\n\n return info_dict\n", "path": "youtube_dl/extractor/vidzi.py"}], "after_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport re\n\nfrom .common import InfoExtractor\nfrom ..utils import (\n decode_packed_codes,\n js_to_json,\n NO_DEFAULT,\n PACKED_CODES_RE,\n)\n\n\nclass VidziIE(InfoExtractor):\n _VALID_URL = r'https?://(?:www\\.)?vidzi\\.(?:tv|cc|si)/(?:embed-)?(?P<id>[0-9a-zA-Z]+)'\n _TESTS = [{\n 'url': 'http://vidzi.tv/cghql9yq6emu.html',\n 'md5': '4f16c71ca0c8c8635ab6932b5f3f1660',\n 'info_dict': {\n 'id': 'cghql9yq6emu',\n 'ext': 'mp4',\n 'title': 'youtube-dl test video 1\\\\\\\\2\\'3/4<5\\\\\\\\6\u00e47\u21ad',\n },\n 'params': {\n # m3u8 download\n 'skip_download': True,\n },\n }, {\n 'url': 'http://vidzi.tv/embed-4z2yb0rzphe9-600x338.html',\n 'only_matching': True,\n }, {\n 'url': 'http://vidzi.cc/cghql9yq6emu.html',\n 'only_matching': True,\n }, {\n 'url': 'https://vidzi.si/rph9gztxj1et.html',\n 'only_matching': True,\n }]\n\n def _real_extract(self, url):\n video_id = self._match_id(url)\n\n webpage = self._download_webpage(\n 'http://vidzi.tv/%s' % video_id, video_id)\n title = self._html_search_regex(\n r'(?s)<h2 class=\"video-title\">(.*?)</h2>', webpage, 'title')\n\n codes = [webpage]\n codes.extend([\n decode_packed_codes(mobj.group(0)).replace('\\\\\\'', '\\'')\n for mobj in re.finditer(PACKED_CODES_RE, webpage)])\n for num, code in enumerate(codes, 1):\n jwplayer_data = self._parse_json(\n self._search_regex(\n r'setup\\(([^)]+)\\)', code, 'jwplayer data',\n default=NO_DEFAULT if num == len(codes) else '{}'),\n video_id, transform_source=lambda s: js_to_json(\n 
re.sub(r'\\s*\\+\\s*window\\[.+?\\]', '', s)))\n if jwplayer_data:\n break\n\n info_dict = self._parse_jwplayer_data(jwplayer_data, video_id, require_title=False)\n info_dict['title'] = title\n\n return info_dict\n", "path": "youtube_dl/extractor/vidzi.py"}]} | 1,950 | 159 |
gh_patches_debug_15485 | rasdani/github-patches | git_diff | keras-team__autokeras-1145 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
IO API, multi-modal classification, predict method problem
### Bug Description
IO API, multi-modal classification, predict method problem
### Bug Reproduction
https://github.com/datamllab/automl-in-action-notebooks/blob/master/3.4.2-Functional-API-Multi-Input.ipynb
### Setup Details
Include the details about the versions of:
- OS type and version:
- Python:
- autokeras: 1.0.2
- keras-tuner:
- scikit-learn:
- numpy:
- pandas:
- tensorflow: 2.1.0
### Additional context
<!---
If applicable, add any other context about the problem.
-->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `autokeras/keras_layers.py`
Content:
```
1 import inspect
2
3 import tensorflow as tf
4 from tensorflow.keras.layers.experimental import preprocessing
5 from tensorflow.python.keras.layers.preprocessing import index_lookup
6 from tensorflow.python.util import nest
7
8 CombinerPreprocessingLayer = inspect.getmro(preprocessing.Normalization)[1]
9 Combiner = inspect.getmro(preprocessing.Normalization()._combiner.__class__)[1]
10
11 INT = 'int'
12 NONE = 'none'
13 ONE_HOT = 'one-hot'
14
15
16 class MultiColumnCategoricalEncoding(preprocessing.PreprocessingLayer):
17 """Encode the categorical features to numerical features.
18
19 # Arguments
20 encoding: A list of strings, which has the same number of elements as the
21 columns in the structured data. Each of the strings specifies the
22 encoding method used for the corresponding column. Use 'int' for
23 categorical columns and 'none' for numerical columns.
24 """
25
26 # TODO: Support one-hot encoding.
27 # TODO: Support frequency encoding.
28
29 def __init__(self, encoding, **kwargs):
30 super().__init__(**kwargs)
31 self.encoding = encoding
32 self.encoding_layers = []
33 for encoding in self.encoding:
34 if encoding == NONE:
35 self.encoding_layers.append(None)
36 elif encoding == INT:
37 self.encoding_layers.append(index_lookup.IndexLookup())
38 elif encoding == ONE_HOT:
39 self.encoding_layers.append(None)
40
41 def build(self, input_shape):
42 for encoding_layer in self.encoding_layers:
43 if encoding_layer is not None:
44 encoding_layer.build(tf.TensorShape([1]))
45
46 def call(self, inputs):
47 input_nodes = nest.flatten(inputs)[0]
48 split_inputs = tf.split(input_nodes, [1] * len(self.encoding), axis=-1)
49 output_nodes = []
50 for input_node, encoding_layer in zip(split_inputs, self.encoding_layers):
51 if encoding_layer is None:
52 output_nodes.append(tf.strings.to_number(input_node, tf.float32))
53 else:
54 output_nodes.append(tf.cast(encoding_layer(input_node), tf.float32))
55 return tf.keras.layers.Concatenate()(output_nodes)
56
57 def adapt(self, data):
58 for index, encoding_layer in enumerate(self.encoding_layers):
59 if encoding_layer is None:
60 continue
61 data_column = data.map(lambda x: tf.slice(x, [0, index], [-1, 1]))
62 encoding_layer.adapt(data_column)
63
64 def get_config(self):
65 config = {
66 'encoding': self.encoding,
67 }
68 base_config = super().get_config()
69 return dict(list(base_config.items()) + list(config.items()))
70
71
72 CUSTOM_OBJECTS = {
73 'MultiColumnCategoricalEncoding': MultiColumnCategoricalEncoding,
74 }
75
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/autokeras/keras_layers.py b/autokeras/keras_layers.py
--- a/autokeras/keras_layers.py
+++ b/autokeras/keras_layers.py
@@ -49,7 +49,12 @@
output_nodes = []
for input_node, encoding_layer in zip(split_inputs, self.encoding_layers):
if encoding_layer is None:
- output_nodes.append(tf.strings.to_number(input_node, tf.float32))
+ number = tf.strings.to_number(input_node, tf.float32)
+ # Replace NaN with 0.
+ imputed = tf.where(tf.math.is_nan(number),
+ tf.zeros_like(number),
+ number)
+ output_nodes.append(imputed)
else:
output_nodes.append(tf.cast(encoding_layer(input_node), tf.float32))
return tf.keras.layers.Concatenate()(output_nodes)
| {"golden_diff": "diff --git a/autokeras/keras_layers.py b/autokeras/keras_layers.py\n--- a/autokeras/keras_layers.py\n+++ b/autokeras/keras_layers.py\n@@ -49,7 +49,12 @@\n output_nodes = []\n for input_node, encoding_layer in zip(split_inputs, self.encoding_layers):\n if encoding_layer is None:\n- output_nodes.append(tf.strings.to_number(input_node, tf.float32))\n+ number = tf.strings.to_number(input_node, tf.float32)\n+ # Replace NaN with 0.\n+ imputed = tf.where(tf.math.is_nan(number),\n+ tf.zeros_like(number),\n+ number)\n+ output_nodes.append(imputed)\n else:\n output_nodes.append(tf.cast(encoding_layer(input_node), tf.float32))\n return tf.keras.layers.Concatenate()(output_nodes)\n", "issue": "IO API, multi-modal classification, predict method problem\n### Bug Description\r\nIO API, multi-modal classification, predict method problem\r\n\r\n\r\n### Bug Reproduction\r\n\r\nhttps://github.com/datamllab/automl-in-action-notebooks/blob/master/3.4.2-Functional-API-Multi-Input.ipynb\r\n\r\n\r\n### Setup Details\r\nInclude the details about the versions of:\r\n - OS type and version:\r\n - Python: \r\n - autokeras: 1.0.2\r\n - keras-tuner:\r\n - scikit-learn:\r\n - numpy:\r\n - pandas:\r\n - tensorflow: 2.1.0\r\n\r\n### Additional context\r\n<!---\r\nIf applicable, add any other context about the problem.\r\n-->\r\n\n", "before_files": [{"content": "import inspect\n\nimport tensorflow as tf\nfrom tensorflow.keras.layers.experimental import preprocessing\nfrom tensorflow.python.keras.layers.preprocessing import index_lookup\nfrom tensorflow.python.util import nest\n\nCombinerPreprocessingLayer = inspect.getmro(preprocessing.Normalization)[1]\nCombiner = inspect.getmro(preprocessing.Normalization()._combiner.__class__)[1]\n\nINT = 'int'\nNONE = 'none'\nONE_HOT = 'one-hot'\n\n\nclass MultiColumnCategoricalEncoding(preprocessing.PreprocessingLayer):\n \"\"\"Encode the categorical features to numerical features.\n\n # Arguments\n encoding: A list of strings, which has the same number of elements as the\n columns in the structured data. Each of the strings specifies the\n encoding method used for the corresponding column. 
Use 'int' for\n categorical columns and 'none' for numerical columns.\n \"\"\"\n\n # TODO: Support one-hot encoding.\n # TODO: Support frequency encoding.\n\n def __init__(self, encoding, **kwargs):\n super().__init__(**kwargs)\n self.encoding = encoding\n self.encoding_layers = []\n for encoding in self.encoding:\n if encoding == NONE:\n self.encoding_layers.append(None)\n elif encoding == INT:\n self.encoding_layers.append(index_lookup.IndexLookup())\n elif encoding == ONE_HOT:\n self.encoding_layers.append(None)\n\n def build(self, input_shape):\n for encoding_layer in self.encoding_layers:\n if encoding_layer is not None:\n encoding_layer.build(tf.TensorShape([1]))\n\n def call(self, inputs):\n input_nodes = nest.flatten(inputs)[0]\n split_inputs = tf.split(input_nodes, [1] * len(self.encoding), axis=-1)\n output_nodes = []\n for input_node, encoding_layer in zip(split_inputs, self.encoding_layers):\n if encoding_layer is None:\n output_nodes.append(tf.strings.to_number(input_node, tf.float32))\n else:\n output_nodes.append(tf.cast(encoding_layer(input_node), tf.float32))\n return tf.keras.layers.Concatenate()(output_nodes)\n\n def adapt(self, data):\n for index, encoding_layer in enumerate(self.encoding_layers):\n if encoding_layer is None:\n continue\n data_column = data.map(lambda x: tf.slice(x, [0, index], [-1, 1]))\n encoding_layer.adapt(data_column)\n\n def get_config(self):\n config = {\n 'encoding': self.encoding,\n }\n base_config = super().get_config()\n return dict(list(base_config.items()) + list(config.items()))\n\n\nCUSTOM_OBJECTS = {\n 'MultiColumnCategoricalEncoding': MultiColumnCategoricalEncoding,\n}\n", "path": "autokeras/keras_layers.py"}], "after_files": [{"content": "import inspect\n\nimport tensorflow as tf\nfrom tensorflow.keras.layers.experimental import preprocessing\nfrom tensorflow.python.keras.layers.preprocessing import index_lookup\nfrom tensorflow.python.util import nest\n\nCombinerPreprocessingLayer = inspect.getmro(preprocessing.Normalization)[1]\nCombiner = inspect.getmro(preprocessing.Normalization()._combiner.__class__)[1]\n\nINT = 'int'\nNONE = 'none'\nONE_HOT = 'one-hot'\n\n\nclass MultiColumnCategoricalEncoding(preprocessing.PreprocessingLayer):\n \"\"\"Encode the categorical features to numerical features.\n\n # Arguments\n encoding: A list of strings, which has the same number of elements as the\n columns in the structured data. Each of the strings specifies the\n encoding method used for the corresponding column. 
Use 'int' for\n categorical columns and 'none' for numerical columns.\n \"\"\"\n\n # TODO: Support one-hot encoding.\n # TODO: Support frequency encoding.\n\n def __init__(self, encoding, **kwargs):\n super().__init__(**kwargs)\n self.encoding = encoding\n self.encoding_layers = []\n for encoding in self.encoding:\n if encoding == NONE:\n self.encoding_layers.append(None)\n elif encoding == INT:\n self.encoding_layers.append(index_lookup.IndexLookup())\n elif encoding == ONE_HOT:\n self.encoding_layers.append(None)\n\n def build(self, input_shape):\n for encoding_layer in self.encoding_layers:\n if encoding_layer is not None:\n encoding_layer.build(tf.TensorShape([1]))\n\n def call(self, inputs):\n input_nodes = nest.flatten(inputs)[0]\n split_inputs = tf.split(input_nodes, [1] * len(self.encoding), axis=-1)\n output_nodes = []\n for input_node, encoding_layer in zip(split_inputs, self.encoding_layers):\n if encoding_layer is None:\n number = tf.strings.to_number(input_node, tf.float32)\n # Replace NaN with 0.\n imputed = tf.where(tf.math.is_nan(number),\n tf.zeros_like(number),\n number)\n output_nodes.append(imputed)\n else:\n output_nodes.append(tf.cast(encoding_layer(input_node), tf.float32))\n return tf.keras.layers.Concatenate()(output_nodes)\n\n def adapt(self, data):\n for index, encoding_layer in enumerate(self.encoding_layers):\n if encoding_layer is None:\n continue\n data_column = data.map(lambda x: tf.slice(x, [0, index], [-1, 1]))\n encoding_layer.adapt(data_column)\n\n def get_config(self):\n config = {\n 'encoding': self.encoding,\n }\n base_config = super().get_config()\n return dict(list(base_config.items()) + list(config.items()))\n\n\nCUSTOM_OBJECTS = {\n 'MultiColumnCategoricalEncoding': MultiColumnCategoricalEncoding,\n}\n", "path": "autokeras/keras_layers.py"}]} | 1,107 | 193 |
gh_patches_debug_21069 | rasdani/github-patches | git_diff | fossasia__open-event-server-9044 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cannot save the badge field
```
HINT: You will need to rewrite or cast the expression.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/data/app/app/api/helpers/db.py", line 27, in save_to_db
db.session.commit()
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/scoping.py", line 163, in do
return getattr(self.registry(), name)(*args, **kwargs)
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 1046, in commit
self.transaction.commit()
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 504, in commit
self._prepare_impl()
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 483, in _prepare_impl
self.session.flush()
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 2540, in flush
self._flush(objects)
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 2682, in _flush
transaction.rollback(_capture_exception=True)
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/util/langhelpers.py", line 68, in __exit__
compat.raise_(
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 2642, in _flush
flush_context.execute()
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/unitofwork.py", line 422, in execute
rec.execute(self)
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/unitofwork.py", line 586, in execute
persistence.save_obj(
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/persistence.py", line 239, in save_obj
_emit_insert_statements(
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/persistence.py", line 1135, in _emit_insert_statements
result = cached_connections[connection].execute(
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/sql/elements.py", line 298, in _execute_on_connection
return connection._execute_clauseelement(self, multiparams, params)
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 1124, in _execute_clauseelement
ret = self._execute_context(
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 1316, in _execute_context
self._handle_dbapi_exception(
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 1510, in _handle_dbapi_exception
util.raise_(
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 1276, in _execute_context
self.dialect.do_execute(
File "/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/default.py", line 608, in do_execute
cursor.execute(statement, parameters)
sqlalchemy.exc.ProgrammingError: (psycopg2.errors.DatatypeMismatch) column "font_weight" is of type integer but expression is of type json[]
LINE 1: ...e', 'Last Name', 'Sample Text', 14, 'Arial', CAST(ARRAY['{"n...
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py`
Content:
```
1 """empty message
2
3 Revision ID: 8b5bc48e1d4c
4 Revises: 21c79d253f21
5 Create Date: 2023-08-01 14:10:12.187180
6
7 """
8
9 from alembic import op
10 import sqlalchemy as sa
11 from sqlalchemy.dialects import postgresql
12
13 # revision identifiers, used by Alembic.
14 revision = '8b5bc48e1d4c'
15 down_revision = '21c79d253f21'
16
17
18 def upgrade():
19 # ### commands auto generated by Alembic - please adjust! ###
20 op.alter_column('badge_field_forms', 'font_weight',
21 existing_type=sa.TEXT(),
22 type_=postgresql.ARRAY(sa.JSON()),
23 postgresql_using='font_weight::json[]',
24 existing_nullable=True)
25 # ### end Alembic commands ###
26
27
28 def downgrade():
29 # ### commands auto generated by Alembic - please adjust! ###
30 op.alter_column('badge_field_forms', 'font_weight',
31 existing_type=postgresql.ARRAY(sa.JSON()),
32 type_=sa.TEXT(),
33 existing_nullable=True)
34 # ### end Alembic commands ###
35
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py b/migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py
--- a/migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py
+++ b/migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py
@@ -17,18 +17,15 @@
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
- op.alter_column('badge_field_forms', 'font_weight',
- existing_type=sa.TEXT(),
- type_=postgresql.ARRAY(sa.JSON()),
- postgresql_using='font_weight::json[]',
- existing_nullable=True)
+ op.drop_column('badge_field_forms', 'font_weight')
+ op.add_column('badge_field_forms', sa.Column('font_weight',
+ postgresql.ARRAY(sa.JSON()), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
- op.alter_column('badge_field_forms', 'font_weight',
- existing_type=postgresql.ARRAY(sa.JSON()),
- type_=sa.TEXT(),
- existing_nullable=True)
+ op.drop_column('badge_field_forms', 'font_weight')
+ op.add_column('badge_field_forms', sa.Column('font_weight',
+ sa.Integer(), nullable=True))
# ### end Alembic commands ###
| {"golden_diff": "diff --git a/migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py b/migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py\n--- a/migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py\n+++ b/migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py\n@@ -17,18 +17,15 @@\n \n def upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n- op.alter_column('badge_field_forms', 'font_weight',\n- existing_type=sa.TEXT(),\n- type_=postgresql.ARRAY(sa.JSON()),\n- postgresql_using='font_weight::json[]',\n- existing_nullable=True)\n+ op.drop_column('badge_field_forms', 'font_weight')\n+ op.add_column('badge_field_forms', sa.Column('font_weight',\n+ postgresql.ARRAY(sa.JSON()), nullable=True))\n # ### end Alembic commands ###\n \n \n def downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n- op.alter_column('badge_field_forms', 'font_weight',\n- existing_type=postgresql.ARRAY(sa.JSON()),\n- type_=sa.TEXT(),\n- existing_nullable=True)\n+ op.drop_column('badge_field_forms', 'font_weight')\n+ op.add_column('badge_field_forms', sa.Column('font_weight',\n+ sa.Integer(), nullable=True))\n # ### end Alembic commands ###\n", "issue": "Cannot save the badge field\n```\r\nHINT: You will need to rewrite or cast the expression.\r\n\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File \"/data/app/app/api/helpers/db.py\", line 27, in save_to_db\r\n db.session.commit()\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/scoping.py\", line 163, in do\r\n return getattr(self.registry(), name)(*args, **kwargs)\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py\", line 1046, in commit\r\n self.transaction.commit()\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py\", line 504, in commit\r\n self._prepare_impl()\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py\", line 483, in _prepare_impl\r\n self.session.flush()\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py\", line 2540, in flush\r\n self._flush(objects)\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py\", line 2682, in _flush\r\n transaction.rollback(_capture_exception=True)\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/util/langhelpers.py\", line 68, in __exit__\r\n compat.raise_(\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/util/compat.py\", line 182, in raise_\r\n raise exception\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/session.py\", line 2642, in _flush\r\n flush_context.execute()\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/unitofwork.py\", line 422, in execute\r\n rec.execute(self)\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/unitofwork.py\", line 586, in execute\r\n persistence.save_obj(\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/persistence.py\", line 239, in save_obj\r\n _emit_insert_statements(\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/orm/persistence.py\", line 1135, in _emit_insert_statements\r\n result = cached_connections[connection].execute(\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py\", line 1011, in execute\r\n return meth(self, multiparams, params)\r\n File 
\"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/sql/elements.py\", line 298, in _execute_on_connection\r\n return connection._execute_clauseelement(self, multiparams, params)\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py\", line 1124, in _execute_clauseelement\r\n ret = self._execute_context(\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py\", line 1316, in _execute_context\r\n self._handle_dbapi_exception(\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py\", line 1510, in _handle_dbapi_exception\r\n util.raise_(\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/util/compat.py\", line 182, in raise_\r\n raise exception\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py\", line 1276, in _execute_context\r\n self.dialect.do_execute(\r\n File \"/opt/pysetup/.venv/lib/python3.8/site-packages/sqlalchemy/engine/default.py\", line 608, in do_execute\r\n cursor.execute(statement, parameters)\r\nsqlalchemy.exc.ProgrammingError: (psycopg2.errors.DatatypeMismatch) column \"font_weight\" is of type integer but expression is of type json[]\r\nLINE 1: ...e', 'Last Name', 'Sample Text', 14, 'Arial', CAST(ARRAY['{\"n...\r\n```\n", "before_files": [{"content": "\"\"\"empty message\n\nRevision ID: 8b5bc48e1d4c\nRevises: 21c79d253f21\nCreate Date: 2023-08-01 14:10:12.187180\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects import postgresql\n\n# revision identifiers, used by Alembic.\nrevision = '8b5bc48e1d4c'\ndown_revision = '21c79d253f21'\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.alter_column('badge_field_forms', 'font_weight',\n existing_type=sa.TEXT(),\n type_=postgresql.ARRAY(sa.JSON()),\n postgresql_using='font_weight::json[]',\n existing_nullable=True)\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.alter_column('badge_field_forms', 'font_weight',\n existing_type=postgresql.ARRAY(sa.JSON()),\n type_=sa.TEXT(),\n existing_nullable=True)\n # ### end Alembic commands ###\n", "path": "migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py"}], "after_files": [{"content": "\"\"\"empty message\n\nRevision ID: 8b5bc48e1d4c\nRevises: 21c79d253f21\nCreate Date: 2023-08-01 14:10:12.187180\n\n\"\"\"\n\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects import postgresql\n\n# revision identifiers, used by Alembic.\nrevision = '8b5bc48e1d4c'\ndown_revision = '21c79d253f21'\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_column('badge_field_forms', 'font_weight')\n op.add_column('badge_field_forms', sa.Column('font_weight',\n postgresql.ARRAY(sa.JSON()), nullable=True))\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_column('badge_field_forms', 'font_weight')\n op.add_column('badge_field_forms', sa.Column('font_weight',\n sa.Integer(), nullable=True))\n # ### end Alembic commands ###\n", "path": "migrations/versions/rev-2023-08-01-14:10:12-8b5bc48e1d4c_.py"}]} | 1,619 | 412 |
gh_patches_debug_4684 | rasdani/github-patches | git_diff | pwr-Solaar__Solaar-1026 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
metainfo file installed in the wrong place
**Information**
- Solaar version (`solaar --version` or `git describe --tags` if cloned from this repository): 1.0.4-57-g69f889e
- Distribution: Fedora
- Kernel version (ex. `uname -srmo`): N/A
- Output of `solaar show`: N/A
**Describe the bug**
The `metainfo.xml` file gets installed into the wrong location, i.e. `/usr/share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml/metainfo.xml`.
**To Reproduce**
Steps to reproduce the behavior (this is part of RPM package build process, hence the `--root xxx` option):
```
...
/usr/bin/python3 setup.py install -O1 --skip-build --root /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64
...
running install_data
creating /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64/usr/share
...
creating /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64/usr/share/metainfo
creating /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64/usr/share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml
copying share/solaar/metainfo.xml -> /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64/usr/share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml
...
```
**Screenshots**
N/A
**Additional context**
The correct location is: `/usr/share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml` (a file under `/usr/share/metainfo`, not a directory).
The solution is to rename `metainfo.xml` to `io.github.pwr_solaar.solaar.metainfo.xml` and install it under `/usr/share/metainfo` in `setup.py`. I'll send a PR shortly.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python3
2
3 from glob import glob as _glob
4
5 try:
6 from setuptools import setup
7 except ImportError:
8 from distutils.core import setup
9
10 # from solaar import NAME, __version__
11 __version__ = '1.0.4'
12 NAME = 'Solaar'
13
14
15 def _data_files():
16 from os.path import dirname as _dirname
17
18 yield 'share/solaar/icons', _glob('share/solaar/icons/solaar*.svg')
19 yield 'share/solaar/icons', _glob('share/solaar/icons/light_*.png')
20 yield 'share/icons/hicolor/scalable/apps', ['share/solaar/icons/solaar.svg']
21
22 for mo in _glob('share/locale/*/LC_MESSAGES/solaar.mo'):
23 yield _dirname(mo), [mo]
24
25 yield 'share/applications', ['share/applications/solaar.desktop']
26 yield 'share/solaar/udev-rules.d', ['rules.d/42-logitech-unify-permissions.rules']
27 yield 'share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml', ['share/solaar/metainfo.xml']
28
29 del _dirname
30
31
32 setup(
33 name=NAME.lower(),
34 version=__version__,
35 description='Linux devices manager for the Logitech Unifying Receiver.',
36 long_description='''
37 Solaar is a Linux device manager for Logitech's Unifying Receiver peripherals.
38 It is able to pair/unpair devices with the receiver, for many devices show
39 battery status, and show and modify some of the modifiable features of devices.
40 '''.strip(),
41 author='Daniel Pavel',
42 license='GPLv2',
43 url='http://pwr-solaar.github.io/Solaar/',
44 classifiers=[
45 'Development Status :: 4 - Beta',
46 'Environment :: X11 Applications :: GTK',
47 'Environment :: Console',
48 'Intended Audience :: End Users/Desktop',
49 'License :: DFSG approved',
50 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
51 'Natural Language :: English',
52 'Programming Language :: Python :: 3 :: Only',
53 'Operating System :: POSIX :: Linux',
54 'Topic :: Utilities',
55 ],
56 platforms=['linux'],
57
58 # sudo apt install python-gi python3-gi \
59 # gir1.2-gtk-3.0 gir1.2-notify-0.7 gir1.2-ayatanaappindicator3-0.1
60 # os_requires=['gi.repository.GObject (>= 2.0)', 'gi.repository.Gtk (>= 3.0)'],
61 python_requires='>=3.6',
62 install_requires=[
63 'pyudev (>= 0.13)',
64 'PyYAML (>= 5.1)',
65 'python-xlib (>= 0.27)',
66 'psutil (>= 5.6.0)',
67 ],
68 package_dir={'': 'lib'},
69 packages=['hidapi', 'logitech_receiver', 'solaar', 'solaar.ui', 'solaar.cli'],
70 data_files=list(_data_files()),
71 scripts=_glob('bin/*'),
72 )
73
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -24,7 +24,7 @@
yield 'share/applications', ['share/applications/solaar.desktop']
yield 'share/solaar/udev-rules.d', ['rules.d/42-logitech-unify-permissions.rules']
- yield 'share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml', ['share/solaar/metainfo.xml']
+ yield 'share/metainfo', ['share/solaar/io.github.pwr_solaar.solaar.metainfo.xml']
del _dirname
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -24,7 +24,7 @@\n \n yield 'share/applications', ['share/applications/solaar.desktop']\n yield 'share/solaar/udev-rules.d', ['rules.d/42-logitech-unify-permissions.rules']\n- yield 'share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml', ['share/solaar/metainfo.xml']\n+ yield 'share/metainfo', ['share/solaar/io.github.pwr_solaar.solaar.metainfo.xml']\n \n del _dirname\n", "issue": "metainfo file installed in the wrong place\n**Information**\r\n- Solaar version (`solaar --version` or `git describe --tags` if cloned from this repository): 1.0.4-57-g69f889e\r\n- Distribution: Fedora\r\n- Kernel version (ex. `uname -srmo`): N/A\r\n- Output of `solaar show`: N/A\r\n\r\n**Describe the bug**\r\nThe `metainfo.xml` file gets installed into the wrong location, i.e. `/usr/share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml/metainfo.xml`.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior (this is part of RPM package build process, hence the `--root xxx` option):\r\n```\r\n...\r\n/usr/bin/python3 setup.py install -O1 --skip-build --root /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64\r\n...\r\nrunning install_data\r\ncreating /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64/usr/share\r\n...\r\ncreating /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64/usr/share/metainfo\r\ncreating /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64/usr/share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml\r\ncopying share/solaar/metainfo.xml -> /builddir/build/BUILDROOT/solaar-1.0.4-3.fc33.x86_64/usr/share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml\r\n...\r\n```\r\n\r\n**Screenshots**\r\nN/A\r\n\r\n**Additional context**\r\nThe correct location is: `/usr/share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml` (a file under `/usr/share/metainfo`, not a directory).\r\nThe solution is to rename `metainfo.xml` to `io.github.pwr_solaar.solaar.metainfo.xml` and install it under `/usr/share/metainfo` in `setup.py`. 
I'll send a PR shortly.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nfrom glob import glob as _glob\n\ntry:\n from setuptools import setup\nexcept ImportError:\n from distutils.core import setup\n\n# from solaar import NAME, __version__\n__version__ = '1.0.4'\nNAME = 'Solaar'\n\n\ndef _data_files():\n from os.path import dirname as _dirname\n\n yield 'share/solaar/icons', _glob('share/solaar/icons/solaar*.svg')\n yield 'share/solaar/icons', _glob('share/solaar/icons/light_*.png')\n yield 'share/icons/hicolor/scalable/apps', ['share/solaar/icons/solaar.svg']\n\n for mo in _glob('share/locale/*/LC_MESSAGES/solaar.mo'):\n yield _dirname(mo), [mo]\n\n yield 'share/applications', ['share/applications/solaar.desktop']\n yield 'share/solaar/udev-rules.d', ['rules.d/42-logitech-unify-permissions.rules']\n yield 'share/metainfo/io.github.pwr_solaar.solaar.metainfo.xml', ['share/solaar/metainfo.xml']\n\n del _dirname\n\n\nsetup(\n name=NAME.lower(),\n version=__version__,\n description='Linux devices manager for the Logitech Unifying Receiver.',\n long_description='''\nSolaar is a Linux device manager for Logitech's Unifying Receiver peripherals.\nIt is able to pair/unpair devices with the receiver, for many devices show\nbattery status, and show and modify some of the modifiable features of devices.\n'''.strip(),\n author='Daniel Pavel',\n license='GPLv2',\n url='http://pwr-solaar.github.io/Solaar/',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Environment :: X11 Applications :: GTK',\n 'Environment :: Console',\n 'Intended Audience :: End Users/Desktop',\n 'License :: DFSG approved',\n 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',\n 'Natural Language :: English',\n 'Programming Language :: Python :: 3 :: Only',\n 'Operating System :: POSIX :: Linux',\n 'Topic :: Utilities',\n ],\n platforms=['linux'],\n\n # sudo apt install python-gi python3-gi \\\n # gir1.2-gtk-3.0 gir1.2-notify-0.7 gir1.2-ayatanaappindicator3-0.1\n # os_requires=['gi.repository.GObject (>= 2.0)', 'gi.repository.Gtk (>= 3.0)'],\n python_requires='>=3.6',\n install_requires=[\n 'pyudev (>= 0.13)',\n 'PyYAML (>= 5.1)',\n 'python-xlib (>= 0.27)',\n 'psutil (>= 5.6.0)',\n ],\n package_dir={'': 'lib'},\n packages=['hidapi', 'logitech_receiver', 'solaar', 'solaar.ui', 'solaar.cli'],\n data_files=list(_data_files()),\n scripts=_glob('bin/*'),\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\nfrom glob import glob as _glob\n\ntry:\n from setuptools import setup\nexcept ImportError:\n from distutils.core import setup\n\n# from solaar import NAME, __version__\n__version__ = '1.0.4'\nNAME = 'Solaar'\n\n\ndef _data_files():\n from os.path import dirname as _dirname\n\n yield 'share/solaar/icons', _glob('share/solaar/icons/solaar*.svg')\n yield 'share/solaar/icons', _glob('share/solaar/icons/light_*.png')\n yield 'share/icons/hicolor/scalable/apps', ['share/solaar/icons/solaar.svg']\n\n for mo in _glob('share/locale/*/LC_MESSAGES/solaar.mo'):\n yield _dirname(mo), [mo]\n\n yield 'share/applications', ['share/applications/solaar.desktop']\n yield 'share/solaar/udev-rules.d', ['rules.d/42-logitech-unify-permissions.rules']\n yield 'share/metainfo', ['share/solaar/io.github.pwr_solaar.solaar.metainfo.xml']\n\n del _dirname\n\n\nsetup(\n name=NAME.lower(),\n version=__version__,\n description='Linux devices manager for the Logitech Unifying Receiver.',\n long_description='''\nSolaar is a Linux device manager for Logitech's Unifying Receiver peripherals.\nIt is 
able to pair/unpair devices with the receiver, for many devices show\nbattery status, and show and modify some of the modifiable features of devices.\n'''.strip(),\n author='Daniel Pavel',\n license='GPLv2',\n url='http://pwr-solaar.github.io/Solaar/',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Environment :: X11 Applications :: GTK',\n 'Environment :: Console',\n 'Intended Audience :: End Users/Desktop',\n 'License :: DFSG approved',\n 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',\n 'Natural Language :: English',\n 'Programming Language :: Python :: 3 :: Only',\n 'Operating System :: POSIX :: Linux',\n 'Topic :: Utilities',\n ],\n platforms=['linux'],\n\n # sudo apt install python-gi python3-gi \\\n # gir1.2-gtk-3.0 gir1.2-notify-0.7 gir1.2-ayatanaappindicator3-0.1\n # os_requires=['gi.repository.GObject (>= 2.0)', 'gi.repository.Gtk (>= 3.0)'],\n python_requires='>=3.6',\n install_requires=[\n 'pyudev (>= 0.13)',\n 'PyYAML (>= 5.1)',\n 'python-xlib (>= 0.27)',\n 'psutil (>= 5.6.0)',\n ],\n package_dir={'': 'lib'},\n packages=['hidapi', 'logitech_receiver', 'solaar', 'solaar.ui', 'solaar.cli'],\n data_files=list(_data_files()),\n scripts=_glob('bin/*'),\n)\n", "path": "setup.py"}]} | 1,587 | 144 |
gh_patches_debug_54801 | rasdani/github-patches | git_diff | certbot__certbot-2707 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
letsencrypt-apache on Gentoo should read /etc/conf.d/apache
In Gentoo, Define's are passed by the initscript as -D arguments read in /etc/conf.d/apache
LE seems to ignore that. As a result, since the "Listen 443" directive is inside IfDefine blocks, it is systematically overlooked by LE since it doesn't know about the active Define directives.
LE will therefore add a "Listen 443" temporary directive, which will cause apache to fail with a "could not bind to address 0.0.0.0:443" error. LE in turn will fail with "urn:acme:error:connection" since apache is not running during the challenge.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `letsencrypt-apache/letsencrypt_apache/constants.py`
Content:
```
1 """Apache plugin constants."""
2 import pkg_resources
3 from letsencrypt import le_util
4
5
6 CLI_DEFAULTS_DEBIAN = dict(
7 server_root="/etc/apache2",
8 vhost_root="/etc/apache2/sites-available",
9 vhost_files="*",
10 version_cmd=['apache2ctl', '-v'],
11 define_cmd=['apache2ctl', '-t', '-D', 'DUMP_RUN_CFG'],
12 restart_cmd=['apache2ctl', 'graceful'],
13 conftest_cmd=['apache2ctl', 'configtest'],
14 enmod="a2enmod",
15 dismod="a2dismod",
16 le_vhost_ext="-le-ssl.conf",
17 handle_mods=True,
18 handle_sites=True,
19 challenge_location="/etc/apache2",
20 MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
21 "letsencrypt_apache", "options-ssl-apache.conf")
22 )
23 CLI_DEFAULTS_CENTOS = dict(
24 server_root="/etc/httpd",
25 vhost_root="/etc/httpd/conf.d",
26 vhost_files="*.conf",
27 version_cmd=['apachectl', '-v'],
28 define_cmd=['apachectl', '-t', '-D', 'DUMP_RUN_CFG'],
29 restart_cmd=['apachectl', 'graceful'],
30 conftest_cmd=['apachectl', 'configtest'],
31 enmod=None,
32 dismod=None,
33 le_vhost_ext="-le-ssl.conf",
34 handle_mods=False,
35 handle_sites=False,
36 challenge_location="/etc/httpd/conf.d",
37 MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
38 "letsencrypt_apache", "centos-options-ssl-apache.conf")
39 )
40 CLI_DEFAULTS_GENTOO = dict(
41 server_root="/etc/apache2",
42 vhost_root="/etc/apache2/vhosts.d",
43 vhost_files="*.conf",
44 version_cmd=['/usr/sbin/apache2', '-v'],
45 define_cmd=['/usr/sbin/apache2', '-t', '-D', 'DUMP_RUN_CFG'],
46 restart_cmd=['apache2ctl', 'graceful'],
47 conftest_cmd=['apache2ctl', 'configtest'],
48 enmod=None,
49 dismod=None,
50 le_vhost_ext="-le-ssl.conf",
51 handle_mods=False,
52 handle_sites=False,
53 challenge_location="/etc/apache2/vhosts.d",
54 MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
55 "letsencrypt_apache", "options-ssl-apache.conf")
56 )
57 CLI_DEFAULTS_DARWIN = dict(
58 server_root="/etc/apache2",
59 vhost_root="/etc/apache2/other",
60 vhost_files="*.conf",
61 version_cmd=['/usr/sbin/httpd', '-v'],
62 define_cmd=['/usr/sbin/httpd', '-t', '-D', 'DUMP_RUN_CFG'],
63 restart_cmd=['apachectl', 'graceful'],
64 conftest_cmd=['apachectl', 'configtest'],
65 enmod=None,
66 dismod=None,
67 le_vhost_ext="-le-ssl.conf",
68 handle_mods=False,
69 handle_sites=False,
70 challenge_location="/etc/apache2/other",
71 MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
72 "letsencrypt_apache", "options-ssl-apache.conf")
73 )
74 CLI_DEFAULTS = {
75 "debian": CLI_DEFAULTS_DEBIAN,
76 "ubuntu": CLI_DEFAULTS_DEBIAN,
77 "centos": CLI_DEFAULTS_CENTOS,
78 "centos linux": CLI_DEFAULTS_CENTOS,
79 "fedora": CLI_DEFAULTS_CENTOS,
80 "red hat enterprise linux server": CLI_DEFAULTS_CENTOS,
81 "gentoo base system": CLI_DEFAULTS_GENTOO,
82 "darwin": CLI_DEFAULTS_DARWIN,
83 }
84 """CLI defaults."""
85
86 MOD_SSL_CONF_DEST = "options-ssl-apache.conf"
87 """Name of the mod_ssl config file as saved in `IConfig.config_dir`."""
88
89 AUGEAS_LENS_DIR = pkg_resources.resource_filename(
90 "letsencrypt_apache", "augeas_lens")
91 """Path to the Augeas lens directory"""
92
93 REWRITE_HTTPS_ARGS = [
94 "^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,QSA,R=permanent]"]
95 """Apache version<2.3.9 rewrite rule arguments used for redirections to
96 https vhost"""
97
98 REWRITE_HTTPS_ARGS_WITH_END = [
99 "^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,QSA,R=permanent]"]
100 """Apache version >= 2.3.9 rewrite rule arguments used for redirections to
101 https vhost"""
102
103 HSTS_ARGS = ["always", "set", "Strict-Transport-Security",
104 "\"max-age=31536000\""]
105 """Apache header arguments for HSTS"""
106
107 UIR_ARGS = ["always", "set", "Content-Security-Policy",
108 "upgrade-insecure-requests"]
109
110 HEADER_ARGS = {"Strict-Transport-Security": HSTS_ARGS,
111 "Upgrade-Insecure-Requests": UIR_ARGS}
112
113
114 def os_constant(key):
115 """Get a constant value for operating system
116 :param key: name of cli constant
117 :return: value of constant for active os
118 """
119 os_info = le_util.get_os_info()
120 try:
121 constants = CLI_DEFAULTS[os_info[0].lower()]
122 except KeyError:
123 constants = CLI_DEFAULTS["debian"]
124 return constants[key]
125
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/letsencrypt-apache/letsencrypt_apache/constants.py b/letsencrypt-apache/letsencrypt_apache/constants.py
--- a/letsencrypt-apache/letsencrypt_apache/constants.py
+++ b/letsencrypt-apache/letsencrypt_apache/constants.py
@@ -42,7 +42,7 @@
vhost_root="/etc/apache2/vhosts.d",
vhost_files="*.conf",
version_cmd=['/usr/sbin/apache2', '-v'],
- define_cmd=['/usr/sbin/apache2', '-t', '-D', 'DUMP_RUN_CFG'],
+ define_cmd=['apache2ctl', 'virtualhosts'],
restart_cmd=['apache2ctl', 'graceful'],
conftest_cmd=['apache2ctl', 'configtest'],
enmod=None,
| {"golden_diff": "diff --git a/letsencrypt-apache/letsencrypt_apache/constants.py b/letsencrypt-apache/letsencrypt_apache/constants.py\n--- a/letsencrypt-apache/letsencrypt_apache/constants.py\n+++ b/letsencrypt-apache/letsencrypt_apache/constants.py\n@@ -42,7 +42,7 @@\n vhost_root=\"/etc/apache2/vhosts.d\",\n vhost_files=\"*.conf\",\n version_cmd=['/usr/sbin/apache2', '-v'],\n- define_cmd=['/usr/sbin/apache2', '-t', '-D', 'DUMP_RUN_CFG'],\n+ define_cmd=['apache2ctl', 'virtualhosts'],\n restart_cmd=['apache2ctl', 'graceful'],\n conftest_cmd=['apache2ctl', 'configtest'],\n enmod=None,\n", "issue": "letsencrypt-apache on Gentoo should read /etc/conf.d/apache\nIn Gentoo, Define's are passed by the initscript as -D arguments read in /etc/conf.d/apache\n\nLE seems to ignore that. As a result, since the \"Listen 443\" directive is inside IfDefine blocks, it is systematically overlooked by LE since it doesn't know about the active Define directives.\n\nLE will therefore add a \"Listen 443\" temporary directive, which will cause apache to fail with a \"could not bind to address 0.0.0.0:443\" error. LE in turn will fail with \"urn:acme:error:connection\" since apache is not running during the challenge.\n\n", "before_files": [{"content": "\"\"\"Apache plugin constants.\"\"\"\nimport pkg_resources\nfrom letsencrypt import le_util\n\n\nCLI_DEFAULTS_DEBIAN = dict(\n server_root=\"/etc/apache2\",\n vhost_root=\"/etc/apache2/sites-available\",\n vhost_files=\"*\",\n version_cmd=['apache2ctl', '-v'],\n define_cmd=['apache2ctl', '-t', '-D', 'DUMP_RUN_CFG'],\n restart_cmd=['apache2ctl', 'graceful'],\n conftest_cmd=['apache2ctl', 'configtest'],\n enmod=\"a2enmod\",\n dismod=\"a2dismod\",\n le_vhost_ext=\"-le-ssl.conf\",\n handle_mods=True,\n handle_sites=True,\n challenge_location=\"/etc/apache2\",\n MOD_SSL_CONF_SRC=pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"options-ssl-apache.conf\")\n)\nCLI_DEFAULTS_CENTOS = dict(\n server_root=\"/etc/httpd\",\n vhost_root=\"/etc/httpd/conf.d\",\n vhost_files=\"*.conf\",\n version_cmd=['apachectl', '-v'],\n define_cmd=['apachectl', '-t', '-D', 'DUMP_RUN_CFG'],\n restart_cmd=['apachectl', 'graceful'],\n conftest_cmd=['apachectl', 'configtest'],\n enmod=None,\n dismod=None,\n le_vhost_ext=\"-le-ssl.conf\",\n handle_mods=False,\n handle_sites=False,\n challenge_location=\"/etc/httpd/conf.d\",\n MOD_SSL_CONF_SRC=pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"centos-options-ssl-apache.conf\")\n)\nCLI_DEFAULTS_GENTOO = dict(\n server_root=\"/etc/apache2\",\n vhost_root=\"/etc/apache2/vhosts.d\",\n vhost_files=\"*.conf\",\n version_cmd=['/usr/sbin/apache2', '-v'],\n define_cmd=['/usr/sbin/apache2', '-t', '-D', 'DUMP_RUN_CFG'],\n restart_cmd=['apache2ctl', 'graceful'],\n conftest_cmd=['apache2ctl', 'configtest'],\n enmod=None,\n dismod=None,\n le_vhost_ext=\"-le-ssl.conf\",\n handle_mods=False,\n handle_sites=False,\n challenge_location=\"/etc/apache2/vhosts.d\",\n MOD_SSL_CONF_SRC=pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"options-ssl-apache.conf\")\n)\nCLI_DEFAULTS_DARWIN = dict(\n server_root=\"/etc/apache2\",\n vhost_root=\"/etc/apache2/other\",\n vhost_files=\"*.conf\",\n version_cmd=['/usr/sbin/httpd', '-v'],\n define_cmd=['/usr/sbin/httpd', '-t', '-D', 'DUMP_RUN_CFG'],\n restart_cmd=['apachectl', 'graceful'],\n conftest_cmd=['apachectl', 'configtest'],\n enmod=None,\n dismod=None,\n le_vhost_ext=\"-le-ssl.conf\",\n handle_mods=False,\n handle_sites=False,\n challenge_location=\"/etc/apache2/other\",\n 
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"options-ssl-apache.conf\")\n)\nCLI_DEFAULTS = {\n \"debian\": CLI_DEFAULTS_DEBIAN,\n \"ubuntu\": CLI_DEFAULTS_DEBIAN,\n \"centos\": CLI_DEFAULTS_CENTOS,\n \"centos linux\": CLI_DEFAULTS_CENTOS,\n \"fedora\": CLI_DEFAULTS_CENTOS,\n \"red hat enterprise linux server\": CLI_DEFAULTS_CENTOS,\n \"gentoo base system\": CLI_DEFAULTS_GENTOO,\n \"darwin\": CLI_DEFAULTS_DARWIN,\n}\n\"\"\"CLI defaults.\"\"\"\n\nMOD_SSL_CONF_DEST = \"options-ssl-apache.conf\"\n\"\"\"Name of the mod_ssl config file as saved in `IConfig.config_dir`.\"\"\"\n\nAUGEAS_LENS_DIR = pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"augeas_lens\")\n\"\"\"Path to the Augeas lens directory\"\"\"\n\nREWRITE_HTTPS_ARGS = [\n \"^\", \"https://%{SERVER_NAME}%{REQUEST_URI}\", \"[L,QSA,R=permanent]\"]\n\"\"\"Apache version<2.3.9 rewrite rule arguments used for redirections to\nhttps vhost\"\"\"\n\nREWRITE_HTTPS_ARGS_WITH_END = [\n \"^\", \"https://%{SERVER_NAME}%{REQUEST_URI}\", \"[END,QSA,R=permanent]\"]\n\"\"\"Apache version >= 2.3.9 rewrite rule arguments used for redirections to\n https vhost\"\"\"\n\nHSTS_ARGS = [\"always\", \"set\", \"Strict-Transport-Security\",\n \"\\\"max-age=31536000\\\"\"]\n\"\"\"Apache header arguments for HSTS\"\"\"\n\nUIR_ARGS = [\"always\", \"set\", \"Content-Security-Policy\",\n \"upgrade-insecure-requests\"]\n\nHEADER_ARGS = {\"Strict-Transport-Security\": HSTS_ARGS,\n \"Upgrade-Insecure-Requests\": UIR_ARGS}\n\n\ndef os_constant(key):\n \"\"\"Get a constant value for operating system\n :param key: name of cli constant\n :return: value of constant for active os\n \"\"\"\n os_info = le_util.get_os_info()\n try:\n constants = CLI_DEFAULTS[os_info[0].lower()]\n except KeyError:\n constants = CLI_DEFAULTS[\"debian\"]\n return constants[key]\n", "path": "letsencrypt-apache/letsencrypt_apache/constants.py"}], "after_files": [{"content": "\"\"\"Apache plugin constants.\"\"\"\nimport pkg_resources\nfrom letsencrypt import le_util\n\n\nCLI_DEFAULTS_DEBIAN = dict(\n server_root=\"/etc/apache2\",\n vhost_root=\"/etc/apache2/sites-available\",\n vhost_files=\"*\",\n version_cmd=['apache2ctl', '-v'],\n define_cmd=['apache2ctl', '-t', '-D', 'DUMP_RUN_CFG'],\n restart_cmd=['apache2ctl', 'graceful'],\n conftest_cmd=['apache2ctl', 'configtest'],\n enmod=\"a2enmod\",\n dismod=\"a2dismod\",\n le_vhost_ext=\"-le-ssl.conf\",\n handle_mods=True,\n handle_sites=True,\n challenge_location=\"/etc/apache2\",\n MOD_SSL_CONF_SRC=pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"options-ssl-apache.conf\")\n)\nCLI_DEFAULTS_CENTOS = dict(\n server_root=\"/etc/httpd\",\n vhost_root=\"/etc/httpd/conf.d\",\n vhost_files=\"*.conf\",\n version_cmd=['apachectl', '-v'],\n define_cmd=['apachectl', '-t', '-D', 'DUMP_RUN_CFG'],\n restart_cmd=['apachectl', 'graceful'],\n conftest_cmd=['apachectl', 'configtest'],\n enmod=None,\n dismod=None,\n le_vhost_ext=\"-le-ssl.conf\",\n handle_mods=False,\n handle_sites=False,\n challenge_location=\"/etc/httpd/conf.d\",\n MOD_SSL_CONF_SRC=pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"centos-options-ssl-apache.conf\")\n)\nCLI_DEFAULTS_GENTOO = dict(\n server_root=\"/etc/apache2\",\n vhost_root=\"/etc/apache2/vhosts.d\",\n vhost_files=\"*.conf\",\n version_cmd=['/usr/sbin/apache2', '-v'],\n define_cmd=['apache2ctl', 'virtualhosts'],\n restart_cmd=['apache2ctl', 'graceful'],\n conftest_cmd=['apache2ctl', 'configtest'],\n enmod=None,\n dismod=None,\n le_vhost_ext=\"-le-ssl.conf\",\n 
handle_mods=False,\n handle_sites=False,\n challenge_location=\"/etc/apache2/vhosts.d\",\n MOD_SSL_CONF_SRC=pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"options-ssl-apache.conf\")\n)\nCLI_DEFAULTS_DARWIN = dict(\n server_root=\"/etc/apache2\",\n vhost_root=\"/etc/apache2/other\",\n vhost_files=\"*.conf\",\n version_cmd=['/usr/sbin/httpd', '-v'],\n define_cmd=['/usr/sbin/httpd', '-t', '-D', 'DUMP_RUN_CFG'],\n restart_cmd=['apachectl', 'graceful'],\n conftest_cmd=['apachectl', 'configtest'],\n enmod=None,\n dismod=None,\n le_vhost_ext=\"-le-ssl.conf\",\n handle_mods=False,\n handle_sites=False,\n challenge_location=\"/etc/apache2/other\",\n MOD_SSL_CONF_SRC=pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"options-ssl-apache.conf\")\n)\nCLI_DEFAULTS = {\n \"debian\": CLI_DEFAULTS_DEBIAN,\n \"ubuntu\": CLI_DEFAULTS_DEBIAN,\n \"centos\": CLI_DEFAULTS_CENTOS,\n \"centos linux\": CLI_DEFAULTS_CENTOS,\n \"fedora\": CLI_DEFAULTS_CENTOS,\n \"red hat enterprise linux server\": CLI_DEFAULTS_CENTOS,\n \"gentoo base system\": CLI_DEFAULTS_GENTOO,\n \"darwin\": CLI_DEFAULTS_DARWIN,\n}\n\"\"\"CLI defaults.\"\"\"\n\nMOD_SSL_CONF_DEST = \"options-ssl-apache.conf\"\n\"\"\"Name of the mod_ssl config file as saved in `IConfig.config_dir`.\"\"\"\n\nAUGEAS_LENS_DIR = pkg_resources.resource_filename(\n \"letsencrypt_apache\", \"augeas_lens\")\n\"\"\"Path to the Augeas lens directory\"\"\"\n\nREWRITE_HTTPS_ARGS = [\n \"^\", \"https://%{SERVER_NAME}%{REQUEST_URI}\", \"[L,QSA,R=permanent]\"]\n\"\"\"Apache version<2.3.9 rewrite rule arguments used for redirections to\nhttps vhost\"\"\"\n\nREWRITE_HTTPS_ARGS_WITH_END = [\n \"^\", \"https://%{SERVER_NAME}%{REQUEST_URI}\", \"[END,QSA,R=permanent]\"]\n\"\"\"Apache version >= 2.3.9 rewrite rule arguments used for redirections to\n https vhost\"\"\"\n\nHSTS_ARGS = [\"always\", \"set\", \"Strict-Transport-Security\",\n \"\\\"max-age=31536000\\\"\"]\n\"\"\"Apache header arguments for HSTS\"\"\"\n\nUIR_ARGS = [\"always\", \"set\", \"Content-Security-Policy\",\n \"upgrade-insecure-requests\"]\n\nHEADER_ARGS = {\"Strict-Transport-Security\": HSTS_ARGS,\n \"Upgrade-Insecure-Requests\": UIR_ARGS}\n\n\ndef os_constant(key):\n \"\"\"Get a constant value for operating system\n :param key: name of cli constant\n :return: value of constant for active os\n \"\"\"\n os_info = le_util.get_os_info()\n try:\n constants = CLI_DEFAULTS[os_info[0].lower()]\n except KeyError:\n constants = CLI_DEFAULTS[\"debian\"]\n return constants[key]\n", "path": "letsencrypt-apache/letsencrypt_apache/constants.py"}]} | 1,836 | 173 |
gh_patches_debug_17795 | rasdani/github-patches | git_diff | goauthentik__authentik-5727 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
OCI Registry Blueprint port ignored
**Describe the bug**
When I try to load blueprints from a registry running on a custom port (e.g. port 5050) the connection fails.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to `Costomization > Blueprints`
2. Create a new `OCI Registry` Blueprint with a non default Port
3. Example `oci://larsl.dev:5050/larsl-net/authentik-config/blueprints/larsl-stages-base:latest`
4. A connection error occurs
**Expected behavior**
authentik connects on the port specified in the URL (5050). What happens according to the error message is that authentik uses port 443.
**Logs**
```
HTTPSConnectionPool(host='larsl.dev', port=443): Max retries exceeded with url: /v2/larsl-net/authentik-config/blueprints/larsl-stages-base/manifests/latest (Caused by NewConnectionError('<urllib3.connection.HTTPSConnection object at 0x7f2e26efa690>: Failed to establish a new connection: [Errno 111] Connection refused'))
```
**Version and Deployment (please complete the following information):**
- authentik version: 2023.5.1
- Deployment: docker-compose
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `authentik/blueprints/v1/oci.py`
Content:
```
1 """OCI Client"""
2 from typing import Any
3 from urllib.parse import ParseResult, urlparse
4
5 from opencontainers.distribution.reggie import (
6 NewClient,
7 WithDebug,
8 WithDefaultName,
9 WithDigest,
10 WithReference,
11 WithUserAgent,
12 WithUsernamePassword,
13 )
14 from requests.exceptions import RequestException
15 from structlog import get_logger
16 from structlog.stdlib import BoundLogger
17
18 from authentik.lib.sentry import SentryIgnoredException
19 from authentik.lib.utils.http import authentik_user_agent
20
21 OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
22
23
24 class OCIException(SentryIgnoredException):
25 """OCI-related errors"""
26
27
28 class BlueprintOCIClient:
29 """Blueprint OCI Client"""
30
31 url: ParseResult
32 sanitized_url: str
33 logger: BoundLogger
34 ref: str
35 client: NewClient
36
37 def __init__(self, url: str) -> None:
38 self._parse_url(url)
39 self.logger = get_logger().bind(url=self.sanitized_url)
40
41 self.ref = "latest"
42 path = self.url.path[1:]
43 if ":" in self.url.path:
44 path, _, self.ref = path.partition(":")
45 self.client = NewClient(
46 f"https://{self.url.hostname}",
47 WithUserAgent(authentik_user_agent()),
48 WithUsernamePassword(self.url.username, self.url.password),
49 WithDefaultName(path),
50 WithDebug(True),
51 )
52
53 def _parse_url(self, url: str):
54 self.url = urlparse(url)
55 netloc = self.url.netloc
56 if "@" in netloc:
57 netloc = netloc[netloc.index("@") + 1 :]
58 self.sanitized_url = self.url._replace(netloc=netloc).geturl()
59
60 def fetch_manifests(self) -> dict[str, Any]:
61 """Fetch manifests for ref"""
62 self.logger.info("Fetching OCI manifests for blueprint")
63 manifest_request = self.client.NewRequest(
64 "GET",
65 "/v2/<name>/manifests/<reference>",
66 WithReference(self.ref),
67 ).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
68 try:
69 manifest_response = self.client.Do(manifest_request)
70 manifest_response.raise_for_status()
71 except RequestException as exc:
72 raise OCIException(exc) from exc
73 manifest = manifest_response.json()
74 if "errors" in manifest:
75 raise OCIException(manifest["errors"])
76 return manifest
77
78 def fetch_blobs(self, manifest: dict[str, Any]):
79 """Fetch blob based on manifest info"""
80 blob = None
81 for layer in manifest.get("layers", []):
82 if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
83 blob = layer.get("digest")
84 self.logger.debug("Found layer with matching media type", blob=blob)
85 if not blob:
86 raise OCIException("Blob not found")
87
88 blob_request = self.client.NewRequest(
89 "GET",
90 "/v2/<name>/blobs/<digest>",
91 WithDigest(blob),
92 )
93 try:
94 blob_response = self.client.Do(blob_request)
95 blob_response.raise_for_status()
96 return blob_response.text
97 except RequestException as exc:
98 raise OCIException(exc) from exc
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/authentik/blueprints/v1/oci.py b/authentik/blueprints/v1/oci.py
--- a/authentik/blueprints/v1/oci.py
+++ b/authentik/blueprints/v1/oci.py
@@ -39,11 +39,16 @@
self.logger = get_logger().bind(url=self.sanitized_url)
self.ref = "latest"
+ # Remove the leading slash of the path to convert it to an image name
path = self.url.path[1:]
- if ":" in self.url.path:
+ if ":" in path:
+ # if there's a colon in the path, use everything after it as a ref
path, _, self.ref = path.partition(":")
+ base_url = f"https://{self.url.hostname}"
+ if self.url.port:
+ base_url += f":{self.url.port}"
self.client = NewClient(
- f"https://{self.url.hostname}",
+ base_url,
WithUserAgent(authentik_user_agent()),
WithUsernamePassword(self.url.username, self.url.password),
WithDefaultName(path),
| {"golden_diff": "diff --git a/authentik/blueprints/v1/oci.py b/authentik/blueprints/v1/oci.py\n--- a/authentik/blueprints/v1/oci.py\n+++ b/authentik/blueprints/v1/oci.py\n@@ -39,11 +39,16 @@\n self.logger = get_logger().bind(url=self.sanitized_url)\n \n self.ref = \"latest\"\n+ # Remove the leading slash of the path to convert it to an image name\n path = self.url.path[1:]\n- if \":\" in self.url.path:\n+ if \":\" in path:\n+ # if there's a colon in the path, use everything after it as a ref\n path, _, self.ref = path.partition(\":\")\n+ base_url = f\"https://{self.url.hostname}\"\n+ if self.url.port:\n+ base_url += f\":{self.url.port}\"\n self.client = NewClient(\n- f\"https://{self.url.hostname}\",\n+ base_url,\n WithUserAgent(authentik_user_agent()),\n WithUsernamePassword(self.url.username, self.url.password),\n WithDefaultName(path),\n", "issue": "OCI Registry Blueprint port ignored\n**Describe the bug**\r\nWhen I try to load blueprints from a registry running on a custom port (e.g. port 5050) the connection fails.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n\r\n1. Go to `Costomization > Blueprints`\r\n2. Create a new `OCI Registry` Blueprint with a non default Port\r\n3. Example `oci://larsl.dev:5050/larsl-net/authentik-config/blueprints/larsl-stages-base:latest`\r\n4. A connection error occurs\r\n\r\n**Expected behavior**\r\nauthentik connects on the port specified in the URL (5050). What happens according to the error message is that authentik uses port 443.\r\n\r\n**Logs**\r\n```\r\nHTTPSConnectionPool(host='larsl.dev', port=443): Max retries exceeded with url: /v2/larsl-net/authentik-config/blueprints/larsl-stages-base/manifests/latest (Caused by NewConnectionError('<urllib3.connection.HTTPSConnection object at 0x7f2e26efa690>: Failed to establish a new connection: [Errno 111] Connection refused'))\r\n```\r\n\r\n**Version and Deployment (please complete the following information):**\r\n\r\n- authentik version: 2023.5.1\r\n- Deployment: docker-compose\r\n\n", "before_files": [{"content": "\"\"\"OCI Client\"\"\"\nfrom typing import Any\nfrom urllib.parse import ParseResult, urlparse\n\nfrom opencontainers.distribution.reggie import (\n NewClient,\n WithDebug,\n WithDefaultName,\n WithDigest,\n WithReference,\n WithUserAgent,\n WithUsernamePassword,\n)\nfrom requests.exceptions import RequestException\nfrom structlog import get_logger\nfrom structlog.stdlib import BoundLogger\n\nfrom authentik.lib.sentry import SentryIgnoredException\nfrom authentik.lib.utils.http import authentik_user_agent\n\nOCI_MEDIA_TYPE = \"application/vnd.goauthentik.blueprint.v1+yaml\"\n\n\nclass OCIException(SentryIgnoredException):\n \"\"\"OCI-related errors\"\"\"\n\n\nclass BlueprintOCIClient:\n \"\"\"Blueprint OCI Client\"\"\"\n\n url: ParseResult\n sanitized_url: str\n logger: BoundLogger\n ref: str\n client: NewClient\n\n def __init__(self, url: str) -> None:\n self._parse_url(url)\n self.logger = get_logger().bind(url=self.sanitized_url)\n\n self.ref = \"latest\"\n path = self.url.path[1:]\n if \":\" in self.url.path:\n path, _, self.ref = path.partition(\":\")\n self.client = NewClient(\n f\"https://{self.url.hostname}\",\n WithUserAgent(authentik_user_agent()),\n WithUsernamePassword(self.url.username, self.url.password),\n WithDefaultName(path),\n WithDebug(True),\n )\n\n def _parse_url(self, url: str):\n self.url = urlparse(url)\n netloc = self.url.netloc\n if \"@\" in netloc:\n netloc = netloc[netloc.index(\"@\") + 1 :]\n self.sanitized_url = 
self.url._replace(netloc=netloc).geturl()\n\n def fetch_manifests(self) -> dict[str, Any]:\n \"\"\"Fetch manifests for ref\"\"\"\n self.logger.info(\"Fetching OCI manifests for blueprint\")\n manifest_request = self.client.NewRequest(\n \"GET\",\n \"/v2/<name>/manifests/<reference>\",\n WithReference(self.ref),\n ).SetHeader(\"Accept\", \"application/vnd.oci.image.manifest.v1+json\")\n try:\n manifest_response = self.client.Do(manifest_request)\n manifest_response.raise_for_status()\n except RequestException as exc:\n raise OCIException(exc) from exc\n manifest = manifest_response.json()\n if \"errors\" in manifest:\n raise OCIException(manifest[\"errors\"])\n return manifest\n\n def fetch_blobs(self, manifest: dict[str, Any]):\n \"\"\"Fetch blob based on manifest info\"\"\"\n blob = None\n for layer in manifest.get(\"layers\", []):\n if layer.get(\"mediaType\", \"\") == OCI_MEDIA_TYPE:\n blob = layer.get(\"digest\")\n self.logger.debug(\"Found layer with matching media type\", blob=blob)\n if not blob:\n raise OCIException(\"Blob not found\")\n\n blob_request = self.client.NewRequest(\n \"GET\",\n \"/v2/<name>/blobs/<digest>\",\n WithDigest(blob),\n )\n try:\n blob_response = self.client.Do(blob_request)\n blob_response.raise_for_status()\n return blob_response.text\n except RequestException as exc:\n raise OCIException(exc) from exc\n", "path": "authentik/blueprints/v1/oci.py"}], "after_files": [{"content": "\"\"\"OCI Client\"\"\"\nfrom typing import Any\nfrom urllib.parse import ParseResult, urlparse\n\nfrom opencontainers.distribution.reggie import (\n NewClient,\n WithDebug,\n WithDefaultName,\n WithDigest,\n WithReference,\n WithUserAgent,\n WithUsernamePassword,\n)\nfrom requests.exceptions import RequestException\nfrom structlog import get_logger\nfrom structlog.stdlib import BoundLogger\n\nfrom authentik.lib.sentry import SentryIgnoredException\nfrom authentik.lib.utils.http import authentik_user_agent\n\nOCI_MEDIA_TYPE = \"application/vnd.goauthentik.blueprint.v1+yaml\"\n\n\nclass OCIException(SentryIgnoredException):\n \"\"\"OCI-related errors\"\"\"\n\n\nclass BlueprintOCIClient:\n \"\"\"Blueprint OCI Client\"\"\"\n\n url: ParseResult\n sanitized_url: str\n logger: BoundLogger\n ref: str\n client: NewClient\n\n def __init__(self, url: str) -> None:\n self._parse_url(url)\n self.logger = get_logger().bind(url=self.sanitized_url)\n\n self.ref = \"latest\"\n # Remove the leading slash of the path to convert it to an image name\n path = self.url.path[1:]\n if \":\" in path:\n # if there's a colon in the path, use everything after it as a ref\n path, _, self.ref = path.partition(\":\")\n base_url = f\"https://{self.url.hostname}\"\n if self.url.port:\n base_url += f\":{self.url.port}\"\n self.client = NewClient(\n base_url,\n WithUserAgent(authentik_user_agent()),\n WithUsernamePassword(self.url.username, self.url.password),\n WithDefaultName(path),\n WithDebug(True),\n )\n\n def _parse_url(self, url: str):\n self.url = urlparse(url)\n netloc = self.url.netloc\n if \"@\" in netloc:\n netloc = netloc[netloc.index(\"@\") + 1 :]\n self.sanitized_url = self.url._replace(netloc=netloc).geturl()\n\n def fetch_manifests(self) -> dict[str, Any]:\n \"\"\"Fetch manifests for ref\"\"\"\n self.logger.info(\"Fetching OCI manifests for blueprint\")\n manifest_request = self.client.NewRequest(\n \"GET\",\n \"/v2/<name>/manifests/<reference>\",\n WithReference(self.ref),\n ).SetHeader(\"Accept\", \"application/vnd.oci.image.manifest.v1+json\")\n try:\n manifest_response = 
self.client.Do(manifest_request)\n manifest_response.raise_for_status()\n except RequestException as exc:\n raise OCIException(exc) from exc\n manifest = manifest_response.json()\n if \"errors\" in manifest:\n raise OCIException(manifest[\"errors\"])\n return manifest\n\n def fetch_blobs(self, manifest: dict[str, Any]):\n \"\"\"Fetch blob based on manifest info\"\"\"\n blob = None\n for layer in manifest.get(\"layers\", []):\n if layer.get(\"mediaType\", \"\") == OCI_MEDIA_TYPE:\n blob = layer.get(\"digest\")\n self.logger.debug(\"Found layer with matching media type\", blob=blob)\n if not blob:\n raise OCIException(\"Blob not found\")\n\n blob_request = self.client.NewRequest(\n \"GET\",\n \"/v2/<name>/blobs/<digest>\",\n WithDigest(blob),\n )\n try:\n blob_response = self.client.Do(blob_request)\n blob_response.raise_for_status()\n return blob_response.text\n except RequestException as exc:\n raise OCIException(exc) from exc\n", "path": "authentik/blueprints/v1/oci.py"}]} | 1,450 | 243 |
gh_patches_debug_24709 | rasdani/github-patches | git_diff | HypothesisWorks__hypothesis-1535 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add compat with pytest-capturelog for note()-style logging capture?
Using hypothesis with code that has logging statements makes it very difficult to use those log calls to debug.
Having the ability to have logging output captured in the style of `note()` would be extremely useful. The [pytest-capturelog](https://pypi.python.org/pypi/pytest-capturelog) plugin collects the logging output into the test failure message. It would be really nice to have some kind of cross-compatibility with them so that it can group captured logs by example rather than at the test-function level
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `hypothesis-python/src/hypothesis/control.py`
Content:
```
1 # coding=utf-8
2 #
3 # This file is part of Hypothesis, which may be found at
4 # https://github.com/HypothesisWorks/hypothesis-python
5 #
6 # Most of this work is copyright (C) 2013-2018 David R. MacIver
7 # ([email protected]), but it contains contributions by others. See
8 # CONTRIBUTING.rst for a full list of people who may hold copyright, and
9 # consult the git log if you need to determine who owns an individual
10 # contribution.
11 #
12 # This Source Code Form is subject to the terms of the Mozilla Public License,
13 # v. 2.0. If a copy of the MPL was not distributed with this file, You can
14 # obtain one at http://mozilla.org/MPL/2.0/.
15 #
16 # END HEADER
17
18 from __future__ import division, print_function, absolute_import
19
20 import traceback
21
22 from hypothesis import Verbosity, settings
23 from hypothesis.errors import CleanupFailed, InvalidArgument, \
24 UnsatisfiedAssumption
25 from hypothesis.reporting import report
26 from hypothesis.utils.dynamicvariables import DynamicVariable
27
28 if False:
29 from typing import Any, AnyStr # noqa
30
31
32 def reject():
33 raise UnsatisfiedAssumption()
34
35
36 def assume(condition):
37 # type: (Any) -> bool
38 """Calling ``assume`` is like an :ref:`assert <python:assert>` that marks
39 the example as bad, rather than failing the test.
40
41 This allows you to specify properties that you *assume* will be
42 true, and let Hypothesis try to avoid similar examples in future.
43 """
44 if not condition:
45 raise UnsatisfiedAssumption()
46 return True
47
48
49 _current_build_context = DynamicVariable(None)
50
51
52 def current_build_context():
53 context = _current_build_context.value
54 if context is None:
55 raise InvalidArgument(
56 u'No build context registered')
57 return context
58
59
60 class BuildContext(object):
61
62 def __init__(self, data, is_final=False, close_on_capture=True):
63 self.data = data
64 self.tasks = []
65 self.is_final = is_final
66 self.close_on_capture = close_on_capture
67 self.close_on_del = False
68 self.notes = []
69
70 def __enter__(self):
71 self.assign_variable = _current_build_context.with_value(self)
72 self.assign_variable.__enter__()
73 return self
74
75 def __exit__(self, exc_type, exc_value, tb):
76 self.assign_variable.__exit__(exc_type, exc_value, tb)
77 if self.close() and exc_type is None:
78 raise CleanupFailed()
79
80 def local(self):
81 return _current_build_context.with_value(self)
82
83 def close(self):
84 any_failed = False
85 for task in self.tasks:
86 try:
87 task()
88 except BaseException:
89 any_failed = True
90 report(traceback.format_exc())
91 return any_failed
92
93
94 def cleanup(teardown):
95 """Register a function to be called when the current test has finished
96 executing. Any exceptions thrown in teardown will be printed but not
97 rethrown.
98
99 Inside a test this isn't very interesting, because you can just use
100 a finally block, but note that you can use this inside map, flatmap,
101 etc. in order to e.g. insist that a value is closed at the end.
102 """
103 context = _current_build_context.value
104 if context is None:
105 raise InvalidArgument(
106 u'Cannot register cleanup outside of build context')
107 context.tasks.append(teardown)
108
109
110 def note(value):
111 # type: (AnyStr) -> None
112 """Report this value in the final execution."""
113 context = _current_build_context.value
114 if context is None:
115 raise InvalidArgument(
116 'Cannot make notes outside of a test')
117 context.notes.append(value)
118 if context.is_final or settings.default.verbosity >= Verbosity.verbose:
119 report(value)
120
121
122 def event(value):
123 # type: (AnyStr) -> None
124 """Record an event that occurred this test. Statistics on number of test
125 runs with each event will be reported at the end if you run Hypothesis in
126 statistics reporting mode.
127
128 Events should be strings or convertible to them.
129 """
130 context = _current_build_context.value
131 if context is None:
132 raise InvalidArgument(
133 'Cannot make record events outside of a test')
134
135 if context.data is not None:
136 context.data.note_event(value)
137
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/hypothesis-python/src/hypothesis/control.py b/hypothesis-python/src/hypothesis/control.py
--- a/hypothesis-python/src/hypothesis/control.py
+++ b/hypothesis-python/src/hypothesis/control.py
@@ -17,6 +17,7 @@
from __future__ import division, print_function, absolute_import
+import logging
import traceback
from hypothesis import Verbosity, settings
@@ -66,14 +67,20 @@
self.close_on_capture = close_on_capture
self.close_on_del = False
self.notes = []
+ self.original_logging_disable = logging.NOTSET
def __enter__(self):
+ if not self.is_final:
+ self.original_logging_disable = logging.root.manager.disable
+ logging.disable(logging.CRITICAL)
self.assign_variable = _current_build_context.with_value(self)
self.assign_variable.__enter__()
return self
def __exit__(self, exc_type, exc_value, tb):
self.assign_variable.__exit__(exc_type, exc_value, tb)
+ if not self.is_final:
+ logging.disable(self.original_logging_disable)
if self.close() and exc_type is None:
raise CleanupFailed()
| {"golden_diff": "diff --git a/hypothesis-python/src/hypothesis/control.py b/hypothesis-python/src/hypothesis/control.py\n--- a/hypothesis-python/src/hypothesis/control.py\n+++ b/hypothesis-python/src/hypothesis/control.py\n@@ -17,6 +17,7 @@\n \n from __future__ import division, print_function, absolute_import\n \n+import logging\n import traceback\n \n from hypothesis import Verbosity, settings\n@@ -66,14 +67,20 @@\n self.close_on_capture = close_on_capture\n self.close_on_del = False\n self.notes = []\n+ self.original_logging_disable = logging.NOTSET\n \n def __enter__(self):\n+ if not self.is_final:\n+ self.original_logging_disable = logging.root.manager.disable\n+ logging.disable(logging.CRITICAL)\n self.assign_variable = _current_build_context.with_value(self)\n self.assign_variable.__enter__()\n return self\n \n def __exit__(self, exc_type, exc_value, tb):\n self.assign_variable.__exit__(exc_type, exc_value, tb)\n+ if not self.is_final:\n+ logging.disable(self.original_logging_disable)\n if self.close() and exc_type is None:\n raise CleanupFailed()\n", "issue": "Add compat with pytest-capturelog for note()-style logging capture?\nUsing hypothesis with code that has logging statements makes it very difficult to use those log calls to debug.\n\nHaving the ability to have logging output captured in the style of `note()` would be extremely useful. The [pytest-capturelog](https://pypi.python.org/pypi/pytest-capturelog) plugin collects the logging output into the test failure message. It would be really nice to have some kind of cross-compatibility with them so that it can group captured logs by example rather than at the test-function level\n\n", "before_files": [{"content": "# coding=utf-8\n#\n# This file is part of Hypothesis, which may be found at\n# https://github.com/HypothesisWorks/hypothesis-python\n#\n# Most of this work is copyright (C) 2013-2018 David R. MacIver\n# ([email protected]), but it contains contributions by others. See\n# CONTRIBUTING.rst for a full list of people who may hold copyright, and\n# consult the git log if you need to determine who owns an individual\n# contribution.\n#\n# This Source Code Form is subject to the terms of the Mozilla Public License,\n# v. 2.0. 
If a copy of the MPL was not distributed with this file, You can\n# obtain one at http://mozilla.org/MPL/2.0/.\n#\n# END HEADER\n\nfrom __future__ import division, print_function, absolute_import\n\nimport traceback\n\nfrom hypothesis import Verbosity, settings\nfrom hypothesis.errors import CleanupFailed, InvalidArgument, \\\n UnsatisfiedAssumption\nfrom hypothesis.reporting import report\nfrom hypothesis.utils.dynamicvariables import DynamicVariable\n\nif False:\n from typing import Any, AnyStr # noqa\n\n\ndef reject():\n raise UnsatisfiedAssumption()\n\n\ndef assume(condition):\n # type: (Any) -> bool\n \"\"\"Calling ``assume`` is like an :ref:`assert <python:assert>` that marks\n the example as bad, rather than failing the test.\n\n This allows you to specify properties that you *assume* will be\n true, and let Hypothesis try to avoid similar examples in future.\n \"\"\"\n if not condition:\n raise UnsatisfiedAssumption()\n return True\n\n\n_current_build_context = DynamicVariable(None)\n\n\ndef current_build_context():\n context = _current_build_context.value\n if context is None:\n raise InvalidArgument(\n u'No build context registered')\n return context\n\n\nclass BuildContext(object):\n\n def __init__(self, data, is_final=False, close_on_capture=True):\n self.data = data\n self.tasks = []\n self.is_final = is_final\n self.close_on_capture = close_on_capture\n self.close_on_del = False\n self.notes = []\n\n def __enter__(self):\n self.assign_variable = _current_build_context.with_value(self)\n self.assign_variable.__enter__()\n return self\n\n def __exit__(self, exc_type, exc_value, tb):\n self.assign_variable.__exit__(exc_type, exc_value, tb)\n if self.close() and exc_type is None:\n raise CleanupFailed()\n\n def local(self):\n return _current_build_context.with_value(self)\n\n def close(self):\n any_failed = False\n for task in self.tasks:\n try:\n task()\n except BaseException:\n any_failed = True\n report(traceback.format_exc())\n return any_failed\n\n\ndef cleanup(teardown):\n \"\"\"Register a function to be called when the current test has finished\n executing. Any exceptions thrown in teardown will be printed but not\n rethrown.\n\n Inside a test this isn't very interesting, because you can just use\n a finally block, but note that you can use this inside map, flatmap,\n etc. in order to e.g. insist that a value is closed at the end.\n \"\"\"\n context = _current_build_context.value\n if context is None:\n raise InvalidArgument(\n u'Cannot register cleanup outside of build context')\n context.tasks.append(teardown)\n\n\ndef note(value):\n # type: (AnyStr) -> None\n \"\"\"Report this value in the final execution.\"\"\"\n context = _current_build_context.value\n if context is None:\n raise InvalidArgument(\n 'Cannot make notes outside of a test')\n context.notes.append(value)\n if context.is_final or settings.default.verbosity >= Verbosity.verbose:\n report(value)\n\n\ndef event(value):\n # type: (AnyStr) -> None\n \"\"\"Record an event that occurred this test. 
Statistics on number of test\n runs with each event will be reported at the end if you run Hypothesis in\n statistics reporting mode.\n\n Events should be strings or convertible to them.\n \"\"\"\n context = _current_build_context.value\n if context is None:\n raise InvalidArgument(\n 'Cannot make record events outside of a test')\n\n if context.data is not None:\n context.data.note_event(value)\n", "path": "hypothesis-python/src/hypothesis/control.py"}], "after_files": [{"content": "# coding=utf-8\n#\n# This file is part of Hypothesis, which may be found at\n# https://github.com/HypothesisWorks/hypothesis-python\n#\n# Most of this work is copyright (C) 2013-2018 David R. MacIver\n# ([email protected]), but it contains contributions by others. See\n# CONTRIBUTING.rst for a full list of people who may hold copyright, and\n# consult the git log if you need to determine who owns an individual\n# contribution.\n#\n# This Source Code Form is subject to the terms of the Mozilla Public License,\n# v. 2.0. If a copy of the MPL was not distributed with this file, You can\n# obtain one at http://mozilla.org/MPL/2.0/.\n#\n# END HEADER\n\nfrom __future__ import division, print_function, absolute_import\n\nimport logging\nimport traceback\n\nfrom hypothesis import Verbosity, settings\nfrom hypothesis.errors import CleanupFailed, InvalidArgument, \\\n UnsatisfiedAssumption\nfrom hypothesis.reporting import report\nfrom hypothesis.utils.dynamicvariables import DynamicVariable\n\nif False:\n from typing import Any, AnyStr # noqa\n\n\ndef reject():\n raise UnsatisfiedAssumption()\n\n\ndef assume(condition):\n # type: (Any) -> bool\n \"\"\"Calling ``assume`` is like an :ref:`assert <python:assert>` that marks\n the example as bad, rather than failing the test.\n\n This allows you to specify properties that you *assume* will be\n true, and let Hypothesis try to avoid similar examples in future.\n \"\"\"\n if not condition:\n raise UnsatisfiedAssumption()\n return True\n\n\n_current_build_context = DynamicVariable(None)\n\n\ndef current_build_context():\n context = _current_build_context.value\n if context is None:\n raise InvalidArgument(\n u'No build context registered')\n return context\n\n\nclass BuildContext(object):\n\n def __init__(self, data, is_final=False, close_on_capture=True):\n self.data = data\n self.tasks = []\n self.is_final = is_final\n self.close_on_capture = close_on_capture\n self.close_on_del = False\n self.notes = []\n self.original_logging_disable = logging.NOTSET\n\n def __enter__(self):\n if not self.is_final:\n self.original_logging_disable = logging.root.manager.disable\n logging.disable(logging.CRITICAL)\n self.assign_variable = _current_build_context.with_value(self)\n self.assign_variable.__enter__()\n return self\n\n def __exit__(self, exc_type, exc_value, tb):\n self.assign_variable.__exit__(exc_type, exc_value, tb)\n if not self.is_final:\n logging.disable(self.original_logging_disable)\n if self.close() and exc_type is None:\n raise CleanupFailed()\n\n def local(self):\n return _current_build_context.with_value(self)\n\n def close(self):\n any_failed = False\n for task in self.tasks:\n try:\n task()\n except BaseException:\n any_failed = True\n report(traceback.format_exc())\n return any_failed\n\n\ndef cleanup(teardown):\n \"\"\"Register a function to be called when the current test has finished\n executing. 
Any exceptions thrown in teardown will be printed but not\n rethrown.\n\n Inside a test this isn't very interesting, because you can just use\n a finally block, but note that you can use this inside map, flatmap,\n etc. in order to e.g. insist that a value is closed at the end.\n \"\"\"\n context = _current_build_context.value\n if context is None:\n raise InvalidArgument(\n u'Cannot register cleanup outside of build context')\n context.tasks.append(teardown)\n\n\ndef note(value):\n # type: (AnyStr) -> None\n \"\"\"Report this value in the final execution.\"\"\"\n context = _current_build_context.value\n if context is None:\n raise InvalidArgument(\n 'Cannot make notes outside of a test')\n context.notes.append(value)\n if context.is_final or settings.default.verbosity >= Verbosity.verbose:\n report(value)\n\n\ndef event(value):\n # type: (AnyStr) -> None\n \"\"\"Record an event that occurred this test. Statistics on number of test\n runs with each event will be reported at the end if you run Hypothesis in\n statistics reporting mode.\n\n Events should be strings or convertible to them.\n \"\"\"\n context = _current_build_context.value\n if context is None:\n raise InvalidArgument(\n 'Cannot make record events outside of a test')\n\n if context.data is not None:\n context.data.note_event(value)\n", "path": "hypothesis-python/src/hypothesis/control.py"}]} | 1,645 | 263 |
gh_patches_debug_14070 | rasdani/github-patches | git_diff | huggingface__diffusers-7013 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Color channel order for watermark embedding
### Describe the bug
The encoder from the invisible watermark library expects input images with the channel order BGR, which is the default in OpenCV. This can be seen [here](https://github.com/ShieldMnt/invisible-watermark/blob/68d0376d94a4701ed240af0841ec12e00676e325/imwatermark/maxDct.py#L21).
As far as I can see from [here](https://github.com/huggingface/diffusers/blob/3369bc810a09a52521bbf8cc1ec77df3a8c682a8/src/diffusers/pipelines/stable_diffusion_xl/watermark.py#L24), diffusers passes the images in RGB order.
The watermark encoder then converts the given image from BGR to YUV. When the image is passed with the wrong channel order, this will lead to unexpected U and V channel values.
### Reproduction
n/a
### Logs
_No response_
### System Info
Python 3.10, diffusers 0.24.0, invisible-watermark-0.2.0
### Who can help?
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/diffusers/pipelines/stable_diffusion_xl/watermark.py`
Content:
```
1 import numpy as np
2 import torch
3
4 from ...utils import is_invisible_watermark_available
5
6
7 if is_invisible_watermark_available():
8 from imwatermark import WatermarkEncoder
9
10
11 # Copied from https://github.com/Stability-AI/generative-models/blob/613af104c6b85184091d42d374fef420eddb356d/scripts/demo/streamlit_helpers.py#L66
12 WATERMARK_MESSAGE = 0b101100111110110010010000011110111011000110011110
13 # bin(x)[2:] gives bits of x as str, use int to convert them to 0/1
14 WATERMARK_BITS = [int(bit) for bit in bin(WATERMARK_MESSAGE)[2:]]
15
16
17 class StableDiffusionXLWatermarker:
18 def __init__(self):
19 self.watermark = WATERMARK_BITS
20 self.encoder = WatermarkEncoder()
21
22 self.encoder.set_watermark("bits", self.watermark)
23
24 def apply_watermark(self, images: torch.FloatTensor):
25 # can't encode images that are smaller than 256
26 if images.shape[-1] < 256:
27 return images
28
29 images = (255 * (images / 2 + 0.5)).cpu().permute(0, 2, 3, 1).float().numpy()
30
31 images = [self.encoder.encode(image, "dwtDct") for image in images]
32
33 images = torch.from_numpy(np.array(images)).permute(0, 3, 1, 2)
34
35 images = torch.clamp(2 * (images / 255 - 0.5), min=-1.0, max=1.0)
36 return images
37
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/diffusers/pipelines/stable_diffusion_xl/watermark.py b/src/diffusers/pipelines/stable_diffusion_xl/watermark.py
--- a/src/diffusers/pipelines/stable_diffusion_xl/watermark.py
+++ b/src/diffusers/pipelines/stable_diffusion_xl/watermark.py
@@ -28,9 +28,15 @@
images = (255 * (images / 2 + 0.5)).cpu().permute(0, 2, 3, 1).float().numpy()
- images = [self.encoder.encode(image, "dwtDct") for image in images]
+ # Convert RGB to BGR, which is the channel order expected by the watermark encoder.
+ images = images[:, :, :, ::-1]
- images = torch.from_numpy(np.array(images)).permute(0, 3, 1, 2)
+ # Add watermark and convert BGR back to RGB
+ images = [self.encoder.encode(image, "dwtDct")[:, :, ::-1] for image in images]
+
+ images = np.array(images)
+
+ images = torch.from_numpy(images).permute(0, 3, 1, 2)
images = torch.clamp(2 * (images / 255 - 0.5), min=-1.0, max=1.0)
return images
| {"golden_diff": "diff --git a/src/diffusers/pipelines/stable_diffusion_xl/watermark.py b/src/diffusers/pipelines/stable_diffusion_xl/watermark.py\n--- a/src/diffusers/pipelines/stable_diffusion_xl/watermark.py\n+++ b/src/diffusers/pipelines/stable_diffusion_xl/watermark.py\n@@ -28,9 +28,15 @@\n \n images = (255 * (images / 2 + 0.5)).cpu().permute(0, 2, 3, 1).float().numpy()\n \n- images = [self.encoder.encode(image, \"dwtDct\") for image in images]\n+ # Convert RGB to BGR, which is the channel order expected by the watermark encoder.\n+ images = images[:, :, :, ::-1]\n \n- images = torch.from_numpy(np.array(images)).permute(0, 3, 1, 2)\n+ # Add watermark and convert BGR back to RGB\n+ images = [self.encoder.encode(image, \"dwtDct\")[:, :, ::-1] for image in images]\n+\n+ images = np.array(images)\n+\n+ images = torch.from_numpy(images).permute(0, 3, 1, 2)\n \n images = torch.clamp(2 * (images / 255 - 0.5), min=-1.0, max=1.0)\n return images\n", "issue": "Color channel order for watermark embedding\n### Describe the bug\n\nThe encoder from the invisible watermark library expects input images with the channel order BGR, which is the default in OpenCV. This can be seen [here](https://github.com/ShieldMnt/invisible-watermark/blob/68d0376d94a4701ed240af0841ec12e00676e325/imwatermark/maxDct.py#L21).\r\n\r\nAs far as I can see from [here](https://github.com/huggingface/diffusers/blob/3369bc810a09a52521bbf8cc1ec77df3a8c682a8/src/diffusers/pipelines/stable_diffusion_xl/watermark.py#L24), diffusers passes the images in RGB order.\r\n\r\nThe watermark encoder then converts the given image from BGR to YUV. When the image is passed with the wrong channel order, this will lead to unexpected U and V channel values.\n\n### Reproduction\n\nn/a\n\n### Logs\n\n_No response_\n\n### System Info\n\nPython 3.10, diffusers 0.24.0, invisible-watermark-0.2.0\n\n### Who can help?\n\n_No response_\n", "before_files": [{"content": "import numpy as np\nimport torch\n\nfrom ...utils import is_invisible_watermark_available\n\n\nif is_invisible_watermark_available():\n from imwatermark import WatermarkEncoder\n\n\n# Copied from https://github.com/Stability-AI/generative-models/blob/613af104c6b85184091d42d374fef420eddb356d/scripts/demo/streamlit_helpers.py#L66\nWATERMARK_MESSAGE = 0b101100111110110010010000011110111011000110011110\n# bin(x)[2:] gives bits of x as str, use int to convert them to 0/1\nWATERMARK_BITS = [int(bit) for bit in bin(WATERMARK_MESSAGE)[2:]]\n\n\nclass StableDiffusionXLWatermarker:\n def __init__(self):\n self.watermark = WATERMARK_BITS\n self.encoder = WatermarkEncoder()\n\n self.encoder.set_watermark(\"bits\", self.watermark)\n\n def apply_watermark(self, images: torch.FloatTensor):\n # can't encode images that are smaller than 256\n if images.shape[-1] < 256:\n return images\n\n images = (255 * (images / 2 + 0.5)).cpu().permute(0, 2, 3, 1).float().numpy()\n\n images = [self.encoder.encode(image, \"dwtDct\") for image in images]\n\n images = torch.from_numpy(np.array(images)).permute(0, 3, 1, 2)\n\n images = torch.clamp(2 * (images / 255 - 0.5), min=-1.0, max=1.0)\n return images\n", "path": "src/diffusers/pipelines/stable_diffusion_xl/watermark.py"}], "after_files": [{"content": "import numpy as np\nimport torch\n\nfrom ...utils import is_invisible_watermark_available\n\n\nif is_invisible_watermark_available():\n from imwatermark import WatermarkEncoder\n\n\n# Copied from 
https://github.com/Stability-AI/generative-models/blob/613af104c6b85184091d42d374fef420eddb356d/scripts/demo/streamlit_helpers.py#L66\nWATERMARK_MESSAGE = 0b101100111110110010010000011110111011000110011110\n# bin(x)[2:] gives bits of x as str, use int to convert them to 0/1\nWATERMARK_BITS = [int(bit) for bit in bin(WATERMARK_MESSAGE)[2:]]\n\n\nclass StableDiffusionXLWatermarker:\n def __init__(self):\n self.watermark = WATERMARK_BITS\n self.encoder = WatermarkEncoder()\n\n self.encoder.set_watermark(\"bits\", self.watermark)\n\n def apply_watermark(self, images: torch.FloatTensor):\n # can't encode images that are smaller than 256\n if images.shape[-1] < 256:\n return images\n\n images = (255 * (images / 2 + 0.5)).cpu().permute(0, 2, 3, 1).float().numpy()\n\n # Convert RGB to BGR, which is the channel order expected by the watermark encoder.\n images = images[:, :, :, ::-1]\n\n # Add watermark and convert BGR back to RGB\n images = [self.encoder.encode(image, \"dwtDct\")[:, :, ::-1] for image in images]\n\n images = np.array(images)\n\n images = torch.from_numpy(images).permute(0, 3, 1, 2)\n\n images = torch.clamp(2 * (images / 255 - 0.5), min=-1.0, max=1.0)\n return images\n", "path": "src/diffusers/pipelines/stable_diffusion_xl/watermark.py"}]} | 1,043 | 321 |
gh_patches_debug_7983 | rasdani/github-patches | git_diff | pulp__pulpcore-4095 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Gunicorn consuming excessive amounts of memory
**Version**
3.16.z
**Describe the bug**
Gunicorn consuming excessive amounts of memory, 3.5-4gb
**To Reproduce**
Unclear
**Expected behavior**
Probably not to have a single gunicorn process use 4gb of memory
**Additional context**
BZ: https://bugzilla.redhat.com/show_bug.cgi?id=2035873
Katello forum discussion: https://community.theforeman.org/t/katello-4-5-foreman-3-3-memory-leak-in-gunicorn/29658/22
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pulpcore/app/access_policy.py`
Content:
```
1 from functools import lru_cache
2 from rest_access_policy import AccessPolicy
3 from rest_framework.exceptions import APIException
4
5 from pulpcore.app.models import AccessPolicy as AccessPolicyModel
6 from pulpcore.app.util import get_view_urlpattern, get_viewset_for_model
7
8
9 class AccessPolicyFromDB(AccessPolicy):
10 """
11 An AccessPolicy that loads statements from an `AccessPolicy` model instance.
12 """
13
14 @staticmethod
15 @lru_cache
16 def get_access_policy(view):
17 """
18 Retrieves the AccessPolicy from the DB or None if it doesn't exist.
19
20 Args:
21 view (subclass of rest_framework.view.APIView): The view or viewset to receive the
22 AccessPolicy model for.
23
24 Returns:
25 Either a `pulpcore.app.models.AccessPolicy` or None.
26 """
27 try:
28 urlpattern = get_view_urlpattern(view)
29 except AttributeError:
30 # The view does not define a `urlpattern()` method, e.g. it's not a NamedModelViewset
31 return None
32
33 try:
34 return AccessPolicyModel.objects.get(viewset_name=urlpattern)
35 except AccessPolicyModel.DoesNotExist:
36 return None
37
38 @classmethod
39 def handle_creation_hooks(cls, obj):
40 """
41 Handle the creation hooks defined in this policy for the passed in `obj`.
42
43 Args:
44 cls: The class this method belongs to.
45 obj: The model instance to have its creation hooks handled for.
46
47 """
48 viewset = get_viewset_for_model(obj)
49 access_policy = cls.get_access_policy(viewset)
50 if access_policy and access_policy.creation_hooks is not None:
51 for creation_hook in access_policy.creation_hooks:
52 hook_name = creation_hook["function"]
53 try:
54 function = obj.REGISTERED_CREATION_HOOKS[hook_name]
55 except KeyError:
56 raise APIException(
57 f"Creation hook '{hook_name}' was not registered for this view set."
58 )
59
60 kwargs = creation_hook.get("parameters") or {}
61 function(**kwargs)
62
63 def scope_queryset(self, view, qs):
64 """
65 Scope the queryset based on the access policy `scope_queryset` method if present.
66 """
67 if access_policy := self.get_access_policy(view):
68 if access_policy.queryset_scoping:
69 scope = access_policy.queryset_scoping["function"]
70 if not (function := getattr(view, scope, None)):
71 raise APIException(
72 f"Queryset scoping method {scope} is not present on this view set."
73 )
74 kwargs = access_policy.queryset_scoping.get("parameters") or {}
75 qs = function(qs, **kwargs)
76 return qs
77
78 def get_policy_statements(self, request, view):
79 """
80 Return the policy statements from an AccessPolicy instance matching the viewset name.
81
82 This is an implementation of a method that will be called by
83 `rest_access_policy.AccessPolicy`. See the drf-access-policy docs for more info:
84
85 https://rsinger86.github.io/drf-access-policy/loading_external_source/
86
87 The `pulpcore.plugin.models.AccessPolicy` instance is looked up by the `viewset_name`
88 attribute using::
89
90 AccessPolicyModel.objects.get(viewset_name=get_view_urlpattern(view))
91
92 If a matching `pulpcore.plugin.models.AccessPolicy` cannot be found, a default behavior of
93 allowing only admin users to perform any operation is used. This fallback allows the Pulp
94 RBAC implementation to be turned on endpoint-by-endpoint with less effort.
95
96 Args:
97 request (rest_framework.request.Request): The request being checked for authorization.
98 view (subclass rest_framework.viewsets.GenericViewSet): The view name being requested.
99
100 Returns:
101 The access policy statements in drf-access-policy policy structure.
102 """
103 if access_policy_obj := self.get_access_policy(view):
104 return access_policy_obj.statements
105 else:
106 default_statement = [{"action": "*", "principal": "admin", "effect": "allow"}]
107 policy = getattr(view, "DEFAULT_ACCESS_POLICY", {"statements": default_statement})
108 return policy["statements"]
109
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pulpcore/app/access_policy.py b/pulpcore/app/access_policy.py
--- a/pulpcore/app/access_policy.py
+++ b/pulpcore/app/access_policy.py
@@ -1,4 +1,3 @@
-from functools import lru_cache
from rest_access_policy import AccessPolicy
from rest_framework.exceptions import APIException
@@ -12,7 +11,6 @@
"""
@staticmethod
- @lru_cache
def get_access_policy(view):
"""
Retrieves the AccessPolicy from the DB or None if it doesn't exist.
| {"golden_diff": "diff --git a/pulpcore/app/access_policy.py b/pulpcore/app/access_policy.py\n--- a/pulpcore/app/access_policy.py\n+++ b/pulpcore/app/access_policy.py\n@@ -1,4 +1,3 @@\n-from functools import lru_cache\n from rest_access_policy import AccessPolicy\n from rest_framework.exceptions import APIException\n \n@@ -12,7 +11,6 @@\n \"\"\"\n \n @staticmethod\n- @lru_cache\n def get_access_policy(view):\n \"\"\"\n Retrieves the AccessPolicy from the DB or None if it doesn't exist.\n", "issue": "Gunicorn consuming excessive amounts of memory\n**Version**\r\n3.16.z\r\n\r\n**Describe the bug**\r\nGunicorn consuming excessive amounts of memory, 3.5-4gb\r\n\r\n**To Reproduce**\r\nUnclear\r\n\r\n**Expected behavior**\r\nProbably not to have a single gunicorn process use 4gb of memory\r\n\r\n**Additional context**\r\n\r\nBZ: https://bugzilla.redhat.com/show_bug.cgi?id=2035873\r\nKatello forum discussion: https://community.theforeman.org/t/katello-4-5-foreman-3-3-memory-leak-in-gunicorn/29658/22\n", "before_files": [{"content": "from functools import lru_cache\nfrom rest_access_policy import AccessPolicy\nfrom rest_framework.exceptions import APIException\n\nfrom pulpcore.app.models import AccessPolicy as AccessPolicyModel\nfrom pulpcore.app.util import get_view_urlpattern, get_viewset_for_model\n\n\nclass AccessPolicyFromDB(AccessPolicy):\n \"\"\"\n An AccessPolicy that loads statements from an `AccessPolicy` model instance.\n \"\"\"\n\n @staticmethod\n @lru_cache\n def get_access_policy(view):\n \"\"\"\n Retrieves the AccessPolicy from the DB or None if it doesn't exist.\n\n Args:\n view (subclass of rest_framework.view.APIView): The view or viewset to receive the\n AccessPolicy model for.\n\n Returns:\n Either a `pulpcore.app.models.AccessPolicy` or None.\n \"\"\"\n try:\n urlpattern = get_view_urlpattern(view)\n except AttributeError:\n # The view does not define a `urlpattern()` method, e.g. 
it's not a NamedModelViewset\n return None\n\n try:\n return AccessPolicyModel.objects.get(viewset_name=urlpattern)\n except AccessPolicyModel.DoesNotExist:\n return None\n\n @classmethod\n def handle_creation_hooks(cls, obj):\n \"\"\"\n Handle the creation hooks defined in this policy for the passed in `obj`.\n\n Args:\n cls: The class this method belongs to.\n obj: The model instance to have its creation hooks handled for.\n\n \"\"\"\n viewset = get_viewset_for_model(obj)\n access_policy = cls.get_access_policy(viewset)\n if access_policy and access_policy.creation_hooks is not None:\n for creation_hook in access_policy.creation_hooks:\n hook_name = creation_hook[\"function\"]\n try:\n function = obj.REGISTERED_CREATION_HOOKS[hook_name]\n except KeyError:\n raise APIException(\n f\"Creation hook '{hook_name}' was not registered for this view set.\"\n )\n\n kwargs = creation_hook.get(\"parameters\") or {}\n function(**kwargs)\n\n def scope_queryset(self, view, qs):\n \"\"\"\n Scope the queryset based on the access policy `scope_queryset` method if present.\n \"\"\"\n if access_policy := self.get_access_policy(view):\n if access_policy.queryset_scoping:\n scope = access_policy.queryset_scoping[\"function\"]\n if not (function := getattr(view, scope, None)):\n raise APIException(\n f\"Queryset scoping method {scope} is not present on this view set.\"\n )\n kwargs = access_policy.queryset_scoping.get(\"parameters\") or {}\n qs = function(qs, **kwargs)\n return qs\n\n def get_policy_statements(self, request, view):\n \"\"\"\n Return the policy statements from an AccessPolicy instance matching the viewset name.\n\n This is an implementation of a method that will be called by\n `rest_access_policy.AccessPolicy`. See the drf-access-policy docs for more info:\n\n https://rsinger86.github.io/drf-access-policy/loading_external_source/\n\n The `pulpcore.plugin.models.AccessPolicy` instance is looked up by the `viewset_name`\n attribute using::\n\n AccessPolicyModel.objects.get(viewset_name=get_view_urlpattern(view))\n\n If a matching `pulpcore.plugin.models.AccessPolicy` cannot be found, a default behavior of\n allowing only admin users to perform any operation is used. 
This fallback allows the Pulp\n RBAC implementation to be turned on endpoint-by-endpoint with less effort.\n\n Args:\n request (rest_framework.request.Request): The request being checked for authorization.\n view (subclass rest_framework.viewsets.GenericViewSet): The view name being requested.\n\n Returns:\n The access policy statements in drf-access-policy policy structure.\n \"\"\"\n if access_policy_obj := self.get_access_policy(view):\n return access_policy_obj.statements\n else:\n default_statement = [{\"action\": \"*\", \"principal\": \"admin\", \"effect\": \"allow\"}]\n policy = getattr(view, \"DEFAULT_ACCESS_POLICY\", {\"statements\": default_statement})\n return policy[\"statements\"]\n", "path": "pulpcore/app/access_policy.py"}], "after_files": [{"content": "from rest_access_policy import AccessPolicy\nfrom rest_framework.exceptions import APIException\n\nfrom pulpcore.app.models import AccessPolicy as AccessPolicyModel\nfrom pulpcore.app.util import get_view_urlpattern, get_viewset_for_model\n\n\nclass AccessPolicyFromDB(AccessPolicy):\n \"\"\"\n An AccessPolicy that loads statements from an `AccessPolicy` model instance.\n \"\"\"\n\n @staticmethod\n def get_access_policy(view):\n \"\"\"\n Retrieves the AccessPolicy from the DB or None if it doesn't exist.\n\n Args:\n view (subclass of rest_framework.view.APIView): The view or viewset to receive the\n AccessPolicy model for.\n\n Returns:\n Either a `pulpcore.app.models.AccessPolicy` or None.\n \"\"\"\n try:\n urlpattern = get_view_urlpattern(view)\n except AttributeError:\n # The view does not define a `urlpattern()` method, e.g. it's not a NamedModelViewset\n return None\n\n try:\n return AccessPolicyModel.objects.get(viewset_name=urlpattern)\n except AccessPolicyModel.DoesNotExist:\n return None\n\n @classmethod\n def handle_creation_hooks(cls, obj):\n \"\"\"\n Handle the creation hooks defined in this policy for the passed in `obj`.\n\n Args:\n cls: The class this method belongs to.\n obj: The model instance to have its creation hooks handled for.\n\n \"\"\"\n viewset = get_viewset_for_model(obj)\n access_policy = cls.get_access_policy(viewset)\n if access_policy and access_policy.creation_hooks is not None:\n for creation_hook in access_policy.creation_hooks:\n hook_name = creation_hook[\"function\"]\n try:\n function = obj.REGISTERED_CREATION_HOOKS[hook_name]\n except KeyError:\n raise APIException(\n f\"Creation hook '{hook_name}' was not registered for this view set.\"\n )\n\n kwargs = creation_hook.get(\"parameters\") or {}\n function(**kwargs)\n\n def scope_queryset(self, view, qs):\n \"\"\"\n Scope the queryset based on the access policy `scope_queryset` method if present.\n \"\"\"\n if access_policy := self.get_access_policy(view):\n if access_policy.queryset_scoping:\n scope = access_policy.queryset_scoping[\"function\"]\n if not (function := getattr(view, scope, None)):\n raise APIException(\n f\"Queryset scoping method {scope} is not present on this view set.\"\n )\n kwargs = access_policy.queryset_scoping.get(\"parameters\") or {}\n qs = function(qs, **kwargs)\n return qs\n\n def get_policy_statements(self, request, view):\n \"\"\"\n Return the policy statements from an AccessPolicy instance matching the viewset name.\n\n This is an implementation of a method that will be called by\n `rest_access_policy.AccessPolicy`. 
See the drf-access-policy docs for more info:\n\n https://rsinger86.github.io/drf-access-policy/loading_external_source/\n\n The `pulpcore.plugin.models.AccessPolicy` instance is looked up by the `viewset_name`\n attribute using::\n\n AccessPolicyModel.objects.get(viewset_name=get_view_urlpattern(view))\n\n If a matching `pulpcore.plugin.models.AccessPolicy` cannot be found, a default behavior of\n allowing only admin users to perform any operation is used. This fallback allows the Pulp\n RBAC implementation to be turned on endpoint-by-endpoint with less effort.\n\n Args:\n request (rest_framework.request.Request): The request being checked for authorization.\n view (subclass rest_framework.viewsets.GenericViewSet): The view name being requested.\n\n Returns:\n The access policy statements in drf-access-policy policy structure.\n \"\"\"\n if access_policy_obj := self.get_access_policy(view):\n return access_policy_obj.statements\n else:\n default_statement = [{\"action\": \"*\", \"principal\": \"admin\", \"effect\": \"allow\"}]\n policy = getattr(view, \"DEFAULT_ACCESS_POLICY\", {\"statements\": default_statement})\n return policy[\"statements\"]\n", "path": "pulpcore/app/access_policy.py"}]} | 1,482 | 124 |
gh_patches_debug_32719 | rasdani/github-patches | git_diff | freedomofpress__securedrop-5074 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add v2 and v3 source interface addresses to metadata endpoint
## Description
The Source Interface exposes a `/metadata` endpoint that includes information about an instance's OS and application version, supported languages, and submission key fingerprint, which is useful for Nagios monitoring purposes among other things. Adding the SI addresses to this endpoint would a) allow FPF to monitor v3 service uptake and update Nagios checks accordingly, and b) allow end users on the v2 version of the service to verify the correct v3 address for the service (as an alternative or supplement to automatic redirection via the Alt-SVC header).
Potential downside: if an admin turns on v3 but doesn't want to advertise that they've done so, this could inadvertently expose the v3 address.
## User Research Evidence
I have none but folks seem to like the idea on Gitter.
## User Stories
- As an FPF support team member, I'd like to be able to have v3 service information available for monitoring purposes
- As a SecureDrop user, I'd like to be able to verify the correct v3 address corresponding to a v2 address.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `securedrop/source_app/utils.py`
Content:
```
1 import io
2 import logging
3 import subprocess
4
5 from datetime import datetime
6 from flask import session, current_app, abort, g
7 from sqlalchemy import create_engine
8 from sqlalchemy.orm import sessionmaker
9 from threading import Thread
10
11 import i18n
12
13 from crypto_util import CryptoException
14 from models import Source
15
16
17 def logged_in():
18 return 'logged_in' in session
19
20
21 def valid_codename(codename):
22 try:
23 filesystem_id = current_app.crypto_util.hash_codename(codename)
24 except CryptoException as e:
25 current_app.logger.info(
26 "Could not compute filesystem ID for codename '{}': {}".format(
27 codename, e))
28 abort(500)
29
30 source = Source.query.filter_by(filesystem_id=filesystem_id).first()
31 return source is not None
32
33
34 def generate_unique_codename(config):
35 """Generate random codenames until we get an unused one"""
36 while True:
37 codename = current_app.crypto_util.genrandomid(
38 Source.NUM_WORDS,
39 i18n.get_language(config))
40
41 # The maximum length of a word in the wordlist is 9 letters and the
42 # codename length is 7 words, so it is currently impossible to
43 # generate a codename that is longer than the maximum codename length
44 # (currently 128 characters). This code is meant to be defense in depth
45 # to guard against potential future changes, such as modifications to
46 # the word list or the maximum codename length.
47 if len(codename) > Source.MAX_CODENAME_LEN:
48 current_app.logger.warning(
49 "Generated a source codename that was too long, "
50 "skipping it. This should not happen. "
51 "(Codename='{}')".format(codename))
52 continue
53
54 # scrypt (slow)
55 filesystem_id = current_app.crypto_util.hash_codename(codename)
56
57 matching_sources = Source.query.filter(
58 Source.filesystem_id == filesystem_id).all()
59 if len(matching_sources) == 0:
60 return codename
61
62
63 def get_entropy_estimate():
64 with io.open('/proc/sys/kernel/random/entropy_avail') as f:
65 return int(f.read())
66
67
68 def asynchronous(f):
69 def wrapper(*args, **kwargs):
70 thread = Thread(target=f, args=args, kwargs=kwargs)
71 thread.start()
72 return wrapper
73
74
75 @asynchronous
76 def async_genkey(crypto_util_, db_uri, filesystem_id, codename):
77 # We pass in the `crypto_util_` so we don't have to reference `current_app`
78 # here. The app might not have a pushed context during testing which would
79 # cause this asynchronous function to break.
80 crypto_util_.genkeypair(filesystem_id, codename)
81
82 # Register key generation as update to the source, so sources will
83 # filter to the top of the list in the journalist interface if a
84 # flagged source logs in and has a key generated for them. #789
85 session = sessionmaker(bind=create_engine(db_uri))()
86 try:
87 source = session.query(Source).filter(
88 Source.filesystem_id == filesystem_id).one()
89 source.last_updated = datetime.utcnow()
90 session.commit()
91 except Exception as e:
92 logging.getLogger(__name__).error(
93 "async_genkey for source (filesystem_id={}): {}"
94 .format(filesystem_id, e))
95 session.close()
96
97
98 def normalize_timestamps(filesystem_id):
99 """
100 Update the timestamps on all of the source's submissions to match that of
101 the latest submission. This minimizes metadata that could be useful to
102 investigators. See #301.
103 """
104 sub_paths = [current_app.storage.path(filesystem_id, submission.filename)
105 for submission in g.source.submissions]
106 if len(sub_paths) > 1:
107 args = ["touch"]
108 args.extend(sub_paths[:-1])
109 rc = subprocess.call(args)
110 if rc != 0:
111 current_app.logger.warning(
112 "Couldn't normalize submission "
113 "timestamps (touch exited with %d)" %
114 rc)
115
```
Path: `securedrop/source_app/api.py`
Content:
```
1 import json
2 import platform
3
4 from flask import Blueprint, current_app, make_response
5
6 import version
7
8
9 def make_blueprint(config):
10 view = Blueprint('api', __name__)
11
12 @view.route('/metadata')
13 def metadata():
14 meta = {
15 'allow_document_uploads': current_app.instance_config.allow_document_uploads,
16 'gpg_fpr': config.JOURNALIST_KEY,
17 'sd_version': version.__version__,
18 'server_os': platform.linux_distribution()[1],
19 'supported_languages': config.SUPPORTED_LOCALES
20 }
21 resp = make_response(json.dumps(meta))
22 resp.headers['Content-Type'] = 'application/json'
23 return resp
24
25 return view
26
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/securedrop/source_app/api.py b/securedrop/source_app/api.py
--- a/securedrop/source_app/api.py
+++ b/securedrop/source_app/api.py
@@ -3,6 +3,8 @@
from flask import Blueprint, current_app, make_response
+from source_app.utils import get_sourcev2_url, get_sourcev3_url
+
import version
@@ -16,7 +18,9 @@
'gpg_fpr': config.JOURNALIST_KEY,
'sd_version': version.__version__,
'server_os': platform.linux_distribution()[1],
- 'supported_languages': config.SUPPORTED_LOCALES
+ 'supported_languages': config.SUPPORTED_LOCALES,
+ 'v2_source_url': get_sourcev2_url(),
+ 'v3_source_url': get_sourcev3_url()
}
resp = make_response(json.dumps(meta))
resp.headers['Content-Type'] = 'application/json'
diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py
--- a/securedrop/source_app/utils.py
+++ b/securedrop/source_app/utils.py
@@ -9,6 +9,7 @@
from threading import Thread
import i18n
+import re
from crypto_util import CryptoException
from models import Source
@@ -112,3 +113,31 @@
"Couldn't normalize submission "
"timestamps (touch exited with %d)" %
rc)
+
+
+def check_url_file(path, regexp):
+ """
+ Check that a file exists at the path given and contains a single line
+ matching the regexp. Used for checking the source interface address
+ files at /var/lib/securedrop/source_{v2,v3}_url.
+ """
+ try:
+ f = open(path, "r")
+ contents = f.readline().strip()
+ f.close()
+ if re.match(regexp, contents):
+ return contents
+ else:
+ return None
+ except IOError:
+ return None
+
+
+def get_sourcev2_url():
+ return check_url_file("/var/lib/securedrop/source_v2_url",
+ r"^[a-z0-9]{16}\.onion$")
+
+
+def get_sourcev3_url():
+ return check_url_file("/var/lib/securedrop/source_v3_url",
+ r"^[a-z0-9]{56}\.onion$")
| {"golden_diff": "diff --git a/securedrop/source_app/api.py b/securedrop/source_app/api.py\n--- a/securedrop/source_app/api.py\n+++ b/securedrop/source_app/api.py\n@@ -3,6 +3,8 @@\n \n from flask import Blueprint, current_app, make_response\n \n+from source_app.utils import get_sourcev2_url, get_sourcev3_url\n+\n import version\n \n \n@@ -16,7 +18,9 @@\n 'gpg_fpr': config.JOURNALIST_KEY,\n 'sd_version': version.__version__,\n 'server_os': platform.linux_distribution()[1],\n- 'supported_languages': config.SUPPORTED_LOCALES\n+ 'supported_languages': config.SUPPORTED_LOCALES,\n+ 'v2_source_url': get_sourcev2_url(),\n+ 'v3_source_url': get_sourcev3_url()\n }\n resp = make_response(json.dumps(meta))\n resp.headers['Content-Type'] = 'application/json'\ndiff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py\n--- a/securedrop/source_app/utils.py\n+++ b/securedrop/source_app/utils.py\n@@ -9,6 +9,7 @@\n from threading import Thread\n \n import i18n\n+import re\n \n from crypto_util import CryptoException\n from models import Source\n@@ -112,3 +113,31 @@\n \"Couldn't normalize submission \"\n \"timestamps (touch exited with %d)\" %\n rc)\n+\n+\n+def check_url_file(path, regexp):\n+ \"\"\"\n+ Check that a file exists at the path given and contains a single line\n+ matching the regexp. Used for checking the source interface address\n+ files at /var/lib/securedrop/source_{v2,v3}_url.\n+ \"\"\"\n+ try:\n+ f = open(path, \"r\")\n+ contents = f.readline().strip()\n+ f.close()\n+ if re.match(regexp, contents):\n+ return contents\n+ else:\n+ return None\n+ except IOError:\n+ return None\n+\n+\n+def get_sourcev2_url():\n+ return check_url_file(\"/var/lib/securedrop/source_v2_url\",\n+ r\"^[a-z0-9]{16}\\.onion$\")\n+\n+\n+def get_sourcev3_url():\n+ return check_url_file(\"/var/lib/securedrop/source_v3_url\",\n+ r\"^[a-z0-9]{56}\\.onion$\")\n", "issue": "Add v2 and v3 source interface addresses to metadata endpoint\n## Description\r\n\r\nThe Source Interface exposes a `/metadata` endpoint that includes information about an instance's OS and application version, supported languages, and submission key fingerprint, which is useful for Nagios monitoring purposes among other things. Adding the SI addresses in this endpoint would a) allow FPF to monitor v3 service uptake and update Nagios checks accordingly, and b) allow end users on the v2 version of service to verify the correct v3 address for the service (as an alternative or supplement to automatic redirection via the Alt-SVC header).\r\n\r\nPotential downside: if an admin turns on v3 but doesn't want to advertise that they've done so, this could inadvertently expose the v3 address. 
\r\n\r\n## User Research Evidence\r\n\r\nI have none but folks seem to like the idea on Gitter.\r\n\r\n## User Stories\r\n- As an FPF support team member, I'd like to be able to have v3 service information available for monitoring purposes\r\n- as a SecureDrop user, I'd like to be able to verify the correct v3 address corresponding to a v2 address.\r\n\n", "before_files": [{"content": "import io\nimport logging\nimport subprocess\n\nfrom datetime import datetime\nfrom flask import session, current_app, abort, g\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.orm import sessionmaker\nfrom threading import Thread\n\nimport i18n\n\nfrom crypto_util import CryptoException\nfrom models import Source\n\n\ndef logged_in():\n return 'logged_in' in session\n\n\ndef valid_codename(codename):\n try:\n filesystem_id = current_app.crypto_util.hash_codename(codename)\n except CryptoException as e:\n current_app.logger.info(\n \"Could not compute filesystem ID for codename '{}': {}\".format(\n codename, e))\n abort(500)\n\n source = Source.query.filter_by(filesystem_id=filesystem_id).first()\n return source is not None\n\n\ndef generate_unique_codename(config):\n \"\"\"Generate random codenames until we get an unused one\"\"\"\n while True:\n codename = current_app.crypto_util.genrandomid(\n Source.NUM_WORDS,\n i18n.get_language(config))\n\n # The maximum length of a word in the wordlist is 9 letters and the\n # codename length is 7 words, so it is currently impossible to\n # generate a codename that is longer than the maximum codename length\n # (currently 128 characters). This code is meant to be defense in depth\n # to guard against potential future changes, such as modifications to\n # the word list or the maximum codename length.\n if len(codename) > Source.MAX_CODENAME_LEN:\n current_app.logger.warning(\n \"Generated a source codename that was too long, \"\n \"skipping it. This should not happen. \"\n \"(Codename='{}')\".format(codename))\n continue\n\n # scrypt (slow)\n filesystem_id = current_app.crypto_util.hash_codename(codename)\n\n matching_sources = Source.query.filter(\n Source.filesystem_id == filesystem_id).all()\n if len(matching_sources) == 0:\n return codename\n\n\ndef get_entropy_estimate():\n with io.open('/proc/sys/kernel/random/entropy_avail') as f:\n return int(f.read())\n\n\ndef asynchronous(f):\n def wrapper(*args, **kwargs):\n thread = Thread(target=f, args=args, kwargs=kwargs)\n thread.start()\n return wrapper\n\n\n@asynchronous\ndef async_genkey(crypto_util_, db_uri, filesystem_id, codename):\n # We pass in the `crypto_util_` so we don't have to reference `current_app`\n # here. The app might not have a pushed context during testing which would\n # cause this asynchronous function to break.\n crypto_util_.genkeypair(filesystem_id, codename)\n\n # Register key generation as update to the source, so sources will\n # filter to the top of the list in the journalist interface if a\n # flagged source logs in and has a key generated for them. #789\n session = sessionmaker(bind=create_engine(db_uri))()\n try:\n source = session.query(Source).filter(\n Source.filesystem_id == filesystem_id).one()\n source.last_updated = datetime.utcnow()\n session.commit()\n except Exception as e:\n logging.getLogger(__name__).error(\n \"async_genkey for source (filesystem_id={}): {}\"\n .format(filesystem_id, e))\n session.close()\n\n\ndef normalize_timestamps(filesystem_id):\n \"\"\"\n Update the timestamps on all of the source's submissions to match that of\n the latest submission. 
This minimizes metadata that could be useful to\n investigators. See #301.\n \"\"\"\n sub_paths = [current_app.storage.path(filesystem_id, submission.filename)\n for submission in g.source.submissions]\n if len(sub_paths) > 1:\n args = [\"touch\"]\n args.extend(sub_paths[:-1])\n rc = subprocess.call(args)\n if rc != 0:\n current_app.logger.warning(\n \"Couldn't normalize submission \"\n \"timestamps (touch exited with %d)\" %\n rc)\n", "path": "securedrop/source_app/utils.py"}, {"content": "import json\nimport platform\n\nfrom flask import Blueprint, current_app, make_response\n\nimport version\n\n\ndef make_blueprint(config):\n view = Blueprint('api', __name__)\n\n @view.route('/metadata')\n def metadata():\n meta = {\n 'allow_document_uploads': current_app.instance_config.allow_document_uploads,\n 'gpg_fpr': config.JOURNALIST_KEY,\n 'sd_version': version.__version__,\n 'server_os': platform.linux_distribution()[1],\n 'supported_languages': config.SUPPORTED_LOCALES\n }\n resp = make_response(json.dumps(meta))\n resp.headers['Content-Type'] = 'application/json'\n return resp\n\n return view\n", "path": "securedrop/source_app/api.py"}], "after_files": [{"content": "import io\nimport logging\nimport subprocess\n\nfrom datetime import datetime\nfrom flask import session, current_app, abort, g\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.orm import sessionmaker\nfrom threading import Thread\n\nimport i18n\nimport re\n\nfrom crypto_util import CryptoException\nfrom models import Source\n\n\ndef logged_in():\n return 'logged_in' in session\n\n\ndef valid_codename(codename):\n try:\n filesystem_id = current_app.crypto_util.hash_codename(codename)\n except CryptoException as e:\n current_app.logger.info(\n \"Could not compute filesystem ID for codename '{}': {}\".format(\n codename, e))\n abort(500)\n\n source = Source.query.filter_by(filesystem_id=filesystem_id).first()\n return source is not None\n\n\ndef generate_unique_codename(config):\n \"\"\"Generate random codenames until we get an unused one\"\"\"\n while True:\n codename = current_app.crypto_util.genrandomid(\n Source.NUM_WORDS,\n i18n.get_language(config))\n\n # The maximum length of a word in the wordlist is 9 letters and the\n # codename length is 7 words, so it is currently impossible to\n # generate a codename that is longer than the maximum codename length\n # (currently 128 characters). This code is meant to be defense in depth\n # to guard against potential future changes, such as modifications to\n # the word list or the maximum codename length.\n if len(codename) > Source.MAX_CODENAME_LEN:\n current_app.logger.warning(\n \"Generated a source codename that was too long, \"\n \"skipping it. This should not happen. \"\n \"(Codename='{}')\".format(codename))\n continue\n\n # scrypt (slow)\n filesystem_id = current_app.crypto_util.hash_codename(codename)\n\n matching_sources = Source.query.filter(\n Source.filesystem_id == filesystem_id).all()\n if len(matching_sources) == 0:\n return codename\n\n\ndef get_entropy_estimate():\n with io.open('/proc/sys/kernel/random/entropy_avail') as f:\n return int(f.read())\n\n\ndef asynchronous(f):\n def wrapper(*args, **kwargs):\n thread = Thread(target=f, args=args, kwargs=kwargs)\n thread.start()\n return wrapper\n\n\n@asynchronous\ndef async_genkey(crypto_util_, db_uri, filesystem_id, codename):\n # We pass in the `crypto_util_` so we don't have to reference `current_app`\n # here. 
The app might not have a pushed context during testing which would\n # cause this asynchronous function to break.\n crypto_util_.genkeypair(filesystem_id, codename)\n\n # Register key generation as update to the source, so sources will\n # filter to the top of the list in the journalist interface if a\n # flagged source logs in and has a key generated for them. #789\n session = sessionmaker(bind=create_engine(db_uri))()\n try:\n source = session.query(Source).filter(\n Source.filesystem_id == filesystem_id).one()\n source.last_updated = datetime.utcnow()\n session.commit()\n except Exception as e:\n logging.getLogger(__name__).error(\n \"async_genkey for source (filesystem_id={}): {}\"\n .format(filesystem_id, e))\n session.close()\n\n\ndef normalize_timestamps(filesystem_id):\n \"\"\"\n Update the timestamps on all of the source's submissions to match that of\n the latest submission. This minimizes metadata that could be useful to\n investigators. See #301.\n \"\"\"\n sub_paths = [current_app.storage.path(filesystem_id, submission.filename)\n for submission in g.source.submissions]\n if len(sub_paths) > 1:\n args = [\"touch\"]\n args.extend(sub_paths[:-1])\n rc = subprocess.call(args)\n if rc != 0:\n current_app.logger.warning(\n \"Couldn't normalize submission \"\n \"timestamps (touch exited with %d)\" %\n rc)\n\n\ndef check_url_file(path, regexp):\n \"\"\"\n Check that a file exists at the path given and contains a single line\n matching the regexp. Used for checking the source interface address\n files at /var/lib/securedrop/source_{v2,v3}_url.\n \"\"\"\n try:\n f = open(path, \"r\")\n contents = f.readline().strip()\n f.close()\n if re.match(regexp, contents):\n return contents\n else:\n return None\n except IOError:\n return None\n\n\ndef get_sourcev2_url():\n return check_url_file(\"/var/lib/securedrop/source_v2_url\",\n r\"^[a-z0-9]{16}\\.onion$\")\n\n\ndef get_sourcev3_url():\n return check_url_file(\"/var/lib/securedrop/source_v3_url\",\n r\"^[a-z0-9]{56}\\.onion$\")\n", "path": "securedrop/source_app/utils.py"}, {"content": "import json\nimport platform\n\nfrom flask import Blueprint, current_app, make_response\n\nfrom source_app.utils import get_sourcev2_url, get_sourcev3_url\n\nimport version\n\n\ndef make_blueprint(config):\n view = Blueprint('api', __name__)\n\n @view.route('/metadata')\n def metadata():\n meta = {\n 'allow_document_uploads': current_app.instance_config.allow_document_uploads,\n 'gpg_fpr': config.JOURNALIST_KEY,\n 'sd_version': version.__version__,\n 'server_os': platform.linux_distribution()[1],\n 'supported_languages': config.SUPPORTED_LOCALES,\n 'v2_source_url': get_sourcev2_url(),\n 'v3_source_url': get_sourcev3_url()\n }\n resp = make_response(json.dumps(meta))\n resp.headers['Content-Type'] = 'application/json'\n return resp\n\n return view\n", "path": "securedrop/source_app/api.py"}]} | 1,821 | 540 |
gh_patches_debug_32313 | rasdani/github-patches | git_diff | sopel-irc__sopel-725 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make .seen persist over bot restarts
Currently, when a Willie-based bot is restarted, it loses all the info about who it has seen. This is quite inconvenient, as restarts are required quite often, especially on networks where there are lots of netsplits going on and the bot loses its nick to them all the time. The proposed solution would be to keep a persistent DB containing the relevant info, from which old records may or may not be auto-deleted at regular intervals.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `willie/modules/seen.py`
Content:
```
1 # coding=utf8
2 """
3 seen.py - Willie Seen Module
4 Copyright 2008, Sean B. Palmer, inamidst.com
5 Copyright © 2012, Elad Alfassa <[email protected]>
6 Licensed under the Eiffel Forum License 2.
7
8 http://willie.dftba.net
9 """
10 from __future__ import unicode_literals
11
12 import time
13 import datetime
14 from willie.tools import Ddict, Identifier, get_timezone, format_time
15 from willie.module import commands, rule, priority
16
17 seen_dict = Ddict(dict)
18
19
20 @commands('seen')
21 def seen(bot, trigger):
22 """Reports when and where the user was last seen."""
23 if not trigger.group(2):
24 bot.say(".seen <nick> - Reports when <nick> was last seen.")
25 return
26 nick = Identifier(trigger.group(2).strip())
27 if nick in seen_dict:
28 timestamp = seen_dict[nick]['timestamp']
29 channel = seen_dict[nick]['channel']
30 message = seen_dict[nick]['message']
31
32 tz = get_timezone(bot.db, bot.config, None, trigger.nick,
33 trigger.sender)
34 saw = datetime.datetime.utcfromtimestamp(timestamp)
35 timestamp = format_time(bot.db, bot.config, tz, trigger.nick,
36 trigger.sender, saw)
37
38 msg = "I last saw {} at {}".format(nick, timestamp)
39 if Identifier(channel) == trigger.sender:
40 msg = msg + " in here, saying " + message
41 else:
42 msg += " in another channel."
43 bot.say(str(trigger.nick) + ': ' + msg)
44 else:
45 bot.say("Sorry, I haven't seen %s around." % nick)
46
47
48 @rule('(.*)')
49 @priority('low')
50 def note(bot, trigger):
51 if not trigger.is_privmsg:
52 nick = Identifier(trigger.nick)
53 seen_dict[nick]['timestamp'] = time.time()
54 seen_dict[nick]['channel'] = trigger.sender
55 seen_dict[nick]['message'] = trigger
56
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/willie/modules/seen.py b/willie/modules/seen.py
--- a/willie/modules/seen.py
+++ b/willie/modules/seen.py
@@ -11,11 +11,9 @@
import time
import datetime
-from willie.tools import Ddict, Identifier, get_timezone, format_time
+from willie.tools import Identifier, get_timezone, format_time
from willie.module import commands, rule, priority
-seen_dict = Ddict(dict)
-
@commands('seen')
def seen(bot, trigger):
@@ -23,11 +21,11 @@
if not trigger.group(2):
bot.say(".seen <nick> - Reports when <nick> was last seen.")
return
- nick = Identifier(trigger.group(2).strip())
- if nick in seen_dict:
- timestamp = seen_dict[nick]['timestamp']
- channel = seen_dict[nick]['channel']
- message = seen_dict[nick]['message']
+ nick = trigger.group(2).strip()
+ timestamp = bot.db.get_nick_value(nick, 'seen_timestamp')
+ if timestamp:
+ channel = bot.db.get_nick_value(nick, 'seen_channel')
+ message = bot.db.get_nick_value(nick, 'seen_message')
tz = get_timezone(bot.db, bot.config, None, trigger.nick,
trigger.sender)
@@ -42,14 +40,13 @@
msg += " in another channel."
bot.say(str(trigger.nick) + ': ' + msg)
else:
- bot.say("Sorry, I haven't seen %s around." % nick)
+ bot.say("Sorry, I haven't seen {} around.".format(nick))
@rule('(.*)')
@priority('low')
def note(bot, trigger):
if not trigger.is_privmsg:
- nick = Identifier(trigger.nick)
- seen_dict[nick]['timestamp'] = time.time()
- seen_dict[nick]['channel'] = trigger.sender
- seen_dict[nick]['message'] = trigger
+ bot.db.set_nick_value(trigger.nick, 'seen_timestamp', time.time())
+ bot.db.set_nick_value(trigger.nick, 'seen_channel', trigger.sender)
+ bot.db.set_nick_value(trigger.nick, 'seen_message', trigger)
| {"golden_diff": "diff --git a/willie/modules/seen.py b/willie/modules/seen.py\n--- a/willie/modules/seen.py\n+++ b/willie/modules/seen.py\n@@ -11,11 +11,9 @@\n \n import time\n import datetime\n-from willie.tools import Ddict, Identifier, get_timezone, format_time\n+from willie.tools import Identifier, get_timezone, format_time\n from willie.module import commands, rule, priority\n \n-seen_dict = Ddict(dict)\n-\n \n @commands('seen')\n def seen(bot, trigger):\n@@ -23,11 +21,11 @@\n if not trigger.group(2):\n bot.say(\".seen <nick> - Reports when <nick> was last seen.\")\n return\n- nick = Identifier(trigger.group(2).strip())\n- if nick in seen_dict:\n- timestamp = seen_dict[nick]['timestamp']\n- channel = seen_dict[nick]['channel']\n- message = seen_dict[nick]['message']\n+ nick = trigger.group(2).strip()\n+ timestamp = bot.db.get_nick_value(nick, 'seen_timestamp')\n+ if timestamp:\n+ channel = bot.db.get_nick_value(nick, 'seen_channel')\n+ message = bot.db.get_nick_value(nick, 'seen_message')\n \n tz = get_timezone(bot.db, bot.config, None, trigger.nick,\n trigger.sender)\n@@ -42,14 +40,13 @@\n msg += \" in another channel.\"\n bot.say(str(trigger.nick) + ': ' + msg)\n else:\n- bot.say(\"Sorry, I haven't seen %s around.\" % nick)\n+ bot.say(\"Sorry, I haven't seen {} around.\".format(nick))\n \n \n @rule('(.*)')\n @priority('low')\n def note(bot, trigger):\n if not trigger.is_privmsg:\n- nick = Identifier(trigger.nick)\n- seen_dict[nick]['timestamp'] = time.time()\n- seen_dict[nick]['channel'] = trigger.sender\n- seen_dict[nick]['message'] = trigger\n+ bot.db.set_nick_value(trigger.nick, 'seen_timestamp', time.time())\n+ bot.db.set_nick_value(trigger.nick, 'seen_channel', trigger.sender)\n+ bot.db.set_nick_value(trigger.nick, 'seen_message', trigger)\n", "issue": "Make .seen persist over bot restarts\nCurrently, when a Willie-based bot is restarted, it loses all the info about who it saw. It is quite inconvenient, as restarts are required quite often, especially on networks where there are lots of netsplits going on and the bot loses its nick to them all the time. The proposed solution would be to keep a persistent DB containing the relevant info from which old records may or may not be auto-deleted at regular intervals.\n\n", "before_files": [{"content": "# coding=utf8\n\"\"\"\nseen.py - Willie Seen Module\nCopyright 2008, Sean B. 
Palmer, inamidst.com\nCopyright \u00a9 2012, Elad Alfassa <[email protected]>\nLicensed under the Eiffel Forum License 2.\n\nhttp://willie.dftba.net\n\"\"\"\nfrom __future__ import unicode_literals\n\nimport time\nimport datetime\nfrom willie.tools import Ddict, Identifier, get_timezone, format_time\nfrom willie.module import commands, rule, priority\n\nseen_dict = Ddict(dict)\n\n\n@commands('seen')\ndef seen(bot, trigger):\n \"\"\"Reports when and where the user was last seen.\"\"\"\n if not trigger.group(2):\n bot.say(\".seen <nick> - Reports when <nick> was last seen.\")\n return\n nick = Identifier(trigger.group(2).strip())\n if nick in seen_dict:\n timestamp = seen_dict[nick]['timestamp']\n channel = seen_dict[nick]['channel']\n message = seen_dict[nick]['message']\n\n tz = get_timezone(bot.db, bot.config, None, trigger.nick,\n trigger.sender)\n saw = datetime.datetime.utcfromtimestamp(timestamp)\n timestamp = format_time(bot.db, bot.config, tz, trigger.nick,\n trigger.sender, saw)\n\n msg = \"I last saw {} at {}\".format(nick, timestamp)\n if Identifier(channel) == trigger.sender:\n msg = msg + \" in here, saying \" + message\n else:\n msg += \" in another channel.\"\n bot.say(str(trigger.nick) + ': ' + msg)\n else:\n bot.say(\"Sorry, I haven't seen %s around.\" % nick)\n\n\n@rule('(.*)')\n@priority('low')\ndef note(bot, trigger):\n if not trigger.is_privmsg:\n nick = Identifier(trigger.nick)\n seen_dict[nick]['timestamp'] = time.time()\n seen_dict[nick]['channel'] = trigger.sender\n seen_dict[nick]['message'] = trigger\n", "path": "willie/modules/seen.py"}], "after_files": [{"content": "# coding=utf8\n\"\"\"\nseen.py - Willie Seen Module\nCopyright 2008, Sean B. Palmer, inamidst.com\nCopyright \u00a9 2012, Elad Alfassa <[email protected]>\nLicensed under the Eiffel Forum License 2.\n\nhttp://willie.dftba.net\n\"\"\"\nfrom __future__ import unicode_literals\n\nimport time\nimport datetime\nfrom willie.tools import Identifier, get_timezone, format_time\nfrom willie.module import commands, rule, priority\n\n\n@commands('seen')\ndef seen(bot, trigger):\n \"\"\"Reports when and where the user was last seen.\"\"\"\n if not trigger.group(2):\n bot.say(\".seen <nick> - Reports when <nick> was last seen.\")\n return\n nick = trigger.group(2).strip()\n timestamp = bot.db.get_nick_value(nick, 'seen_timestamp')\n if timestamp:\n channel = bot.db.get_nick_value(nick, 'seen_channel')\n message = bot.db.get_nick_value(nick, 'seen_message')\n\n tz = get_timezone(bot.db, bot.config, None, trigger.nick,\n trigger.sender)\n saw = datetime.datetime.utcfromtimestamp(timestamp)\n timestamp = format_time(bot.db, bot.config, tz, trigger.nick,\n trigger.sender, saw)\n\n msg = \"I last saw {} at {}\".format(nick, timestamp)\n if Identifier(channel) == trigger.sender:\n msg = msg + \" in here, saying \" + message\n else:\n msg += \" in another channel.\"\n bot.say(str(trigger.nick) + ': ' + msg)\n else:\n bot.say(\"Sorry, I haven't seen {} around.\".format(nick))\n\n\n@rule('(.*)')\n@priority('low')\ndef note(bot, trigger):\n if not trigger.is_privmsg:\n bot.db.set_nick_value(trigger.nick, 'seen_timestamp', time.time())\n bot.db.set_nick_value(trigger.nick, 'seen_channel', trigger.sender)\n bot.db.set_nick_value(trigger.nick, 'seen_message', trigger)\n", "path": "willie/modules/seen.py"}]} | 892 | 499 |
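The essence of the fix above is replacing the in-memory `seen_dict` with the bot's persistent nick-value store, so `.seen` data survives restarts. The sketch below shows that pattern in isolation; it assumes `bot` is a Willie/Sopel bot instance whose `db` exposes the `set_nick_value`/`get_nick_value` API used in the patch, and the `last_seen` helper is a hypothetical convenience wrapper, not part of the module.

```python
import time


def note(bot, trigger):
    # As in the patched module: write last-seen data to the bot's DB so it
    # survives restarts, instead of keeping it in a process-local dict.
    if not trigger.is_privmsg:
        bot.db.set_nick_value(trigger.nick, 'seen_timestamp', time.time())
        bot.db.set_nick_value(trigger.nick, 'seen_channel', trigger.sender)
        bot.db.set_nick_value(trigger.nick, 'seen_message', trigger)


def last_seen(bot, nick):
    # Hypothetical lookup built on the same nick-value API.
    timestamp = bot.db.get_nick_value(nick, 'seen_timestamp')
    if timestamp is None:
        return None
    channel = bot.db.get_nick_value(nick, 'seen_channel')
    message = bot.db.get_nick_value(nick, 'seen_message')
    return timestamp, channel, message
```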
gh_patches_debug_16434 | rasdani/github-patches | git_diff | lightly-ai__lightly-655 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Did cifar10 dataset need gaussian blur
In https://github.com/lightly-ai/lightly/blob/master/lightly/data/collate.py, the class SimCLRCollateFunction() presents an example of using it,
collate_fn = SimCLRCollateFunction(
>>> input_size=32,
>>> gaussian_blur=0.,
>>> )
but in https://docs.lightly.ai/examples/simclr.html
collate_fn = SimCLRCollateFunction(input_size=32)
so I wonder which one is the one you suggested?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/pytorch/simclr.py`
Content:
```
1 import torch
2 from torch import nn
3 import torchvision
4
5 from lightly.data import LightlyDataset
6 from lightly.data import SimCLRCollateFunction
7 from lightly.loss import NTXentLoss
8 from lightly.models.modules import SimCLRProjectionHead
9
10
11 class SimCLR(nn.Module):
12 def __init__(self, backbone):
13 super().__init__()
14 self.backbone = backbone
15 self.projection_head = SimCLRProjectionHead(512, 512, 128)
16
17 def forward(self, x):
18 x = self.backbone(x).flatten(start_dim=1)
19 z = self.projection_head(x)
20 return z
21
22
23 resnet = torchvision.models.resnet18()
24 backbone = nn.Sequential(*list(resnet.children())[:-1])
25 model = SimCLR(backbone)
26
27 device = "cuda" if torch.cuda.is_available() else "cpu"
28 model.to(device)
29
30 cifar10 = torchvision.datasets.CIFAR10("datasets/cifar10", download=True)
31 dataset = LightlyDataset.from_torch_dataset(cifar10)
32 # or create a dataset from a folder containing images or videos:
33 # dataset = LightlyDataset("path/to/folder")
34
35 collate_fn = SimCLRCollateFunction(input_size=32)
36
37 dataloader = torch.utils.data.DataLoader(
38 dataset,
39 batch_size=256,
40 collate_fn=collate_fn,
41 shuffle=True,
42 drop_last=True,
43 num_workers=8,
44 )
45
46 criterion = NTXentLoss()
47 optimizer = torch.optim.SGD(model.parameters(), lr=0.06)
48
49 print("Starting Training")
50 for epoch in range(10):
51 total_loss = 0
52 for (x0, x1), _, _ in dataloader:
53 x0 = x0.to(device)
54 x1 = x1.to(device)
55 z0 = model(x0)
56 z1 = model(x1)
57 loss = criterion(z0, z1)
58 total_loss += loss.detach()
59 loss.backward()
60 optimizer.step()
61 optimizer.zero_grad()
62 avg_loss = total_loss / len(dataloader)
63 print(f"epoch: {epoch:>02}, loss: {avg_loss:.5f}")
64
```
Path: `examples/pytorch_lightning/simclr.py`
Content:
```
1 import torch
2 from torch import nn
3 import torchvision
4 import pytorch_lightning as pl
5
6 from lightly.data import LightlyDataset
7 from lightly.data import SimCLRCollateFunction
8 from lightly.loss import NTXentLoss
9 from lightly.models.modules import SimCLRProjectionHead
10
11
12 class SimCLR(pl.LightningModule):
13 def __init__(self):
14 super().__init__()
15 resnet = torchvision.models.resnet18()
16 self.backbone = nn.Sequential(*list(resnet.children())[:-1])
17 self.projection_head = SimCLRProjectionHead(512, 2048, 2048)
18 self.criterion = NTXentLoss()
19
20 def forward(self, x):
21 x = self.backbone(x).flatten(start_dim=1)
22 z = self.projection_head(x)
23 return z
24
25 def training_step(self, batch, batch_index):
26 (x0, x1), _, _ = batch
27 z0 = self.forward(x0)
28 z1 = self.forward(x1)
29 loss = self.criterion(z0, z1)
30 return loss
31
32 def configure_optimizers(self):
33 optim = torch.optim.SGD(self.parameters(), lr=0.06)
34 return optim
35
36
37 model = SimCLR()
38
39 cifar10 = torchvision.datasets.CIFAR10("datasets/cifar10", download=True)
40 dataset = LightlyDataset.from_torch_dataset(cifar10)
41 # or create a dataset from a folder containing images or videos:
42 # dataset = LightlyDataset("path/to/folder")
43
44 collate_fn = SimCLRCollateFunction(input_size=32)
45
46 dataloader = torch.utils.data.DataLoader(
47 dataset,
48 batch_size=256,
49 collate_fn=collate_fn,
50 shuffle=True,
51 drop_last=True,
52 num_workers=8,
53 )
54
55 gpus = 1 if torch.cuda.is_available() else 0
56
57 trainer = pl.Trainer(max_epochs=10, gpus=gpus)
58 trainer.fit(model=model, train_dataloaders=dataloader)
59
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/pytorch/simclr.py b/examples/pytorch/simclr.py
--- a/examples/pytorch/simclr.py
+++ b/examples/pytorch/simclr.py
@@ -32,7 +32,10 @@
# or create a dataset from a folder containing images or videos:
# dataset = LightlyDataset("path/to/folder")
-collate_fn = SimCLRCollateFunction(input_size=32)
+collate_fn = SimCLRCollateFunction(
+ input_size=32,
+ gaussian_blur=0.,
+)
dataloader = torch.utils.data.DataLoader(
dataset,
diff --git a/examples/pytorch_lightning/simclr.py b/examples/pytorch_lightning/simclr.py
--- a/examples/pytorch_lightning/simclr.py
+++ b/examples/pytorch_lightning/simclr.py
@@ -41,7 +41,10 @@
# or create a dataset from a folder containing images or videos:
# dataset = LightlyDataset("path/to/folder")
-collate_fn = SimCLRCollateFunction(input_size=32)
+collate_fn = SimCLRCollateFunction(
+ input_size=32,
+ gaussian_blur=0.,
+)
dataloader = torch.utils.data.DataLoader(
dataset,
| {"golden_diff": "diff --git a/examples/pytorch/simclr.py b/examples/pytorch/simclr.py\n--- a/examples/pytorch/simclr.py\n+++ b/examples/pytorch/simclr.py\n@@ -32,7 +32,10 @@\n # or create a dataset from a folder containing images or videos:\n # dataset = LightlyDataset(\"path/to/folder\")\n \n-collate_fn = SimCLRCollateFunction(input_size=32)\n+collate_fn = SimCLRCollateFunction(\n+ input_size=32,\n+ gaussian_blur=0.,\n+)\n \n dataloader = torch.utils.data.DataLoader(\n dataset,\ndiff --git a/examples/pytorch_lightning/simclr.py b/examples/pytorch_lightning/simclr.py\n--- a/examples/pytorch_lightning/simclr.py\n+++ b/examples/pytorch_lightning/simclr.py\n@@ -41,7 +41,10 @@\n # or create a dataset from a folder containing images or videos:\n # dataset = LightlyDataset(\"path/to/folder\")\n \n-collate_fn = SimCLRCollateFunction(input_size=32)\n+collate_fn = SimCLRCollateFunction(\n+ input_size=32,\n+ gaussian_blur=0.,\n+)\n \n dataloader = torch.utils.data.DataLoader(\n dataset,\n", "issue": "Did cifar10 dataset need gaussian blur\nIn the https://github.com/lightly-ai/lightly/blob/master/lightly/data/collate.py, class SimCLRCollateFunction() presents a n example for using it,\r\ncollate_fn = SimCLRCollateFunction(\r\n >>> input_size=32,\r\n >>> gaussian_blur=0.,\r\n >>> )\r\nbut in https://docs.lightly.ai/examples/simclr.html\r\ncollate_fn = SimCLRCollateFunction(input_size=32)\r\nso I wonder which one is the one you suggested?\n", "before_files": [{"content": "import torch\nfrom torch import nn\nimport torchvision\n\nfrom lightly.data import LightlyDataset\nfrom lightly.data import SimCLRCollateFunction\nfrom lightly.loss import NTXentLoss\nfrom lightly.models.modules import SimCLRProjectionHead\n\n\nclass SimCLR(nn.Module):\n def __init__(self, backbone):\n super().__init__()\n self.backbone = backbone\n self.projection_head = SimCLRProjectionHead(512, 512, 128)\n\n def forward(self, x):\n x = self.backbone(x).flatten(start_dim=1)\n z = self.projection_head(x)\n return z\n\n\nresnet = torchvision.models.resnet18()\nbackbone = nn.Sequential(*list(resnet.children())[:-1])\nmodel = SimCLR(backbone)\n\ndevice = \"cuda\" if torch.cuda.is_available() else \"cpu\"\nmodel.to(device)\n\ncifar10 = torchvision.datasets.CIFAR10(\"datasets/cifar10\", download=True)\ndataset = LightlyDataset.from_torch_dataset(cifar10)\n# or create a dataset from a folder containing images or videos:\n# dataset = LightlyDataset(\"path/to/folder\")\n\ncollate_fn = SimCLRCollateFunction(input_size=32)\n\ndataloader = torch.utils.data.DataLoader(\n dataset,\n batch_size=256,\n collate_fn=collate_fn,\n shuffle=True,\n drop_last=True,\n num_workers=8,\n)\n\ncriterion = NTXentLoss()\noptimizer = torch.optim.SGD(model.parameters(), lr=0.06)\n\nprint(\"Starting Training\")\nfor epoch in range(10):\n total_loss = 0\n for (x0, x1), _, _ in dataloader:\n x0 = x0.to(device)\n x1 = x1.to(device)\n z0 = model(x0)\n z1 = model(x1)\n loss = criterion(z0, z1)\n total_loss += loss.detach()\n loss.backward()\n optimizer.step()\n optimizer.zero_grad()\n avg_loss = total_loss / len(dataloader)\n print(f\"epoch: {epoch:>02}, loss: {avg_loss:.5f}\")\n", "path": "examples/pytorch/simclr.py"}, {"content": "import torch\nfrom torch import nn\nimport torchvision\nimport pytorch_lightning as pl\n\nfrom lightly.data import LightlyDataset\nfrom lightly.data import SimCLRCollateFunction\nfrom lightly.loss import NTXentLoss\nfrom lightly.models.modules import SimCLRProjectionHead\n\n\nclass SimCLR(pl.LightningModule):\n def __init__(self):\n 
super().__init__()\n resnet = torchvision.models.resnet18()\n self.backbone = nn.Sequential(*list(resnet.children())[:-1])\n self.projection_head = SimCLRProjectionHead(512, 2048, 2048)\n self.criterion = NTXentLoss()\n\n def forward(self, x):\n x = self.backbone(x).flatten(start_dim=1)\n z = self.projection_head(x)\n return z\n\n def training_step(self, batch, batch_index):\n (x0, x1), _, _ = batch\n z0 = self.forward(x0)\n z1 = self.forward(x1)\n loss = self.criterion(z0, z1)\n return loss\n\n def configure_optimizers(self):\n optim = torch.optim.SGD(self.parameters(), lr=0.06)\n return optim\n\n\nmodel = SimCLR()\n\ncifar10 = torchvision.datasets.CIFAR10(\"datasets/cifar10\", download=True)\ndataset = LightlyDataset.from_torch_dataset(cifar10)\n# or create a dataset from a folder containing images or videos:\n# dataset = LightlyDataset(\"path/to/folder\")\n\ncollate_fn = SimCLRCollateFunction(input_size=32)\n\ndataloader = torch.utils.data.DataLoader(\n dataset,\n batch_size=256,\n collate_fn=collate_fn,\n shuffle=True,\n drop_last=True,\n num_workers=8,\n)\n\ngpus = 1 if torch.cuda.is_available() else 0\n\ntrainer = pl.Trainer(max_epochs=10, gpus=gpus)\ntrainer.fit(model=model, train_dataloaders=dataloader)\n", "path": "examples/pytorch_lightning/simclr.py"}], "after_files": [{"content": "import torch\nfrom torch import nn\nimport torchvision\n\nfrom lightly.data import LightlyDataset\nfrom lightly.data import SimCLRCollateFunction\nfrom lightly.loss import NTXentLoss\nfrom lightly.models.modules import SimCLRProjectionHead\n\n\nclass SimCLR(nn.Module):\n def __init__(self, backbone):\n super().__init__()\n self.backbone = backbone\n self.projection_head = SimCLRProjectionHead(512, 512, 128)\n\n def forward(self, x):\n x = self.backbone(x).flatten(start_dim=1)\n z = self.projection_head(x)\n return z\n\n\nresnet = torchvision.models.resnet18()\nbackbone = nn.Sequential(*list(resnet.children())[:-1])\nmodel = SimCLR(backbone)\n\ndevice = \"cuda\" if torch.cuda.is_available() else \"cpu\"\nmodel.to(device)\n\ncifar10 = torchvision.datasets.CIFAR10(\"datasets/cifar10\", download=True)\ndataset = LightlyDataset.from_torch_dataset(cifar10)\n# or create a dataset from a folder containing images or videos:\n# dataset = LightlyDataset(\"path/to/folder\")\n\ncollate_fn = SimCLRCollateFunction(\n input_size=32,\n gaussian_blur=0.,\n)\n\ndataloader = torch.utils.data.DataLoader(\n dataset,\n batch_size=256,\n collate_fn=collate_fn,\n shuffle=True,\n drop_last=True,\n num_workers=8,\n)\n\ncriterion = NTXentLoss()\noptimizer = torch.optim.SGD(model.parameters(), lr=0.06)\n\nprint(\"Starting Training\")\nfor epoch in range(10):\n total_loss = 0\n for (x0, x1), _, _ in dataloader:\n x0 = x0.to(device)\n x1 = x1.to(device)\n z0 = model(x0)\n z1 = model(x1)\n loss = criterion(z0, z1)\n total_loss += loss.detach()\n loss.backward()\n optimizer.step()\n optimizer.zero_grad()\n avg_loss = total_loss / len(dataloader)\n print(f\"epoch: {epoch:>02}, loss: {avg_loss:.5f}\")\n", "path": "examples/pytorch/simclr.py"}, {"content": "import torch\nfrom torch import nn\nimport torchvision\nimport pytorch_lightning as pl\n\nfrom lightly.data import LightlyDataset\nfrom lightly.data import SimCLRCollateFunction\nfrom lightly.loss import NTXentLoss\nfrom lightly.models.modules import SimCLRProjectionHead\n\n\nclass SimCLR(pl.LightningModule):\n def __init__(self):\n super().__init__()\n resnet = torchvision.models.resnet18()\n self.backbone = nn.Sequential(*list(resnet.children())[:-1])\n self.projection_head = 
SimCLRProjectionHead(512, 2048, 2048)\n self.criterion = NTXentLoss()\n\n def forward(self, x):\n x = self.backbone(x).flatten(start_dim=1)\n z = self.projection_head(x)\n return z\n\n def training_step(self, batch, batch_index):\n (x0, x1), _, _ = batch\n z0 = self.forward(x0)\n z1 = self.forward(x1)\n loss = self.criterion(z0, z1)\n return loss\n\n def configure_optimizers(self):\n optim = torch.optim.SGD(self.parameters(), lr=0.06)\n return optim\n\n\nmodel = SimCLR()\n\ncifar10 = torchvision.datasets.CIFAR10(\"datasets/cifar10\", download=True)\ndataset = LightlyDataset.from_torch_dataset(cifar10)\n# or create a dataset from a folder containing images or videos:\n# dataset = LightlyDataset(\"path/to/folder\")\n\ncollate_fn = SimCLRCollateFunction(\n input_size=32,\n gaussian_blur=0.,\n)\n\ndataloader = torch.utils.data.DataLoader(\n dataset,\n batch_size=256,\n collate_fn=collate_fn,\n shuffle=True,\n drop_last=True,\n num_workers=8,\n)\n\ngpus = 1 if torch.cuda.is_available() else 0\n\ntrainer = pl.Trainer(max_epochs=10, gpus=gpus)\ntrainer.fit(model=model, train_dataloaders=dataloader)\n", "path": "examples/pytorch_lightning/simclr.py"}]} | 1,553 | 288 |
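To make the question in the issue concrete: both snippets construct the same collate function, and the patched examples simply turn the blur off for 32x32 inputs. The sketch below shows the two calls side by side; treating `gaussian_blur=0.` as the better choice for CIFAR-10 follows the patched examples and the collate.py docstring rather than any claim made by the library itself.

```python
from lightly.data import SimCLRCollateFunction

# Docs example: library defaults, which keep the usual SimCLR blur probability.
collate_default = SimCLRCollateFunction(input_size=32)

# collate.py docstring example and the patched CIFAR-10 examples: blur disabled.
collate_cifar10 = SimCLRCollateFunction(
    input_size=32,
    gaussian_blur=0.,
)
```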
gh_patches_debug_40648 | rasdani/github-patches | git_diff | microsoft__AzureTRE-241 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] ResourceTemplates have properties - should be parameters
Rename properties to parameters
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `management_api_app/models/domain/resource_template.py`
Content:
```
1 from typing import List, Optional, Any
2
3 from pydantic import Field
4
5 from models.domain.azuretremodel import AzureTREModel
6 from models.domain.resource import ResourceType
7
8
9 class Parameter(AzureTREModel):
10 name: str = Field(title="Parameter name")
11 type: str = Field(title="Parameter type")
12 default: Any = Field(title="Default value for the parameter")
13 applyTo: str = Field("All Actions", title="The actions that the parameter applies to e.g. install, delete etc")
14 description: Optional[str] = Field(title="Parameter description")
15 required: bool = Field(False, title="Is the parameter required")
16
17
18 class ResourceTemplate(AzureTREModel):
19 id: str
20 name: str = Field(title="Unique template name")
21 description: str = Field(title="Template description")
22 version: str = Field(title="Template version")
23 properties: List[dict] = Field(title="Template parameters")
24 resourceType: ResourceType = Field(title="Type of resource this template is for (workspace/service)")
25 current: bool = Field(title="Is this the current version of this template")
26
```
Path: `management_api_app/models/schemas/workspace_template.py`
Content:
```
1 from typing import List
2 from pydantic import BaseModel, Field
3
4 from models.domain.resource import ResourceType
5 from models.domain.resource_template import ResourceTemplate, Parameter
6
7
8 def get_sample_workspace_template_object(template_name: str = "tre-workspace-vanilla") -> ResourceTemplate:
9 return ResourceTemplate(
10 id="a7a7a7bd-7f4e-4a4e-b970-dc86a6b31dfb",
11 name=template_name,
12 description="vanilla workspace bundle",
13 version="0.1.0",
14 properties=[
15 Parameter(name="azure_location", type="string"),
16 Parameter(name="tre_id", type="string"),
17 Parameter(name="workspace_id", type="string"),
18 Parameter(name="address_space", type="string", default="10.2.1.0/24", description="VNet address space for the workspace services")
19 ],
20 resourceType=ResourceType.Workspace,
21 current=True,
22 )
23
24
25 def get_sample_workspace_template() -> dict:
26 return get_sample_workspace_template_object().dict()
27
28
29 class WorkspaceTemplateNamesInList(BaseModel):
30 templateNames: List[str]
31
32 class Config:
33 schema_extra = {
34 "example": {
35 "templateNames": ["tre-workspace-vanilla", "tre-workspace-base"]
36 }
37 }
38
39
40 class WorkspaceTemplateInCreate(BaseModel):
41
42 name: str = Field(title="Name of workspace template")
43 version: str = Field(title="Version of workspace template")
44 description: str = Field(title=" Description of workspace template")
45 properties: List[dict] = Field([{}], title="Workspace template properties",
46 description="Values for the properties required by the workspace template")
47 resourceType: str = Field(title="Type of workspace template")
48 current: bool = Field(title="Mark this version as current")
49
50 class Config:
51 schema_extra = {
52 "example": {
53 "name": "my-tre-workspace",
54 "version": "0.0.1",
55 "description": "workspace template for great product",
56 "properties": [{
57 "name": "azure_location",
58 "type": "string"
59 }],
60 "resourceType": "workspace",
61 "current": "true"
62 }
63 }
64
65
66 class WorkspaceTemplateInResponse(BaseModel):
67 workspaceTemplate: ResourceTemplate
68
69 class Config:
70 schema_extra = {
71 "example": {
72 "resourceTemplateId": "49a7445c-aae6-41ec-a539-30dfa90ab1ae",
73 "workspaceTemplate": get_sample_workspace_template()
74 }
75 }
76
```
Path: `management_api_app/db/repositories/workspace_templates.py`
Content:
```
1 import uuid
2 from typing import List
3
4 from azure.cosmos import CosmosClient
5
6 from core import config
7 from db.errors import EntityDoesNotExist
8 from db.repositories.base import BaseRepository
9 from models.domain.resource_template import ResourceTemplate
10 from models.schemas.workspace_template import WorkspaceTemplateInCreate
11
12
13 class WorkspaceTemplateRepository(BaseRepository):
14 def __init__(self, client: CosmosClient):
15 super().__init__(client, config.STATE_STORE_RESOURCE_TEMPLATES_CONTAINER)
16
17 @staticmethod
18 def _workspace_template_by_name_query(name: str) -> str:
19 return f'SELECT * FROM c WHERE c.resourceType = "workspace" AND c.name = "{name}"'
20
21 def get_workspace_templates_by_name(self, name: str) -> List[ResourceTemplate]:
22 query = self._workspace_template_by_name_query(name)
23 return self.query(query=query)
24
25 def get_current_workspace_template_by_name(self, name: str) -> ResourceTemplate:
26 query = self._workspace_template_by_name_query(name) + ' AND c.current = true'
27 workspace_templates = self.query(query=query)
28 if len(workspace_templates) != 1:
29 raise EntityDoesNotExist
30 return workspace_templates[0]
31
32 def get_workspace_template_by_name_and_version(self, name: str, version: str) -> ResourceTemplate:
33 query = self._workspace_template_by_name_query(name) + f' AND c.version = "{version}"'
34 workspace_templates = self.query(query=query)
35 if len(workspace_templates) != 1:
36 raise EntityDoesNotExist
37 return workspace_templates[0]
38
39 def get_workspace_template_names(self) -> List[str]:
40 query = 'SELECT c.name FROM c'
41 workspace_templates = self.query(query=query)
42 print(workspace_templates)
43 workspace_template_names = [template["name"] for template in workspace_templates]
44 return list(set(workspace_template_names))
45
46 def create_workspace_template_item(self, workspace_template_create: WorkspaceTemplateInCreate):
47 item_id = str(uuid.uuid4())
48 resource_template = ResourceTemplate(
49 id=item_id,
50 name=workspace_template_create.name,
51 description=workspace_template_create.description,
52 version=workspace_template_create.version,
53 properties=workspace_template_create.properties,
54 resourceType=workspace_template_create.resourceType,
55 current=workspace_template_create.current
56 )
57 self.create_item(resource_template)
58 return resource_template
59
60 def update_item(self, resource_template: ResourceTemplate):
61 self.container.upsert_item(resource_template)
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/management_api_app/db/repositories/workspace_templates.py b/management_api_app/db/repositories/workspace_templates.py
--- a/management_api_app/db/repositories/workspace_templates.py
+++ b/management_api_app/db/repositories/workspace_templates.py
@@ -50,7 +50,7 @@
name=workspace_template_create.name,
description=workspace_template_create.description,
version=workspace_template_create.version,
- properties=workspace_template_create.properties,
+ parameters=workspace_template_create.parameters,
resourceType=workspace_template_create.resourceType,
current=workspace_template_create.current
)
diff --git a/management_api_app/models/domain/resource_template.py b/management_api_app/models/domain/resource_template.py
--- a/management_api_app/models/domain/resource_template.py
+++ b/management_api_app/models/domain/resource_template.py
@@ -20,6 +20,6 @@
name: str = Field(title="Unique template name")
description: str = Field(title="Template description")
version: str = Field(title="Template version")
- properties: List[dict] = Field(title="Template parameters")
+ parameters: List[dict] = Field(title="Template parameters")
resourceType: ResourceType = Field(title="Type of resource this template is for (workspace/service)")
current: bool = Field(title="Is this the current version of this template")
diff --git a/management_api_app/models/schemas/workspace_template.py b/management_api_app/models/schemas/workspace_template.py
--- a/management_api_app/models/schemas/workspace_template.py
+++ b/management_api_app/models/schemas/workspace_template.py
@@ -11,7 +11,7 @@
name=template_name,
description="vanilla workspace bundle",
version="0.1.0",
- properties=[
+ parameters=[
Parameter(name="azure_location", type="string"),
Parameter(name="tre_id", type="string"),
Parameter(name="workspace_id", type="string"),
@@ -42,8 +42,8 @@
name: str = Field(title="Name of workspace template")
version: str = Field(title="Version of workspace template")
description: str = Field(title=" Description of workspace template")
- properties: List[dict] = Field([{}], title="Workspace template properties",
- description="Values for the properties required by the workspace template")
+ parameters: List[dict] = Field([{}], title="Workspace template parameters",
+ description="Values for the parameters required by the workspace template")
resourceType: str = Field(title="Type of workspace template")
current: bool = Field(title="Mark this version as current")
@@ -53,7 +53,7 @@
"name": "my-tre-workspace",
"version": "0.0.1",
"description": "workspace template for great product",
- "properties": [{
+ "parameters": [{
"name": "azure_location",
"type": "string"
}],
| {"golden_diff": "diff --git a/management_api_app/db/repositories/workspace_templates.py b/management_api_app/db/repositories/workspace_templates.py\n--- a/management_api_app/db/repositories/workspace_templates.py\n+++ b/management_api_app/db/repositories/workspace_templates.py\n@@ -50,7 +50,7 @@\n name=workspace_template_create.name,\n description=workspace_template_create.description,\n version=workspace_template_create.version,\n- properties=workspace_template_create.properties,\n+ parameters=workspace_template_create.parameters,\n resourceType=workspace_template_create.resourceType,\n current=workspace_template_create.current\n )\ndiff --git a/management_api_app/models/domain/resource_template.py b/management_api_app/models/domain/resource_template.py\n--- a/management_api_app/models/domain/resource_template.py\n+++ b/management_api_app/models/domain/resource_template.py\n@@ -20,6 +20,6 @@\n name: str = Field(title=\"Unique template name\")\n description: str = Field(title=\"Template description\")\n version: str = Field(title=\"Template version\")\n- properties: List[dict] = Field(title=\"Template parameters\")\n+ parameters: List[dict] = Field(title=\"Template parameters\")\n resourceType: ResourceType = Field(title=\"Type of resource this template is for (workspace/service)\")\n current: bool = Field(title=\"Is this the current version of this template\")\ndiff --git a/management_api_app/models/schemas/workspace_template.py b/management_api_app/models/schemas/workspace_template.py\n--- a/management_api_app/models/schemas/workspace_template.py\n+++ b/management_api_app/models/schemas/workspace_template.py\n@@ -11,7 +11,7 @@\n name=template_name,\n description=\"vanilla workspace bundle\",\n version=\"0.1.0\",\n- properties=[\n+ parameters=[\n Parameter(name=\"azure_location\", type=\"string\"),\n Parameter(name=\"tre_id\", type=\"string\"),\n Parameter(name=\"workspace_id\", type=\"string\"),\n@@ -42,8 +42,8 @@\n name: str = Field(title=\"Name of workspace template\")\n version: str = Field(title=\"Version of workspace template\")\n description: str = Field(title=\" Description of workspace template\")\n- properties: List[dict] = Field([{}], title=\"Workspace template properties\",\n- description=\"Values for the properties required by the workspace template\")\n+ parameters: List[dict] = Field([{}], title=\"Workspace template parameters\",\n+ description=\"Values for the parameters required by the workspace template\")\n resourceType: str = Field(title=\"Type of workspace template\")\n current: bool = Field(title=\"Mark this version as current\")\n \n@@ -53,7 +53,7 @@\n \"name\": \"my-tre-workspace\",\n \"version\": \"0.0.1\",\n \"description\": \"workspace template for great product\",\n- \"properties\": [{\n+ \"parameters\": [{\n \"name\": \"azure_location\",\n \"type\": \"string\"\n }],\n", "issue": "[BUG] ResourceTemplates have properties - should be parameters\nRename properties to parameters\r\n\n", "before_files": [{"content": "from typing import List, Optional, Any\n\nfrom pydantic import Field\n\nfrom models.domain.azuretremodel import AzureTREModel\nfrom models.domain.resource import ResourceType\n\n\nclass Parameter(AzureTREModel):\n name: str = Field(title=\"Parameter name\")\n type: str = Field(title=\"Parameter type\")\n default: Any = Field(title=\"Default value for the parameter\")\n applyTo: str = Field(\"All Actions\", title=\"The actions that the parameter applies to e.g. 
install, delete etc\")\n description: Optional[str] = Field(title=\"Parameter description\")\n required: bool = Field(False, title=\"Is the parameter required\")\n\n\nclass ResourceTemplate(AzureTREModel):\n id: str\n name: str = Field(title=\"Unique template name\")\n description: str = Field(title=\"Template description\")\n version: str = Field(title=\"Template version\")\n properties: List[dict] = Field(title=\"Template parameters\")\n resourceType: ResourceType = Field(title=\"Type of resource this template is for (workspace/service)\")\n current: bool = Field(title=\"Is this the current version of this template\")\n", "path": "management_api_app/models/domain/resource_template.py"}, {"content": "from typing import List\nfrom pydantic import BaseModel, Field\n\nfrom models.domain.resource import ResourceType\nfrom models.domain.resource_template import ResourceTemplate, Parameter\n\n\ndef get_sample_workspace_template_object(template_name: str = \"tre-workspace-vanilla\") -> ResourceTemplate:\n return ResourceTemplate(\n id=\"a7a7a7bd-7f4e-4a4e-b970-dc86a6b31dfb\",\n name=template_name,\n description=\"vanilla workspace bundle\",\n version=\"0.1.0\",\n properties=[\n Parameter(name=\"azure_location\", type=\"string\"),\n Parameter(name=\"tre_id\", type=\"string\"),\n Parameter(name=\"workspace_id\", type=\"string\"),\n Parameter(name=\"address_space\", type=\"string\", default=\"10.2.1.0/24\", description=\"VNet address space for the workspace services\")\n ],\n resourceType=ResourceType.Workspace,\n current=True,\n )\n\n\ndef get_sample_workspace_template() -> dict:\n return get_sample_workspace_template_object().dict()\n\n\nclass WorkspaceTemplateNamesInList(BaseModel):\n templateNames: List[str]\n\n class Config:\n schema_extra = {\n \"example\": {\n \"templateNames\": [\"tre-workspace-vanilla\", \"tre-workspace-base\"]\n }\n }\n\n\nclass WorkspaceTemplateInCreate(BaseModel):\n\n name: str = Field(title=\"Name of workspace template\")\n version: str = Field(title=\"Version of workspace template\")\n description: str = Field(title=\" Description of workspace template\")\n properties: List[dict] = Field([{}], title=\"Workspace template properties\",\n description=\"Values for the properties required by the workspace template\")\n resourceType: str = Field(title=\"Type of workspace template\")\n current: bool = Field(title=\"Mark this version as current\")\n\n class Config:\n schema_extra = {\n \"example\": {\n \"name\": \"my-tre-workspace\",\n \"version\": \"0.0.1\",\n \"description\": \"workspace template for great product\",\n \"properties\": [{\n \"name\": \"azure_location\",\n \"type\": \"string\"\n }],\n \"resourceType\": \"workspace\",\n \"current\": \"true\"\n }\n }\n\n\nclass WorkspaceTemplateInResponse(BaseModel):\n workspaceTemplate: ResourceTemplate\n\n class Config:\n schema_extra = {\n \"example\": {\n \"resourceTemplateId\": \"49a7445c-aae6-41ec-a539-30dfa90ab1ae\",\n \"workspaceTemplate\": get_sample_workspace_template()\n }\n }\n", "path": "management_api_app/models/schemas/workspace_template.py"}, {"content": "import uuid\nfrom typing import List\n\nfrom azure.cosmos import CosmosClient\n\nfrom core import config\nfrom db.errors import EntityDoesNotExist\nfrom db.repositories.base import BaseRepository\nfrom models.domain.resource_template import ResourceTemplate\nfrom models.schemas.workspace_template import WorkspaceTemplateInCreate\n\n\nclass WorkspaceTemplateRepository(BaseRepository):\n def __init__(self, client: CosmosClient):\n super().__init__(client, 
config.STATE_STORE_RESOURCE_TEMPLATES_CONTAINER)\n\n @staticmethod\n def _workspace_template_by_name_query(name: str) -> str:\n return f'SELECT * FROM c WHERE c.resourceType = \"workspace\" AND c.name = \"{name}\"'\n\n def get_workspace_templates_by_name(self, name: str) -> List[ResourceTemplate]:\n query = self._workspace_template_by_name_query(name)\n return self.query(query=query)\n\n def get_current_workspace_template_by_name(self, name: str) -> ResourceTemplate:\n query = self._workspace_template_by_name_query(name) + ' AND c.current = true'\n workspace_templates = self.query(query=query)\n if len(workspace_templates) != 1:\n raise EntityDoesNotExist\n return workspace_templates[0]\n\n def get_workspace_template_by_name_and_version(self, name: str, version: str) -> ResourceTemplate:\n query = self._workspace_template_by_name_query(name) + f' AND c.version = \"{version}\"'\n workspace_templates = self.query(query=query)\n if len(workspace_templates) != 1:\n raise EntityDoesNotExist\n return workspace_templates[0]\n\n def get_workspace_template_names(self) -> List[str]:\n query = 'SELECT c.name FROM c'\n workspace_templates = self.query(query=query)\n print(workspace_templates)\n workspace_template_names = [template[\"name\"] for template in workspace_templates]\n return list(set(workspace_template_names))\n\n def create_workspace_template_item(self, workspace_template_create: WorkspaceTemplateInCreate):\n item_id = str(uuid.uuid4())\n resource_template = ResourceTemplate(\n id=item_id,\n name=workspace_template_create.name,\n description=workspace_template_create.description,\n version=workspace_template_create.version,\n properties=workspace_template_create.properties,\n resourceType=workspace_template_create.resourceType,\n current=workspace_template_create.current\n )\n self.create_item(resource_template)\n return resource_template\n\n def update_item(self, resource_template: ResourceTemplate):\n self.container.upsert_item(resource_template)\n", "path": "management_api_app/db/repositories/workspace_templates.py"}], "after_files": [{"content": "from typing import List, Optional, Any\n\nfrom pydantic import Field\n\nfrom models.domain.azuretremodel import AzureTREModel\nfrom models.domain.resource import ResourceType\n\n\nclass Parameter(AzureTREModel):\n name: str = Field(title=\"Parameter name\")\n type: str = Field(title=\"Parameter type\")\n default: Any = Field(title=\"Default value for the parameter\")\n applyTo: str = Field(\"All Actions\", title=\"The actions that the parameter applies to e.g. 
install, delete etc\")\n description: Optional[str] = Field(title=\"Parameter description\")\n required: bool = Field(False, title=\"Is the parameter required\")\n\n\nclass ResourceTemplate(AzureTREModel):\n id: str\n name: str = Field(title=\"Unique template name\")\n description: str = Field(title=\"Template description\")\n version: str = Field(title=\"Template version\")\n parameters: List[dict] = Field(title=\"Template parameters\")\n resourceType: ResourceType = Field(title=\"Type of resource this template is for (workspace/service)\")\n current: bool = Field(title=\"Is this the current version of this template\")\n", "path": "management_api_app/models/domain/resource_template.py"}, {"content": "from typing import List\nfrom pydantic import BaseModel, Field\n\nfrom models.domain.resource import ResourceType\nfrom models.domain.resource_template import ResourceTemplate, Parameter\n\n\ndef get_sample_workspace_template_object(template_name: str = \"tre-workspace-vanilla\") -> ResourceTemplate:\n return ResourceTemplate(\n id=\"a7a7a7bd-7f4e-4a4e-b970-dc86a6b31dfb\",\n name=template_name,\n description=\"vanilla workspace bundle\",\n version=\"0.1.0\",\n parameters=[\n Parameter(name=\"azure_location\", type=\"string\"),\n Parameter(name=\"tre_id\", type=\"string\"),\n Parameter(name=\"workspace_id\", type=\"string\"),\n Parameter(name=\"address_space\", type=\"string\", default=\"10.2.1.0/24\", description=\"VNet address space for the workspace services\")\n ],\n resourceType=ResourceType.Workspace,\n current=True,\n )\n\n\ndef get_sample_workspace_template() -> dict:\n return get_sample_workspace_template_object().dict()\n\n\nclass WorkspaceTemplateNamesInList(BaseModel):\n templateNames: List[str]\n\n class Config:\n schema_extra = {\n \"example\": {\n \"templateNames\": [\"tre-workspace-vanilla\", \"tre-workspace-base\"]\n }\n }\n\n\nclass WorkspaceTemplateInCreate(BaseModel):\n\n name: str = Field(title=\"Name of workspace template\")\n version: str = Field(title=\"Version of workspace template\")\n description: str = Field(title=\" Description of workspace template\")\n parameters: List[dict] = Field([{}], title=\"Workspace template parameters\",\n description=\"Values for the parameters required by the workspace template\")\n resourceType: str = Field(title=\"Type of workspace template\")\n current: bool = Field(title=\"Mark this version as current\")\n\n class Config:\n schema_extra = {\n \"example\": {\n \"name\": \"my-tre-workspace\",\n \"version\": \"0.0.1\",\n \"description\": \"workspace template for great product\",\n \"parameters\": [{\n \"name\": \"azure_location\",\n \"type\": \"string\"\n }],\n \"resourceType\": \"workspace\",\n \"current\": \"true\"\n }\n }\n\n\nclass WorkspaceTemplateInResponse(BaseModel):\n workspaceTemplate: ResourceTemplate\n\n class Config:\n schema_extra = {\n \"example\": {\n \"resourceTemplateId\": \"49a7445c-aae6-41ec-a539-30dfa90ab1ae\",\n \"workspaceTemplate\": get_sample_workspace_template()\n }\n }\n", "path": "management_api_app/models/schemas/workspace_template.py"}, {"content": "import uuid\nfrom typing import List\n\nfrom azure.cosmos import CosmosClient\n\nfrom core import config\nfrom db.errors import EntityDoesNotExist\nfrom db.repositories.base import BaseRepository\nfrom models.domain.resource_template import ResourceTemplate\nfrom models.schemas.workspace_template import WorkspaceTemplateInCreate\n\n\nclass WorkspaceTemplateRepository(BaseRepository):\n def __init__(self, client: CosmosClient):\n super().__init__(client, 
config.STATE_STORE_RESOURCE_TEMPLATES_CONTAINER)\n\n @staticmethod\n def _workspace_template_by_name_query(name: str) -> str:\n return f'SELECT * FROM c WHERE c.resourceType = \"workspace\" AND c.name = \"{name}\"'\n\n def get_workspace_templates_by_name(self, name: str) -> List[ResourceTemplate]:\n query = self._workspace_template_by_name_query(name)\n return self.query(query=query)\n\n def get_current_workspace_template_by_name(self, name: str) -> ResourceTemplate:\n query = self._workspace_template_by_name_query(name) + ' AND c.current = true'\n workspace_templates = self.query(query=query)\n if len(workspace_templates) != 1:\n raise EntityDoesNotExist\n return workspace_templates[0]\n\n def get_workspace_template_by_name_and_version(self, name: str, version: str) -> ResourceTemplate:\n query = self._workspace_template_by_name_query(name) + f' AND c.version = \"{version}\"'\n workspace_templates = self.query(query=query)\n if len(workspace_templates) != 1:\n raise EntityDoesNotExist\n return workspace_templates[0]\n\n def get_workspace_template_names(self) -> List[str]:\n query = 'SELECT c.name FROM c'\n workspace_templates = self.query(query=query)\n print(workspace_templates)\n workspace_template_names = [template[\"name\"] for template in workspace_templates]\n return list(set(workspace_template_names))\n\n def create_workspace_template_item(self, workspace_template_create: WorkspaceTemplateInCreate):\n item_id = str(uuid.uuid4())\n resource_template = ResourceTemplate(\n id=item_id,\n name=workspace_template_create.name,\n description=workspace_template_create.description,\n version=workspace_template_create.version,\n parameters=workspace_template_create.parameters,\n resourceType=workspace_template_create.resourceType,\n current=workspace_template_create.current\n )\n self.create_item(resource_template)\n return resource_template\n\n def update_item(self, resource_template: ResourceTemplate):\n self.container.upsert_item(resource_template)\n", "path": "management_api_app/db/repositories/workspace_templates.py"}]} | 1,948 | 635 |
gh_patches_debug_10154 | rasdani/github-patches | git_diff | cal-itp__benefits-1611 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
MultiValueDictKeyError: 'language'
Sentry Issue: https://sentry.calitp.org/organizations/sentry/issues/69523/?referrer=github_integration
```
KeyError: 'language'
File "django/utils/datastructures.py", line 84, in __getitem__
list_ = super().__getitem__(key)
MultiValueDictKeyError: 'language'
File "benefits/core/middleware.py", line 157, in process_view
new_lang = request.POST["language"]
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `benefits/core/middleware.py`
Content:
```
1 """
2 The core application: middleware definitions for request/response cycle.
3 """
4 import logging
5
6 from django.conf import settings
7 from django.http import HttpResponse
8 from django.shortcuts import redirect
9 from django.template.response import TemplateResponse
10 from django.urls import reverse
11 from django.utils.decorators import decorator_from_middleware
12 from django.utils.deprecation import MiddlewareMixin
13 from django.views import i18n
14
15 from . import analytics, recaptcha, session
16
17
18 logger = logging.getLogger(__name__)
19
20 HEALTHCHECK_PATH = "/healthcheck"
21 ROUTE_INDEX = "core:index"
22 TEMPLATE_USER_ERROR = "200-user-error.html"
23
24
25 def user_error(request):
26 return TemplateResponse(request, TEMPLATE_USER_ERROR)
27
28
29 class AgencySessionRequired(MiddlewareMixin):
30 """Middleware raises an exception for sessions lacking an agency configuration."""
31
32 def process_request(self, request):
33 if session.active_agency(request):
34 logger.debug("Session configured with agency")
35 return None
36 else:
37 logger.debug("Session not configured with agency")
38 return user_error(request)
39
40
41 class EligibleSessionRequired(MiddlewareMixin):
42 """Middleware raises an exception for sessions lacking confirmed eligibility."""
43
44 def process_request(self, request):
45 if session.eligible(request):
46 logger.debug("Session has confirmed eligibility")
47 return None
48 else:
49 logger.debug("Session has no confirmed eligibility")
50 return user_error(request)
51
52
53 class DebugSession(MiddlewareMixin):
54 """Middleware to configure debug context in the request session."""
55
56 def process_request(self, request):
57 session.update(request, debug=settings.DEBUG)
58 return None
59
60
61 class Healthcheck:
62 """Middleware intercepts and accepts /healthcheck requests."""
63
64 def __init__(self, get_response):
65 self.get_response = get_response
66
67 def __call__(self, request):
68 if request.path == HEALTHCHECK_PATH:
69 return HttpResponse("Healthy", content_type="text/plain")
70 return self.get_response(request)
71
72
73 class HealthcheckUserAgents(MiddlewareMixin):
74 """Middleware to return healthcheck for user agents specified in HEALTHCHECK_USER_AGENTS."""
75
76 def process_request(self, request):
77 if hasattr(request, "META"):
78 user_agent = request.META.get("HTTP_USER_AGENT", "")
79 if user_agent in settings.HEALTHCHECK_USER_AGENTS:
80 return HttpResponse("Healthy", content_type="text/plain")
81
82 return self.get_response(request)
83
84
85 class VerifierSessionRequired(MiddlewareMixin):
86 """Middleware raises an exception for sessions lacking an eligibility verifier configuration."""
87
88 def process_request(self, request):
89 if session.verifier(request):
90 logger.debug("Session configured with eligibility verifier")
91 return None
92 else:
93 logger.debug("Session not configured with eligibility verifier")
94 return user_error(request)
95
96
97 class ViewedPageEvent(MiddlewareMixin):
98 """Middleware sends an analytics event for page views."""
99
100 def process_response(self, request, response):
101 event = analytics.ViewedPageEvent(request)
102 try:
103 analytics.send_event(event)
104 except Exception:
105 logger.warning(f"Failed to send event: {event}")
106 finally:
107 return response
108
109
110 pageview_decorator = decorator_from_middleware(ViewedPageEvent)
111
112
113 class ChangedLanguageEvent(MiddlewareMixin):
114 """Middleware hooks into django.views.i18n.set_language to send an analytics event."""
115
116 def process_view(self, request, view_func, view_args, view_kwargs):
117 if view_func == i18n.set_language:
118 new_lang = request.POST["language"]
119 event = analytics.ChangedLanguageEvent(request, new_lang)
120 analytics.send_event(event)
121 return None
122
123
124 class LoginRequired(MiddlewareMixin):
125 """Middleware that checks whether a user is logged in."""
126
127 def process_view(self, request, view_func, view_args, view_kwargs):
128 # only require login if verifier requires it
129 verifier = session.verifier(request)
130 if not verifier or not verifier.is_auth_required or session.logged_in(request):
131 # pass through
132 return None
133
134 return redirect("oauth:login")
135
136
137 class RecaptchaEnabled(MiddlewareMixin):
138 """Middleware configures the request with required reCAPTCHA settings."""
139
140 def process_request(self, request):
141 if settings.RECAPTCHA_ENABLED:
142 request.recaptcha = {
143 "data_field": recaptcha.DATA_FIELD,
144 "script_api": settings.RECAPTCHA_API_KEY_URL,
145 "site_key": settings.RECAPTCHA_SITE_KEY,
146 }
147 return None
148
149
150 class IndexOrAgencyIndexOrigin(MiddlewareMixin):
151 """Middleware sets the session.origin to either the core:index or core:agency_index depending on agency config."""
152
153 def process_request(self, request):
154 if session.active_agency(request):
155 session.update(request, origin=session.agency(request).index_url)
156 else:
157 session.update(request, origin=reverse(ROUTE_INDEX))
158 return None
159
160
161 index_or_agencyindex_origin_decorator = decorator_from_middleware(IndexOrAgencyIndexOrigin)
162
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/benefits/core/middleware.py b/benefits/core/middleware.py
--- a/benefits/core/middleware.py
+++ b/benefits/core/middleware.py
@@ -115,9 +115,12 @@
def process_view(self, request, view_func, view_args, view_kwargs):
if view_func == i18n.set_language:
- new_lang = request.POST["language"]
- event = analytics.ChangedLanguageEvent(request, new_lang)
- analytics.send_event(event)
+ new_lang = request.POST.get("language")
+ if new_lang:
+ event = analytics.ChangedLanguageEvent(request, new_lang)
+ analytics.send_event(event)
+ else:
+ logger.warning("i18n.set_language POST without language")
return None
| {"golden_diff": "diff --git a/benefits/core/middleware.py b/benefits/core/middleware.py\n--- a/benefits/core/middleware.py\n+++ b/benefits/core/middleware.py\n@@ -115,9 +115,12 @@\n \n def process_view(self, request, view_func, view_args, view_kwargs):\n if view_func == i18n.set_language:\n- new_lang = request.POST[\"language\"]\n- event = analytics.ChangedLanguageEvent(request, new_lang)\n- analytics.send_event(event)\n+ new_lang = request.POST.get(\"language\")\n+ if new_lang:\n+ event = analytics.ChangedLanguageEvent(request, new_lang)\n+ analytics.send_event(event)\n+ else:\n+ logger.warning(\"i18n.set_language POST without language\")\n return None\n", "issue": "MultiValueDictKeyError: 'language'\nSentry Issue: (https://sentry.calitp.org/organizations/sentry/issues/69523/?referrer=github_integration)\n\n```\nKeyError: 'language'\n File \"django/utils/datastructures.py\", line 84, in __getitem__\n list_ = super().__getitem__(key)\n\nMultiValueDictKeyError: 'language'\n File \"benefits/core/middleware.py\", line 157, in process_view\n new_lang = request.POST[\"language\"]\n```\n", "before_files": [{"content": "\"\"\"\nThe core application: middleware definitions for request/response cycle.\n\"\"\"\nimport logging\n\nfrom django.conf import settings\nfrom django.http import HttpResponse\nfrom django.shortcuts import redirect\nfrom django.template.response import TemplateResponse\nfrom django.urls import reverse\nfrom django.utils.decorators import decorator_from_middleware\nfrom django.utils.deprecation import MiddlewareMixin\nfrom django.views import i18n\n\nfrom . import analytics, recaptcha, session\n\n\nlogger = logging.getLogger(__name__)\n\nHEALTHCHECK_PATH = \"/healthcheck\"\nROUTE_INDEX = \"core:index\"\nTEMPLATE_USER_ERROR = \"200-user-error.html\"\n\n\ndef user_error(request):\n return TemplateResponse(request, TEMPLATE_USER_ERROR)\n\n\nclass AgencySessionRequired(MiddlewareMixin):\n \"\"\"Middleware raises an exception for sessions lacking an agency configuration.\"\"\"\n\n def process_request(self, request):\n if session.active_agency(request):\n logger.debug(\"Session configured with agency\")\n return None\n else:\n logger.debug(\"Session not configured with agency\")\n return user_error(request)\n\n\nclass EligibleSessionRequired(MiddlewareMixin):\n \"\"\"Middleware raises an exception for sessions lacking confirmed eligibility.\"\"\"\n\n def process_request(self, request):\n if session.eligible(request):\n logger.debug(\"Session has confirmed eligibility\")\n return None\n else:\n logger.debug(\"Session has no confirmed eligibility\")\n return user_error(request)\n\n\nclass DebugSession(MiddlewareMixin):\n \"\"\"Middleware to configure debug context in the request session.\"\"\"\n\n def process_request(self, request):\n session.update(request, debug=settings.DEBUG)\n return None\n\n\nclass Healthcheck:\n \"\"\"Middleware intercepts and accepts /healthcheck requests.\"\"\"\n\n def __init__(self, get_response):\n self.get_response = get_response\n\n def __call__(self, request):\n if request.path == HEALTHCHECK_PATH:\n return HttpResponse(\"Healthy\", content_type=\"text/plain\")\n return self.get_response(request)\n\n\nclass HealthcheckUserAgents(MiddlewareMixin):\n \"\"\"Middleware to return healthcheck for user agents specified in HEALTHCHECK_USER_AGENTS.\"\"\"\n\n def process_request(self, request):\n if hasattr(request, \"META\"):\n user_agent = request.META.get(\"HTTP_USER_AGENT\", \"\")\n if user_agent in settings.HEALTHCHECK_USER_AGENTS:\n return 
HttpResponse(\"Healthy\", content_type=\"text/plain\")\n\n return self.get_response(request)\n\n\nclass VerifierSessionRequired(MiddlewareMixin):\n \"\"\"Middleware raises an exception for sessions lacking an eligibility verifier configuration.\"\"\"\n\n def process_request(self, request):\n if session.verifier(request):\n logger.debug(\"Session configured with eligibility verifier\")\n return None\n else:\n logger.debug(\"Session not configured with eligibility verifier\")\n return user_error(request)\n\n\nclass ViewedPageEvent(MiddlewareMixin):\n \"\"\"Middleware sends an analytics event for page views.\"\"\"\n\n def process_response(self, request, response):\n event = analytics.ViewedPageEvent(request)\n try:\n analytics.send_event(event)\n except Exception:\n logger.warning(f\"Failed to send event: {event}\")\n finally:\n return response\n\n\npageview_decorator = decorator_from_middleware(ViewedPageEvent)\n\n\nclass ChangedLanguageEvent(MiddlewareMixin):\n \"\"\"Middleware hooks into django.views.i18n.set_language to send an analytics event.\"\"\"\n\n def process_view(self, request, view_func, view_args, view_kwargs):\n if view_func == i18n.set_language:\n new_lang = request.POST[\"language\"]\n event = analytics.ChangedLanguageEvent(request, new_lang)\n analytics.send_event(event)\n return None\n\n\nclass LoginRequired(MiddlewareMixin):\n \"\"\"Middleware that checks whether a user is logged in.\"\"\"\n\n def process_view(self, request, view_func, view_args, view_kwargs):\n # only require login if verifier requires it\n verifier = session.verifier(request)\n if not verifier or not verifier.is_auth_required or session.logged_in(request):\n # pass through\n return None\n\n return redirect(\"oauth:login\")\n\n\nclass RecaptchaEnabled(MiddlewareMixin):\n \"\"\"Middleware configures the request with required reCAPTCHA settings.\"\"\"\n\n def process_request(self, request):\n if settings.RECAPTCHA_ENABLED:\n request.recaptcha = {\n \"data_field\": recaptcha.DATA_FIELD,\n \"script_api\": settings.RECAPTCHA_API_KEY_URL,\n \"site_key\": settings.RECAPTCHA_SITE_KEY,\n }\n return None\n\n\nclass IndexOrAgencyIndexOrigin(MiddlewareMixin):\n \"\"\"Middleware sets the session.origin to either the core:index or core:agency_index depending on agency config.\"\"\"\n\n def process_request(self, request):\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n return None\n\n\nindex_or_agencyindex_origin_decorator = decorator_from_middleware(IndexOrAgencyIndexOrigin)\n", "path": "benefits/core/middleware.py"}], "after_files": [{"content": "\"\"\"\nThe core application: middleware definitions for request/response cycle.\n\"\"\"\nimport logging\n\nfrom django.conf import settings\nfrom django.http import HttpResponse\nfrom django.shortcuts import redirect\nfrom django.template.response import TemplateResponse\nfrom django.urls import reverse\nfrom django.utils.decorators import decorator_from_middleware\nfrom django.utils.deprecation import MiddlewareMixin\nfrom django.views import i18n\n\nfrom . 
import analytics, recaptcha, session\n\n\nlogger = logging.getLogger(__name__)\n\nHEALTHCHECK_PATH = \"/healthcheck\"\nROUTE_INDEX = \"core:index\"\nTEMPLATE_USER_ERROR = \"200-user-error.html\"\n\n\ndef user_error(request):\n return TemplateResponse(request, TEMPLATE_USER_ERROR)\n\n\nclass AgencySessionRequired(MiddlewareMixin):\n \"\"\"Middleware raises an exception for sessions lacking an agency configuration.\"\"\"\n\n def process_request(self, request):\n if session.active_agency(request):\n logger.debug(\"Session configured with agency\")\n return None\n else:\n logger.debug(\"Session not configured with agency\")\n return user_error(request)\n\n\nclass EligibleSessionRequired(MiddlewareMixin):\n \"\"\"Middleware raises an exception for sessions lacking confirmed eligibility.\"\"\"\n\n def process_request(self, request):\n if session.eligible(request):\n logger.debug(\"Session has confirmed eligibility\")\n return None\n else:\n logger.debug(\"Session has no confirmed eligibility\")\n return user_error(request)\n\n\nclass DebugSession(MiddlewareMixin):\n \"\"\"Middleware to configure debug context in the request session.\"\"\"\n\n def process_request(self, request):\n session.update(request, debug=settings.DEBUG)\n return None\n\n\nclass Healthcheck:\n \"\"\"Middleware intercepts and accepts /healthcheck requests.\"\"\"\n\n def __init__(self, get_response):\n self.get_response = get_response\n\n def __call__(self, request):\n if request.path == HEALTHCHECK_PATH:\n return HttpResponse(\"Healthy\", content_type=\"text/plain\")\n return self.get_response(request)\n\n\nclass HealthcheckUserAgents(MiddlewareMixin):\n \"\"\"Middleware to return healthcheck for user agents specified in HEALTHCHECK_USER_AGENTS.\"\"\"\n\n def process_request(self, request):\n if hasattr(request, \"META\"):\n user_agent = request.META.get(\"HTTP_USER_AGENT\", \"\")\n if user_agent in settings.HEALTHCHECK_USER_AGENTS:\n return HttpResponse(\"Healthy\", content_type=\"text/plain\")\n\n return self.get_response(request)\n\n\nclass VerifierSessionRequired(MiddlewareMixin):\n \"\"\"Middleware raises an exception for sessions lacking an eligibility verifier configuration.\"\"\"\n\n def process_request(self, request):\n if session.verifier(request):\n logger.debug(\"Session configured with eligibility verifier\")\n return None\n else:\n logger.debug(\"Session not configured with eligibility verifier\")\n return user_error(request)\n\n\nclass ViewedPageEvent(MiddlewareMixin):\n \"\"\"Middleware sends an analytics event for page views.\"\"\"\n\n def process_response(self, request, response):\n event = analytics.ViewedPageEvent(request)\n try:\n analytics.send_event(event)\n except Exception:\n logger.warning(f\"Failed to send event: {event}\")\n finally:\n return response\n\n\npageview_decorator = decorator_from_middleware(ViewedPageEvent)\n\n\nclass ChangedLanguageEvent(MiddlewareMixin):\n \"\"\"Middleware hooks into django.views.i18n.set_language to send an analytics event.\"\"\"\n\n def process_view(self, request, view_func, view_args, view_kwargs):\n if view_func == i18n.set_language:\n new_lang = request.POST.get(\"language\")\n if new_lang:\n event = analytics.ChangedLanguageEvent(request, new_lang)\n analytics.send_event(event)\n else:\n logger.warning(\"i18n.set_language POST without language\")\n return None\n\n\nclass LoginRequired(MiddlewareMixin):\n \"\"\"Middleware that checks whether a user is logged in.\"\"\"\n\n def process_view(self, request, view_func, view_args, view_kwargs):\n # only require login 
if verifier requires it\n verifier = session.verifier(request)\n if not verifier or not verifier.is_auth_required or session.logged_in(request):\n # pass through\n return None\n\n return redirect(\"oauth:login\")\n\n\nclass RecaptchaEnabled(MiddlewareMixin):\n \"\"\"Middleware configures the request with required reCAPTCHA settings.\"\"\"\n\n def process_request(self, request):\n if settings.RECAPTCHA_ENABLED:\n request.recaptcha = {\n \"data_field\": recaptcha.DATA_FIELD,\n \"script_api\": settings.RECAPTCHA_API_KEY_URL,\n \"site_key\": settings.RECAPTCHA_SITE_KEY,\n }\n return None\n\n\nclass IndexOrAgencyIndexOrigin(MiddlewareMixin):\n \"\"\"Middleware sets the session.origin to either the core:index or core:agency_index depending on agency config.\"\"\"\n\n def process_request(self, request):\n if session.active_agency(request):\n session.update(request, origin=session.agency(request).index_url)\n else:\n session.update(request, origin=reverse(ROUTE_INDEX))\n return None\n\n\nindex_or_agencyindex_origin_decorator = decorator_from_middleware(IndexOrAgencyIndexOrigin)\n", "path": "benefits/core/middleware.py"}]} | 1,791 | 177 |
gh_patches_debug_3496 | rasdani/github-patches | git_diff | strawberry-graphql__strawberry-858 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
from_pydantic() converts False values to None
When calling `from_pydantic()`, values consistent with `bool(value) == False` may be replaced with None.
This recreates the issue:
```
from pydantic import BaseModel
import strawberry
class PydanticClass(BaseModel):
str1: str
str2: str
bool1: bool
bool2: bool
@strawberry.experimental.pydantic.type(
model=PydanticClass,
fields=['str1', 'str2', 'bool1', 'bool2']
)
class StrawberryClass:
pass
str1 = 'nonempty'
str2 = ''
bool1 = True
bool2 = False
myobj = PydanticClass(
str1=str1,
str2=str2,
bool1=bool1,
bool2=bool2
)
print('pydantic obj:', myobj)
converted = StrawberryClass.from_pydantic(myobj)
print('converted:', converted)
```
The output:
```
pydantic obj: str1='nonempty' str2='' bool1=True bool2=False
converted obj: StrawberryClass(str1='nonempty', str2=None, bool1=True, bool2=None)
```
Both str2 and bool2 were converted to None.
Location of the bug: https://github.com/strawberry-graphql/strawberry/blob/main/strawberry/experimental/pydantic/conversion.py#L10
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `strawberry/experimental/pydantic/conversion.py`
Content:
```
1 from typing import cast
2
3 from strawberry.field import StrawberryField
4 from strawberry.scalars import is_scalar
5
6
7 def _convert_from_pydantic_to_strawberry_field(
8 field: StrawberryField, data_from_model=None, extra=None
9 ):
10 data = data_from_model or extra
11
12 if field.is_list:
13 assert field.child is not None
14
15 items = [None for _ in data]
16
17 for index, item in enumerate(data):
18 items[index] = _convert_from_pydantic_to_strawberry_field(
19 field.child,
20 data_from_model=item,
21 extra=extra[index] if extra else None,
22 )
23
24 return items
25 elif is_scalar(field.type): # type: ignore
26 return data
27 else:
28 return convert_pydantic_model_to_strawberry_class(
29 field.type, model_instance=data_from_model, extra=extra
30 )
31
32
33 def convert_pydantic_model_to_strawberry_class(cls, *, model_instance=None, extra=None):
34 extra = extra or {}
35 kwargs = {}
36
37 for field in cls._type_definition.fields:
38 field = cast(StrawberryField, field)
39 python_name = field.python_name
40
41 data_from_extra = extra.get(python_name, None)
42 data_from_model = (
43 getattr(model_instance, python_name, None) if model_instance else None
44 )
45 kwargs[python_name] = _convert_from_pydantic_to_strawberry_field(
46 field, data_from_model, extra=data_from_extra
47 )
48
49 return cls(**kwargs)
50
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/strawberry/experimental/pydantic/conversion.py b/strawberry/experimental/pydantic/conversion.py
--- a/strawberry/experimental/pydantic/conversion.py
+++ b/strawberry/experimental/pydantic/conversion.py
@@ -7,7 +7,7 @@
def _convert_from_pydantic_to_strawberry_field(
field: StrawberryField, data_from_model=None, extra=None
):
- data = data_from_model or extra
+ data = data_from_model if data_from_model is not None else extra
if field.is_list:
assert field.child is not None
| {"golden_diff": "diff --git a/strawberry/experimental/pydantic/conversion.py b/strawberry/experimental/pydantic/conversion.py\n--- a/strawberry/experimental/pydantic/conversion.py\n+++ b/strawberry/experimental/pydantic/conversion.py\n@@ -7,7 +7,7 @@\n def _convert_from_pydantic_to_strawberry_field(\n field: StrawberryField, data_from_model=None, extra=None\n ):\n- data = data_from_model or extra\n+ data = data_from_model if data_from_model is not None else extra\n \n if field.is_list:\n assert field.child is not None\n", "issue": "from_pydantic() converts False values to None\nWhen calling `from_pydantic()`, values consistent with `bool(value) == False` may be replaced with None. \r\n\r\nThis recreates the issue:\r\n```\r\nfrom pydantic import BaseModel\r\nimport strawberry\r\n\r\nclass PydanticClass(BaseModel):\r\n str1: str\r\n str2: str\r\n bool1: bool\r\n bool2: bool\r\n\r\[email protected](\r\n model=PydanticClass,\r\n fields=['str1', 'str2', 'bool1', 'bool2']\r\n)\r\nclass StrawberryClass:\r\n pass\r\n\r\nstr1 = 'nonempty'\r\nstr2 = ''\r\nbool1 = True\r\nbool2 = False\r\n\r\nmyobj = PydanticClass(\r\n str1=str1,\r\n str2=str2,\r\n bool1=bool1,\r\n bool2=bool2\r\n)\r\nprint('pydantic obj:', myobj)\r\n\r\nconverted = StrawberryClass.from_pydantic(myobj)\r\nprint('converted:', converted)\r\n```\r\n\r\nThe output:\r\n```\r\npydantic obj: str1='nonempty' str2='' bool1=True bool2=False\r\nconverted obj: StrawberryClass(str1='nonempty', str2=None, bool1=True, bool2=None)\r\n```\r\nBoth str2 and bool2 were converted to None.\r\n\r\nLocation of the bug: https://github.com/strawberry-graphql/strawberry/blob/main/strawberry/experimental/pydantic/conversion.py#L10\r\n\r\n\n", "before_files": [{"content": "from typing import cast\n\nfrom strawberry.field import StrawberryField\nfrom strawberry.scalars import is_scalar\n\n\ndef _convert_from_pydantic_to_strawberry_field(\n field: StrawberryField, data_from_model=None, extra=None\n):\n data = data_from_model or extra\n\n if field.is_list:\n assert field.child is not None\n\n items = [None for _ in data]\n\n for index, item in enumerate(data):\n items[index] = _convert_from_pydantic_to_strawberry_field(\n field.child,\n data_from_model=item,\n extra=extra[index] if extra else None,\n )\n\n return items\n elif is_scalar(field.type): # type: ignore\n return data\n else:\n return convert_pydantic_model_to_strawberry_class(\n field.type, model_instance=data_from_model, extra=extra\n )\n\n\ndef convert_pydantic_model_to_strawberry_class(cls, *, model_instance=None, extra=None):\n extra = extra or {}\n kwargs = {}\n\n for field in cls._type_definition.fields:\n field = cast(StrawberryField, field)\n python_name = field.python_name\n\n data_from_extra = extra.get(python_name, None)\n data_from_model = (\n getattr(model_instance, python_name, None) if model_instance else None\n )\n kwargs[python_name] = _convert_from_pydantic_to_strawberry_field(\n field, data_from_model, extra=data_from_extra\n )\n\n return cls(**kwargs)\n", "path": "strawberry/experimental/pydantic/conversion.py"}], "after_files": [{"content": "from typing import cast\n\nfrom strawberry.field import StrawberryField\nfrom strawberry.scalars import is_scalar\n\n\ndef _convert_from_pydantic_to_strawberry_field(\n field: StrawberryField, data_from_model=None, extra=None\n):\n data = data_from_model if data_from_model is not None else extra\n\n if field.is_list:\n assert field.child is not None\n\n items = [None for _ in data]\n\n for index, item in enumerate(data):\n items[index] = 
_convert_from_pydantic_to_strawberry_field(\n field.child,\n data_from_model=item,\n extra=extra[index] if extra else None,\n )\n\n return items\n elif is_scalar(field.type): # type: ignore\n return data\n else:\n return convert_pydantic_model_to_strawberry_class(\n field.type, model_instance=data_from_model, extra=extra\n )\n\n\ndef convert_pydantic_model_to_strawberry_class(cls, *, model_instance=None, extra=None):\n extra = extra or {}\n kwargs = {}\n\n for field in cls._type_definition.fields:\n field = cast(StrawberryField, field)\n python_name = field.python_name\n\n data_from_extra = extra.get(python_name, None)\n data_from_model = (\n getattr(model_instance, python_name, None) if model_instance else None\n )\n kwargs[python_name] = _convert_from_pydantic_to_strawberry_field(\n field, data_from_model, extra=data_from_extra\n )\n\n return cls(**kwargs)\n", "path": "strawberry/experimental/pydantic/conversion.py"}]} | 1,003 | 139 |
gh_patches_debug_24313 | rasdani/github-patches | git_diff | microsoft__ptvsd-111 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Pip Installing PTVSD fails
* Python2.7
* Pip install ptvsd from local source fails with the following error:
```
running build_ext
building 'ptvsd.pydevd._pydevd_bundle.pydevd_cython' extension
error: Microsoft Visual C++ 9.0 is required. Get it from http://aka.ms/vcpython27
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 # Copyright (c) Microsoft Corporation. All rights reserved.
4 # Licensed under the MIT License. See LICENSE in the project root
5 # for license information.
6
7 import os
8 import os.path
9 from setuptools import setup, Extension
10
11 ROOT = os.path.dirname(os.path.abspath(__file__))
12
13 # Add pydevd files as data files for this package. They are not treated as a package of their own,
14 # because we don't actually want to provide pydevd - just use our own copy internally.
15 def get_pydevd_package_data():
16 ptvsd_prefix = os.path.join(ROOT, 'ptvsd')
17 pydevd_prefix = os.path.join(ptvsd_prefix, 'pydevd')
18 for root, dirs, files in os.walk(pydevd_prefix):
19 # From the root of pydevd repo, we want only scripts and subdirectories that
20 # constitute the package itself (not helper scripts, tests etc). But when
21 # walking down into those subdirectories, we want everything below.
22 if os.path.normcase(root) == os.path.normcase(pydevd_prefix):
23 dirs[:] = [d for d in dirs if d.startswith('pydev') or d.startswith('_pydev')]
24 files[:] = [f for f in files if f.endswith('.py') and 'pydev' in f]
25 for f in files:
26 yield os.path.join(root[len(ptvsd_prefix) + 1:], f)
27
28 setup(name='ptvsd',
29 version='4.0.0a1',
30 description='Visual Studio remote debugging server for Python',
31 license='MIT',
32 author='Microsoft Corporation',
33       author_email='ptvshelp@microsoft.com',
34 url='https://aka.ms/ptvs',
35 classifiers=[
36 'Development Status :: 3 - Alpha',
37 'Programming Language :: Python',
38 'Programming Language :: Python :: 2',
39 'Programming Language :: Python :: 3',
40 'License :: OSI Approved :: MIT License'],
41 packages=['ptvsd'],
42 package_data={'ptvsd': list(get_pydevd_package_data()) + ['ThirdPartyNotices.txt']},
43 ext_modules=[Extension('ptvsd.pydevd._pydevd_bundle.pydevd_cython',
44 ['ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c'],
45 optional=True)],
46 )
47
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -6,6 +6,7 @@
import os
import os.path
+import sys
from setuptools import setup, Extension
ROOT = os.path.dirname(os.path.abspath(__file__))
@@ -25,6 +26,18 @@
for f in files:
yield os.path.join(root[len(ptvsd_prefix) + 1:], f)
+cmdclass = {}
+
+if sys.version_info[0] == 2:
+ from setuptools.command.build_ext import build_ext
+ class build_optional_ext(build_ext):
+ def build_extension(self, ext):
+ try:
+ super(build_optional_ext, self).build_extension(ext)
+ except:
+ pass
+ cmdclass = { 'build_ext': build_optional_ext }
+
setup(name='ptvsd',
version='4.0.0a1',
description='Visual Studio remote debugging server for Python',
@@ -43,4 +56,5 @@
ext_modules=[Extension('ptvsd.pydevd._pydevd_bundle.pydevd_cython',
['ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c'],
optional=True)],
+ cmdclass=cmdclass,
)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -6,6 +6,7 @@\n \n import os\n import os.path\n+import sys\n from setuptools import setup, Extension\n \n ROOT = os.path.dirname(os.path.abspath(__file__))\n@@ -25,6 +26,18 @@\n for f in files:\n yield os.path.join(root[len(ptvsd_prefix) + 1:], f)\n \n+cmdclass = {}\n+\n+if sys.version_info[0] == 2:\n+ from setuptools.command.build_ext import build_ext\n+ class build_optional_ext(build_ext):\n+ def build_extension(self, ext):\n+ try:\n+ super(build_optional_ext, self).build_extension(ext)\n+ except:\n+ pass\n+ cmdclass = { 'build_ext': build_optional_ext }\n+\n setup(name='ptvsd',\n version='4.0.0a1',\n description='Visual Studio remote debugging server for Python',\n@@ -43,4 +56,5 @@\n ext_modules=[Extension('ptvsd.pydevd._pydevd_bundle.pydevd_cython',\n ['ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c'],\n optional=True)],\n+ cmdclass=cmdclass,\n )\n", "issue": "Pip Installing PTVSD fails \n* Python2.7\r\n* Pip install ptvsd from local source fails with the following error:\r\n```\r\nrunning build_ext\r\n building 'ptvsd.pydevd._pydevd_bundle.pydevd_cython' extension\r\n error: Microsoft Visual C++ 9.0 is required. Get it from http://aka.ms/vcpython27\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\n\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport os\nimport os.path\nfrom setuptools import setup, Extension\n\nROOT = os.path.dirname(os.path.abspath(__file__))\n\n# Add pydevd files as data files for this package. They are not treated as a package of their own,\n# because we don't actually want to provide pydevd - just use our own copy internally.\ndef get_pydevd_package_data():\n ptvsd_prefix = os.path.join(ROOT, 'ptvsd')\n pydevd_prefix = os.path.join(ptvsd_prefix, 'pydevd')\n for root, dirs, files in os.walk(pydevd_prefix):\n # From the root of pydevd repo, we want only scripts and subdirectories that\n # constitute the package itself (not helper scripts, tests etc). But when\n # walking down into those subdirectories, we want everything below.\n if os.path.normcase(root) == os.path.normcase(pydevd_prefix):\n dirs[:] = [d for d in dirs if d.startswith('pydev') or d.startswith('_pydev')]\n files[:] = [f for f in files if f.endswith('.py') and 'pydev' in f]\n for f in files:\n yield os.path.join(root[len(ptvsd_prefix) + 1:], f)\n\nsetup(name='ptvsd',\n version='4.0.0a1',\n description='Visual Studio remote debugging server for Python',\n license='MIT',\n author='Microsoft Corporation',\n author_email='[email protected]',\n url='https://aka.ms/ptvs',\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3',\n 'License :: OSI Approved :: MIT License'],\n packages=['ptvsd'],\n package_data={'ptvsd': list(get_pydevd_package_data()) + ['ThirdPartyNotices.txt']},\n ext_modules=[Extension('ptvsd.pydevd._pydevd_bundle.pydevd_cython',\n ['ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c'],\n optional=True)],\n )\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. 
See LICENSE in the project root\n# for license information.\n\nimport os\nimport os.path\nimport sys\nfrom setuptools import setup, Extension\n\nROOT = os.path.dirname(os.path.abspath(__file__))\n\n# Add pydevd files as data files for this package. They are not treated as a package of their own,\n# because we don't actually want to provide pydevd - just use our own copy internally.\ndef get_pydevd_package_data():\n ptvsd_prefix = os.path.join(ROOT, 'ptvsd')\n pydevd_prefix = os.path.join(ptvsd_prefix, 'pydevd')\n for root, dirs, files in os.walk(pydevd_prefix):\n # From the root of pydevd repo, we want only scripts and subdirectories that\n # constitute the package itself (not helper scripts, tests etc). But when\n # walking down into those subdirectories, we want everything below.\n if os.path.normcase(root) == os.path.normcase(pydevd_prefix):\n dirs[:] = [d for d in dirs if d.startswith('pydev') or d.startswith('_pydev')]\n files[:] = [f for f in files if f.endswith('.py') and 'pydev' in f]\n for f in files:\n yield os.path.join(root[len(ptvsd_prefix) + 1:], f)\n\ncmdclass = {}\n\nif sys.version_info[0] == 2:\n from setuptools.command.build_ext import build_ext\n class build_optional_ext(build_ext):\n def build_extension(self, ext):\n try:\n super(build_optional_ext, self).build_extension(ext)\n except:\n pass\n cmdclass = { 'build_ext': build_optional_ext }\n\nsetup(name='ptvsd',\n version='4.0.0a1',\n description='Visual Studio remote debugging server for Python',\n license='MIT',\n author='Microsoft Corporation',\n author_email='[email protected]',\n url='https://aka.ms/ptvs',\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3',\n 'License :: OSI Approved :: MIT License'],\n packages=['ptvsd'],\n package_data={'ptvsd': list(get_pydevd_package_data()) + ['ThirdPartyNotices.txt']},\n ext_modules=[Extension('ptvsd.pydevd._pydevd_bundle.pydevd_cython',\n ['ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c'],\n optional=True)],\n cmdclass=cmdclass,\n )\n", "path": "setup.py"}]} | 931 | 283 |
gh_patches_debug_19118 | rasdani/github-patches | git_diff | pfnet__pytorch-pfn-extras-788 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix nightly CPU test failures
https://github.com/pfnet/pytorch-pfn-extras/actions/workflows/nightly-test-cpu.yml
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pytorch_pfn_extras/distributed/_distributed_validation_sampler.py`
Content:
```
1 from typing import Iterator, Optional, Sized, TypeVar
2
3 import numpy as np
4 import torch
5 import torch.distributed as dist
6
7 T_co = TypeVar("T_co", covariant=True)
8
9
10 class DistributedValidationSampler(torch.utils.data.Sampler):
11 """Distributed sampler without duplication
12
13 This sampler splits the input dataset to each worker process in distributed setup
14 without allowing repetition.
15 It is for evaluation purpose such as :class:`~DistributedEvaluator`.
16 This does not guarantee each worker to get the same number of samples,
17 so for training do not use this sampler (use PyTorch DistributedSampler instead).
18 """
19
20 def __init__(
21 self,
22 dataset: Sized,
23 num_replicas: Optional[int] = None,
24 rank: Optional[int] = None,
25 shuffle: bool = True,
26 seed: int = 0,
27 ) -> None:
28 if num_replicas is None:
29 if not dist.is_available(): # type: ignore[no-untyped-call]
30 raise RuntimeError(
31 "Requires distributed package to be available"
32 )
33 num_replicas = dist.get_world_size() # type: ignore[no-untyped-call]
34 if rank is None:
35 if not dist.is_available(): # type: ignore[no-untyped-call]
36 raise RuntimeError(
37 "Requires distributed package to be available"
38 )
39 rank = dist.get_rank() # type: ignore[no-untyped-call]
40 if rank >= num_replicas or rank < 0:
41 raise ValueError(
42 "Invalid rank {}, rank should be in the interval"
43 " [0, {}]".format(rank, num_replicas - 1)
44 )
45 self.dataset = dataset
46 self.num_replicas = num_replicas
47 self.rank = rank
48 self.shuffle = shuffle
49 self.seed = seed
50
51 self.dataset_len = len(dataset)
52 self.num_samples = len(
53 np.array_split(range(self.dataset_len), num_replicas)[rank]
54 )
55
56 def __iter__(self) -> Iterator[T_co]:
57 if self.shuffle:
58 # deterministically shuffle based on epoch and seed
59 g = torch.Generator()
60 g.manual_seed(self.seed)
61 indices = torch.randperm(self.dataset_len, generator=g).tolist()
62 else:
63 indices = list(range(self.dataset_len))
64
65 return iter(np.array_split(indices, self.num_replicas)[self.rank])
66
67 def __len__(self) -> int:
68 return self.num_samples
69
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pytorch_pfn_extras/distributed/_distributed_validation_sampler.py b/pytorch_pfn_extras/distributed/_distributed_validation_sampler.py
--- a/pytorch_pfn_extras/distributed/_distributed_validation_sampler.py
+++ b/pytorch_pfn_extras/distributed/_distributed_validation_sampler.py
@@ -26,13 +26,13 @@
seed: int = 0,
) -> None:
if num_replicas is None:
- if not dist.is_available(): # type: ignore[no-untyped-call]
+ if not dist.is_available() or not dist.is_initialized(): # type: ignore[no-untyped-call]
raise RuntimeError(
"Requires distributed package to be available"
)
num_replicas = dist.get_world_size() # type: ignore[no-untyped-call]
if rank is None:
- if not dist.is_available(): # type: ignore[no-untyped-call]
+ if not dist.is_available() or not dist.is_initialized(): # type: ignore[no-untyped-call]
raise RuntimeError(
"Requires distributed package to be available"
)
| {"golden_diff": "diff --git a/pytorch_pfn_extras/distributed/_distributed_validation_sampler.py b/pytorch_pfn_extras/distributed/_distributed_validation_sampler.py\n--- a/pytorch_pfn_extras/distributed/_distributed_validation_sampler.py\n+++ b/pytorch_pfn_extras/distributed/_distributed_validation_sampler.py\n@@ -26,13 +26,13 @@\n seed: int = 0,\n ) -> None:\n if num_replicas is None:\n- if not dist.is_available(): # type: ignore[no-untyped-call]\n+ if not dist.is_available() or not dist.is_initialized(): # type: ignore[no-untyped-call]\n raise RuntimeError(\n \"Requires distributed package to be available\"\n )\n num_replicas = dist.get_world_size() # type: ignore[no-untyped-call]\n if rank is None:\n- if not dist.is_available(): # type: ignore[no-untyped-call]\n+ if not dist.is_available() or not dist.is_initialized(): # type: ignore[no-untyped-call]\n raise RuntimeError(\n \"Requires distributed package to be available\"\n )\n", "issue": "Fix nightly CPU test failures\nhttps://github.com/pfnet/pytorch-pfn-extras/actions/workflows/nightly-test-cpu.yml\n", "before_files": [{"content": "from typing import Iterator, Optional, Sized, TypeVar\n\nimport numpy as np\nimport torch\nimport torch.distributed as dist\n\nT_co = TypeVar(\"T_co\", covariant=True)\n\n\nclass DistributedValidationSampler(torch.utils.data.Sampler):\n \"\"\"Distributed sampler without duplication\n\n This sampler splits the input dataset to each worker process in distributed setup\n without allowing repetition.\n It is for evaluation purpose such as :class:`~DistributedEvaluator`.\n This does not guarantee each worker to get the same number of samples,\n so for training do not use this sampler (use PyTorch DistributedSampler instead).\n \"\"\"\n\n def __init__(\n self,\n dataset: Sized,\n num_replicas: Optional[int] = None,\n rank: Optional[int] = None,\n shuffle: bool = True,\n seed: int = 0,\n ) -> None:\n if num_replicas is None:\n if not dist.is_available(): # type: ignore[no-untyped-call]\n raise RuntimeError(\n \"Requires distributed package to be available\"\n )\n num_replicas = dist.get_world_size() # type: ignore[no-untyped-call]\n if rank is None:\n if not dist.is_available(): # type: ignore[no-untyped-call]\n raise RuntimeError(\n \"Requires distributed package to be available\"\n )\n rank = dist.get_rank() # type: ignore[no-untyped-call]\n if rank >= num_replicas or rank < 0:\n raise ValueError(\n \"Invalid rank {}, rank should be in the interval\"\n \" [0, {}]\".format(rank, num_replicas - 1)\n )\n self.dataset = dataset\n self.num_replicas = num_replicas\n self.rank = rank\n self.shuffle = shuffle\n self.seed = seed\n\n self.dataset_len = len(dataset)\n self.num_samples = len(\n np.array_split(range(self.dataset_len), num_replicas)[rank]\n )\n\n def __iter__(self) -> Iterator[T_co]:\n if self.shuffle:\n # deterministically shuffle based on epoch and seed\n g = torch.Generator()\n g.manual_seed(self.seed)\n indices = torch.randperm(self.dataset_len, generator=g).tolist()\n else:\n indices = list(range(self.dataset_len))\n\n return iter(np.array_split(indices, self.num_replicas)[self.rank])\n\n def __len__(self) -> int:\n return self.num_samples\n", "path": "pytorch_pfn_extras/distributed/_distributed_validation_sampler.py"}], "after_files": [{"content": "from typing import Iterator, Optional, Sized, TypeVar\n\nimport numpy as np\nimport torch\nimport torch.distributed as dist\n\nT_co = TypeVar(\"T_co\", covariant=True)\n\n\nclass DistributedValidationSampler(torch.utils.data.Sampler):\n \"\"\"Distributed 
sampler without duplication\n\n This sampler splits the input dataset to each worker process in distributed setup\n without allowing repetition.\n It is for evaluation purpose such as :class:`~DistributedEvaluator`.\n This does not guarantee each worker to get the same number of samples,\n so for training do not use this sampler (use PyTorch DistributedSampler instead).\n \"\"\"\n\n def __init__(\n self,\n dataset: Sized,\n num_replicas: Optional[int] = None,\n rank: Optional[int] = None,\n shuffle: bool = True,\n seed: int = 0,\n ) -> None:\n if num_replicas is None:\n if not dist.is_available() or not dist.is_initialized(): # type: ignore[no-untyped-call]\n raise RuntimeError(\n \"Requires distributed package to be available\"\n )\n num_replicas = dist.get_world_size() # type: ignore[no-untyped-call]\n if rank is None:\n if not dist.is_available() or not dist.is_initialized(): # type: ignore[no-untyped-call]\n raise RuntimeError(\n \"Requires distributed package to be available\"\n )\n rank = dist.get_rank() # type: ignore[no-untyped-call]\n if rank >= num_replicas or rank < 0:\n raise ValueError(\n \"Invalid rank {}, rank should be in the interval\"\n \" [0, {}]\".format(rank, num_replicas - 1)\n )\n self.dataset = dataset\n self.num_replicas = num_replicas\n self.rank = rank\n self.shuffle = shuffle\n self.seed = seed\n\n self.dataset_len = len(dataset)\n self.num_samples = len(\n np.array_split(range(self.dataset_len), num_replicas)[rank]\n )\n\n def __iter__(self) -> Iterator[T_co]:\n if self.shuffle:\n # deterministically shuffle based on epoch and seed\n g = torch.Generator()\n g.manual_seed(self.seed)\n indices = torch.randperm(self.dataset_len, generator=g).tolist()\n else:\n indices = list(range(self.dataset_len))\n\n return iter(np.array_split(indices, self.num_replicas)[self.rank])\n\n def __len__(self) -> int:\n return self.num_samples\n", "path": "pytorch_pfn_extras/distributed/_distributed_validation_sampler.py"}]} | 952 | 244 |
gh_patches_debug_32661 | rasdani/github-patches | git_diff | SeldonIO__MLServer-1169 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
500 Error on MacOS
I'm running MLServer on MacOS (arm). I was following the [Serving XGBoost models](https://mlserver.readthedocs.io/en/latest/examples/xgboost/README.html) example. I receive a 500 error when requesting a model with the URL `http://localhost:8080/v2/models/mushroom-xgboost/versions/v0.1.0/infer`.
MLServer throws an error
```
...
File "/opt/homebrew/Cellar/[email protected]/3.10.9/Frameworks/Python.framework/Versions/3.10/lib/python3.10/multiprocessing/queues.py", line 126, in qsize
return self._maxsize - self._sem._semlock._get_value()
NotImplementedError
```
Developers of `queues.py` left a comment that this is broken on MacOS 😬
```python
def qsize(self):
# Raises NotImplementedError on Mac OSX because of broken sem_getvalue()
return self._maxsize - self._sem._semlock._get_value()
```
Is it possible to get around this when running models with MLServer?
Thanks!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mlserver/parallel/dispatcher.py`
Content:
```
1 import asyncio
2
3 from typing import Dict, List, Tuple
4 from itertools import cycle
5 from multiprocessing import Queue
6 from concurrent.futures import ThreadPoolExecutor
7 from asyncio import Future
8
9 from ..utils import schedule_with_callback, generate_uuid
10 from ..metrics import REGISTRY
11
12 from .worker import Worker
13 from .logging import logger
14 from .utils import END_OF_QUEUE, cancel_task
15 from .messages import (
16 Message,
17 ModelUpdateMessage,
18 ModelRequestMessage,
19 ModelResponseMessage,
20 )
21 from prometheus_client import Histogram
22
23 QUEUE_METRIC_NAME = "parallel_request_queue"
24
25
26 class Dispatcher:
27 def __init__(self, workers: Dict[int, Worker], responses: Queue):
28 self._responses = responses
29 self._workers = workers
30 self._workers_round_robin = cycle(self._workers.keys())
31 self._active = False
32 self._process_responses_task = None
33 self._executor = ThreadPoolExecutor()
34 self._async_responses: Dict[str, Future[ModelResponseMessage]] = {}
35 self.parallel_request_queue_size = self._get_or_create_metric()
36
37 def _get_or_create_metric(self) -> Histogram:
38 if QUEUE_METRIC_NAME in REGISTRY:
39 return REGISTRY[QUEUE_METRIC_NAME] # type: ignore
40
41 return Histogram(
42 QUEUE_METRIC_NAME,
43 "counter of request queue size for workers",
44 ["workerpid"],
45 registry=REGISTRY,
46 )
47
48 def start(self):
49 self._active = True
50 self._process_responses_task = schedule_with_callback(
51 self._process_responses(), self._process_responses_cb
52 )
53
54 def _process_responses_cb(self, process_responses):
55 try:
56 process_responses.result()
57 except asyncio.CancelledError:
58 # NOTE: The response loop was cancelled from the outside, so don't
59 # restart
60 return
61 except Exception:
62 logger.exception("Response processing loop crashed. Restarting the loop...")
63 # If process loop crashed, restart it
64 self.start()
65
66 async def _process_responses(self):
67 logger.debug("Starting response processing loop...")
68 loop = asyncio.get_event_loop()
69 while self._active:
70 response = await loop.run_in_executor(self._executor, self._responses.get)
71
72 # If the queue gets terminated, detect the "sentinel value" and
73 # stop reading
74 if response is END_OF_QUEUE:
75 return
76
77 await self._process_response(response)
78
79 async def _process_response(self, response: ModelResponseMessage):
80 internal_id = response.id
81
82 async_response = self._async_responses[internal_id]
83
84 # NOTE: Use call_soon_threadsafe to cover cases where `model.predict()`
85 # (or other methods) get called from a separate thread (and a separate
86 # AsyncIO loop)
87 response_loop = async_response.get_loop()
88 if response.exception:
89 response_loop.call_soon_threadsafe(
90 async_response.set_exception, response.exception
91 )
92 else:
93 response_loop.call_soon_threadsafe(async_response.set_result, response)
94
95 async def dispatch_request(
96 self, request_message: ModelRequestMessage
97 ) -> ModelResponseMessage:
98 worker, wpid = self._get_worker()
99 self._workers_queue_monitor(worker, wpid)
100 worker.send_request(request_message)
101
102 return await self._dispatch(request_message)
103
104 def _get_worker(self) -> Tuple[Worker, int]:
105 """
106 Get next available worker.
107 By default, this is just a round-robin through all the workers.
108 """
109 worker_pid = next(self._workers_round_robin)
110 return self._workers[worker_pid], worker_pid
111
112 def _workers_queue_monitor(self, worker: Worker, worker_pid: int):
113 """Get metrics from every worker request queue"""
114 queue_size = worker._requests.qsize()
115
116 self.parallel_request_queue_size.labels(workerpid=str(worker_pid)).observe(
117 float(queue_size)
118 )
119
120 async def dispatch_update(
121 self, model_update: ModelUpdateMessage
122 ) -> List[ModelResponseMessage]:
123 return await asyncio.gather(
124 *[
125 self._dispatch_update(worker, model_update)
126 for worker in self._workers.values()
127 ]
128 )
129
130 async def _dispatch_update(
131 self, worker: Worker, model_update: ModelUpdateMessage
132 ) -> ModelResponseMessage:
133 # NOTE: Need to rewrite the UUID to ensure each worker sends back a
134 # unique result
135 worker_update = model_update.copy()
136 worker_update.id = generate_uuid()
137 worker.send_update(worker_update)
138 return await self._dispatch(worker_update)
139
140 async def _dispatch(self, message: Message) -> ModelResponseMessage:
141 loop = asyncio.get_running_loop()
142 async_response = loop.create_future()
143 internal_id = message.id
144 self._async_responses[internal_id] = async_response
145
146 return await self._wait_response(internal_id)
147
148 async def _wait_response(self, internal_id: str) -> ModelResponseMessage:
149 async_response = self._async_responses[internal_id]
150
151 try:
152 inference_response = await async_response
153 return inference_response
154 finally:
155 del self._async_responses[internal_id]
156
157 async def stop(self):
158 self._executor.shutdown()
159 if self._process_responses_task is not None:
160 await cancel_task(self._process_responses_task)
161
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mlserver/parallel/dispatcher.py b/mlserver/parallel/dispatcher.py
--- a/mlserver/parallel/dispatcher.py
+++ b/mlserver/parallel/dispatcher.py
@@ -41,7 +41,6 @@
return Histogram(
QUEUE_METRIC_NAME,
"counter of request queue size for workers",
- ["workerpid"],
registry=REGISTRY,
)
@@ -96,7 +95,6 @@
self, request_message: ModelRequestMessage
) -> ModelResponseMessage:
worker, wpid = self._get_worker()
- self._workers_queue_monitor(worker, wpid)
worker.send_request(request_message)
return await self._dispatch(request_message)
@@ -109,14 +107,6 @@
worker_pid = next(self._workers_round_robin)
return self._workers[worker_pid], worker_pid
- def _workers_queue_monitor(self, worker: Worker, worker_pid: int):
- """Get metrics from every worker request queue"""
- queue_size = worker._requests.qsize()
-
- self.parallel_request_queue_size.labels(workerpid=str(worker_pid)).observe(
- float(queue_size)
- )
-
async def dispatch_update(
self, model_update: ModelUpdateMessage
) -> List[ModelResponseMessage]:
@@ -143,6 +133,8 @@
internal_id = message.id
self._async_responses[internal_id] = async_response
+ # Monitor current in-flight requests
+ self.parallel_request_queue_size.observe(len(self._async_responses))
return await self._wait_response(internal_id)
async def _wait_response(self, internal_id: str) -> ModelResponseMessage:
| {"golden_diff": "diff --git a/mlserver/parallel/dispatcher.py b/mlserver/parallel/dispatcher.py\n--- a/mlserver/parallel/dispatcher.py\n+++ b/mlserver/parallel/dispatcher.py\n@@ -41,7 +41,6 @@\n return Histogram(\n QUEUE_METRIC_NAME,\n \"counter of request queue size for workers\",\n- [\"workerpid\"],\n registry=REGISTRY,\n )\n \n@@ -96,7 +95,6 @@\n self, request_message: ModelRequestMessage\n ) -> ModelResponseMessage:\n worker, wpid = self._get_worker()\n- self._workers_queue_monitor(worker, wpid)\n worker.send_request(request_message)\n \n return await self._dispatch(request_message)\n@@ -109,14 +107,6 @@\n worker_pid = next(self._workers_round_robin)\n return self._workers[worker_pid], worker_pid\n \n- def _workers_queue_monitor(self, worker: Worker, worker_pid: int):\n- \"\"\"Get metrics from every worker request queue\"\"\"\n- queue_size = worker._requests.qsize()\n-\n- self.parallel_request_queue_size.labels(workerpid=str(worker_pid)).observe(\n- float(queue_size)\n- )\n-\n async def dispatch_update(\n self, model_update: ModelUpdateMessage\n ) -> List[ModelResponseMessage]:\n@@ -143,6 +133,8 @@\n internal_id = message.id\n self._async_responses[internal_id] = async_response\n \n+ # Monitor current in-flight requests\n+ self.parallel_request_queue_size.observe(len(self._async_responses))\n return await self._wait_response(internal_id)\n \n async def _wait_response(self, internal_id: str) -> ModelResponseMessage:\n", "issue": "500 Error on MacOS\nI'm running MLServer on MacOS (arm). I was following [Serving XGBoost models](https://mlserver.readthedocs.io/en/latest/examples/xgboost/README.html) example. I receive a 500 error when requesting a model with url `http://localhost:8080/v2/models/mushroom-xgboost/versions/v0.1.0/infer`.\r\n\r\nMLServer throws an error\r\n\r\n```\r\n...\r\n File \"/opt/homebrew/Cellar/[email protected]/3.10.9/Frameworks/Python.framework/Versions/3.10/lib/python3.10/multiprocessing/queues.py\", line 126, in qsize\r\n return self._maxsize - self._sem._semlock._get_value()\r\nNotImplementedError\r\n```\r\n\r\nDevelopers of `queues.py` left a comment that this is broken on MacOS \ud83d\ude2c \r\n\r\n```python\r\n def qsize(self):\r\n # Raises NotImplementedError on Mac OSX because of broken sem_getvalue()\r\n return self._maxsize - self._sem._semlock._get_value()\r\n```\r\n\r\nIs it possible to get around this when running models with MLServer?\r\nThanks!\n", "before_files": [{"content": "import asyncio\n\nfrom typing import Dict, List, Tuple\nfrom itertools import cycle\nfrom multiprocessing import Queue\nfrom concurrent.futures import ThreadPoolExecutor\nfrom asyncio import Future\n\nfrom ..utils import schedule_with_callback, generate_uuid\nfrom ..metrics import REGISTRY\n\nfrom .worker import Worker\nfrom .logging import logger\nfrom .utils import END_OF_QUEUE, cancel_task\nfrom .messages import (\n Message,\n ModelUpdateMessage,\n ModelRequestMessage,\n ModelResponseMessage,\n)\nfrom prometheus_client import Histogram\n\nQUEUE_METRIC_NAME = \"parallel_request_queue\"\n\n\nclass Dispatcher:\n def __init__(self, workers: Dict[int, Worker], responses: Queue):\n self._responses = responses\n self._workers = workers\n self._workers_round_robin = cycle(self._workers.keys())\n self._active = False\n self._process_responses_task = None\n self._executor = ThreadPoolExecutor()\n self._async_responses: Dict[str, Future[ModelResponseMessage]] = {}\n self.parallel_request_queue_size = self._get_or_create_metric()\n\n def _get_or_create_metric(self) -> Histogram:\n if 
QUEUE_METRIC_NAME in REGISTRY:\n return REGISTRY[QUEUE_METRIC_NAME] # type: ignore\n\n return Histogram(\n QUEUE_METRIC_NAME,\n \"counter of request queue size for workers\",\n [\"workerpid\"],\n registry=REGISTRY,\n )\n\n def start(self):\n self._active = True\n self._process_responses_task = schedule_with_callback(\n self._process_responses(), self._process_responses_cb\n )\n\n def _process_responses_cb(self, process_responses):\n try:\n process_responses.result()\n except asyncio.CancelledError:\n # NOTE: The response loop was cancelled from the outside, so don't\n # restart\n return\n except Exception:\n logger.exception(\"Response processing loop crashed. Restarting the loop...\")\n # If process loop crashed, restart it\n self.start()\n\n async def _process_responses(self):\n logger.debug(\"Starting response processing loop...\")\n loop = asyncio.get_event_loop()\n while self._active:\n response = await loop.run_in_executor(self._executor, self._responses.get)\n\n # If the queue gets terminated, detect the \"sentinel value\" and\n # stop reading\n if response is END_OF_QUEUE:\n return\n\n await self._process_response(response)\n\n async def _process_response(self, response: ModelResponseMessage):\n internal_id = response.id\n\n async_response = self._async_responses[internal_id]\n\n # NOTE: Use call_soon_threadsafe to cover cases where `model.predict()`\n # (or other methods) get called from a separate thread (and a separate\n # AsyncIO loop)\n response_loop = async_response.get_loop()\n if response.exception:\n response_loop.call_soon_threadsafe(\n async_response.set_exception, response.exception\n )\n else:\n response_loop.call_soon_threadsafe(async_response.set_result, response)\n\n async def dispatch_request(\n self, request_message: ModelRequestMessage\n ) -> ModelResponseMessage:\n worker, wpid = self._get_worker()\n self._workers_queue_monitor(worker, wpid)\n worker.send_request(request_message)\n\n return await self._dispatch(request_message)\n\n def _get_worker(self) -> Tuple[Worker, int]:\n \"\"\"\n Get next available worker.\n By default, this is just a round-robin through all the workers.\n \"\"\"\n worker_pid = next(self._workers_round_robin)\n return self._workers[worker_pid], worker_pid\n\n def _workers_queue_monitor(self, worker: Worker, worker_pid: int):\n \"\"\"Get metrics from every worker request queue\"\"\"\n queue_size = worker._requests.qsize()\n\n self.parallel_request_queue_size.labels(workerpid=str(worker_pid)).observe(\n float(queue_size)\n )\n\n async def dispatch_update(\n self, model_update: ModelUpdateMessage\n ) -> List[ModelResponseMessage]:\n return await asyncio.gather(\n *[\n self._dispatch_update(worker, model_update)\n for worker in self._workers.values()\n ]\n )\n\n async def _dispatch_update(\n self, worker: Worker, model_update: ModelUpdateMessage\n ) -> ModelResponseMessage:\n # NOTE: Need to rewrite the UUID to ensure each worker sends back a\n # unique result\n worker_update = model_update.copy()\n worker_update.id = generate_uuid()\n worker.send_update(worker_update)\n return await self._dispatch(worker_update)\n\n async def _dispatch(self, message: Message) -> ModelResponseMessage:\n loop = asyncio.get_running_loop()\n async_response = loop.create_future()\n internal_id = message.id\n self._async_responses[internal_id] = async_response\n\n return await self._wait_response(internal_id)\n\n async def _wait_response(self, internal_id: str) -> ModelResponseMessage:\n async_response = self._async_responses[internal_id]\n\n try:\n 
inference_response = await async_response\n return inference_response\n finally:\n del self._async_responses[internal_id]\n\n async def stop(self):\n self._executor.shutdown()\n if self._process_responses_task is not None:\n await cancel_task(self._process_responses_task)\n", "path": "mlserver/parallel/dispatcher.py"}], "after_files": [{"content": "import asyncio\n\nfrom typing import Dict, List, Tuple\nfrom itertools import cycle\nfrom multiprocessing import Queue\nfrom concurrent.futures import ThreadPoolExecutor\nfrom asyncio import Future\n\nfrom ..utils import schedule_with_callback, generate_uuid\nfrom ..metrics import REGISTRY\n\nfrom .worker import Worker\nfrom .logging import logger\nfrom .utils import END_OF_QUEUE, cancel_task\nfrom .messages import (\n Message,\n ModelUpdateMessage,\n ModelRequestMessage,\n ModelResponseMessage,\n)\nfrom prometheus_client import Histogram\n\nQUEUE_METRIC_NAME = \"parallel_request_queue\"\n\n\nclass Dispatcher:\n def __init__(self, workers: Dict[int, Worker], responses: Queue):\n self._responses = responses\n self._workers = workers\n self._workers_round_robin = cycle(self._workers.keys())\n self._active = False\n self._process_responses_task = None\n self._executor = ThreadPoolExecutor()\n self._async_responses: Dict[str, Future[ModelResponseMessage]] = {}\n self.parallel_request_queue_size = self._get_or_create_metric()\n\n def _get_or_create_metric(self) -> Histogram:\n if QUEUE_METRIC_NAME in REGISTRY:\n return REGISTRY[QUEUE_METRIC_NAME] # type: ignore\n\n return Histogram(\n QUEUE_METRIC_NAME,\n \"counter of request queue size for workers\",\n registry=REGISTRY,\n )\n\n def start(self):\n self._active = True\n self._process_responses_task = schedule_with_callback(\n self._process_responses(), self._process_responses_cb\n )\n\n def _process_responses_cb(self, process_responses):\n try:\n process_responses.result()\n except asyncio.CancelledError:\n # NOTE: The response loop was cancelled from the outside, so don't\n # restart\n return\n except Exception:\n logger.exception(\"Response processing loop crashed. 
Restarting the loop...\")\n # If process loop crashed, restart it\n self.start()\n\n async def _process_responses(self):\n logger.debug(\"Starting response processing loop...\")\n loop = asyncio.get_event_loop()\n while self._active:\n response = await loop.run_in_executor(self._executor, self._responses.get)\n\n # If the queue gets terminated, detect the \"sentinel value\" and\n # stop reading\n if response is END_OF_QUEUE:\n return\n\n await self._process_response(response)\n\n async def _process_response(self, response: ModelResponseMessage):\n internal_id = response.id\n\n async_response = self._async_responses[internal_id]\n\n # NOTE: Use call_soon_threadsafe to cover cases where `model.predict()`\n # (or other methods) get called from a separate thread (and a separate\n # AsyncIO loop)\n response_loop = async_response.get_loop()\n if response.exception:\n response_loop.call_soon_threadsafe(\n async_response.set_exception, response.exception\n )\n else:\n response_loop.call_soon_threadsafe(async_response.set_result, response)\n\n async def dispatch_request(\n self, request_message: ModelRequestMessage\n ) -> ModelResponseMessage:\n worker, wpid = self._get_worker()\n worker.send_request(request_message)\n\n return await self._dispatch(request_message)\n\n def _get_worker(self) -> Tuple[Worker, int]:\n \"\"\"\n Get next available worker.\n By default, this is just a round-robin through all the workers.\n \"\"\"\n worker_pid = next(self._workers_round_robin)\n return self._workers[worker_pid], worker_pid\n\n async def dispatch_update(\n self, model_update: ModelUpdateMessage\n ) -> List[ModelResponseMessage]:\n return await asyncio.gather(\n *[\n self._dispatch_update(worker, model_update)\n for worker in self._workers.values()\n ]\n )\n\n async def _dispatch_update(\n self, worker: Worker, model_update: ModelUpdateMessage\n ) -> ModelResponseMessage:\n # NOTE: Need to rewrite the UUID to ensure each worker sends back a\n # unique result\n worker_update = model_update.copy()\n worker_update.id = generate_uuid()\n worker.send_update(worker_update)\n return await self._dispatch(worker_update)\n\n async def _dispatch(self, message: Message) -> ModelResponseMessage:\n loop = asyncio.get_running_loop()\n async_response = loop.create_future()\n internal_id = message.id\n self._async_responses[internal_id] = async_response\n\n # Monitor current in-flight requests\n self.parallel_request_queue_size.observe(len(self._async_responses))\n return await self._wait_response(internal_id)\n\n async def _wait_response(self, internal_id: str) -> ModelResponseMessage:\n async_response = self._async_responses[internal_id]\n\n try:\n inference_response = await async_response\n return inference_response\n finally:\n del self._async_responses[internal_id]\n\n async def stop(self):\n self._executor.shutdown()\n if self._process_responses_task is not None:\n await cancel_task(self._process_responses_task)\n", "path": "mlserver/parallel/dispatcher.py"}]} | 2,029 | 377 |
gh_patches_debug_11912 | rasdani/github-patches | git_diff | ibis-project__ibis-2558 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
link to documentation on http://ibis-project.org/ is broken
Everything under /docs/ (including the tutorial) 404's as of 2020-12-02.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ibis/backends/impala/__init__.py`
Content:
```
1 """Impala backend"""
2 import ibis.common.exceptions as com
3 import ibis.config
4 from ibis.config import options
5
6 # these objects are exposed in the public API and are not used in the module
7 from .client import ( # noqa: F401
8 ImpalaClient,
9 ImpalaConnection,
10 ImpalaDatabase,
11 ImpalaTable,
12 )
13 from .compiler import dialect # noqa: F401
14 from .hdfs import HDFS, WebHDFS, hdfs_connect # noqa: F401
15 from .udf import * # noqa: F401,F403
16
17 with ibis.config.config_prefix('impala'):
18 ibis.config.register_option(
19 'temp_db',
20 '__ibis_tmp',
21 'Database to use for temporary tables, views. functions, etc.',
22 )
23 ibis.config.register_option(
24 'temp_hdfs_path',
25 '/tmp/ibis',
26 'HDFS path for storage of temporary data',
27 )
28
29
30 def compile(expr, params=None):
31 """Force compilation of expression.
32
33 Returns
34 -------
35 str
36
37 """
38 from .compiler import to_sql
39
40 return to_sql(expr, dialect.make_context(params=params))
41
42
43 def verify(expr, params=None):
44 """
45 Determine if expression can be successfully translated to execute on Impala
46 """
47 try:
48 compile(expr, params=params)
49 return True
50 except com.TranslationError:
51 return False
52
53
54 def connect(
55 host='localhost',
56 port=21050,
57 database='default',
58 timeout=45,
59 use_ssl=False,
60 ca_cert=None,
61 user=None,
62 password=None,
63 auth_mechanism='NOSASL',
64 kerberos_service_name='impala',
65 pool_size=8,
66 hdfs_client=None,
67 ):
68 """Create an ImpalaClient for use with Ibis.
69
70 Parameters
71 ----------
72 host : str, optional
73 Host name of the impalad or HiveServer2 in Hive
74 port : int, optional
75 Impala's HiveServer2 port
76 database : str, optional
77 Default database when obtaining new cursors
78 timeout : int, optional
79 Connection timeout in seconds when communicating with HiveServer2
80 use_ssl : bool, optional
81 Use SSL when connecting to HiveServer2
82 ca_cert : str, optional
83 Local path to 3rd party CA certificate or copy of server certificate
84 for self-signed certificates. If SSL is enabled, but this argument is
85 ``None``, then certificate validation is skipped.
86 user : str, optional
87 LDAP user to authenticate
88 password : str, optional
89 LDAP password to authenticate
90 auth_mechanism : str, optional
91 {'NOSASL' <- default, 'PLAIN', 'GSSAPI', 'LDAP'}.
92 Use NOSASL for non-secured Impala connections. Use PLAIN for
93 non-secured Hive clusters. Use LDAP for LDAP authenticated
94 connections. Use GSSAPI for Kerberos-secured clusters.
95 kerberos_service_name : str, optional
96 Specify particular impalad service principal.
97
98 Examples
99 --------
100 >>> import ibis
101 >>> import os
102 >>> hdfs_host = os.environ.get('IBIS_TEST_NN_HOST', 'localhost')
103 >>> hdfs_port = int(os.environ.get('IBIS_TEST_NN_PORT', 50070))
104 >>> impala_host = os.environ.get('IBIS_TEST_IMPALA_HOST', 'localhost')
105 >>> impala_port = int(os.environ.get('IBIS_TEST_IMPALA_PORT', 21050))
106 >>> hdfs = ibis.hdfs_connect(host=hdfs_host, port=hdfs_port)
107 >>> hdfs # doctest: +ELLIPSIS
108 <ibis.filesystems.WebHDFS object at 0x...>
109 >>> client = ibis.impala.connect(
110 ... host=impala_host,
111 ... port=impala_port,
112 ... hdfs_client=hdfs,
113 ... )
114 >>> client # doctest: +ELLIPSIS
115 <ibis.impala.client.ImpalaClient object at 0x...>
116
117 Returns
118 -------
119 ImpalaClient
120 """
121 params = {
122 'host': host,
123 'port': port,
124 'database': database,
125 'timeout': timeout,
126 'use_ssl': use_ssl,
127 'ca_cert': ca_cert,
128 'user': user,
129 'password': password,
130 'auth_mechanism': auth_mechanism,
131 'kerberos_service_name': kerberos_service_name,
132 }
133
134 con = ImpalaConnection(pool_size=pool_size, **params)
135 try:
136 client = ImpalaClient(con, hdfs_client=hdfs_client)
137 except Exception:
138 con.close()
139 raise
140 else:
141 if options.default_backend is None:
142 options.default_backend = client
143
144 return client
145
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ibis/backends/impala/__init__.py b/ibis/backends/impala/__init__.py
--- a/ibis/backends/impala/__init__.py
+++ b/ibis/backends/impala/__init__.py
@@ -103,7 +103,7 @@
>>> hdfs_port = int(os.environ.get('IBIS_TEST_NN_PORT', 50070))
>>> impala_host = os.environ.get('IBIS_TEST_IMPALA_HOST', 'localhost')
>>> impala_port = int(os.environ.get('IBIS_TEST_IMPALA_PORT', 21050))
- >>> hdfs = ibis.hdfs_connect(host=hdfs_host, port=hdfs_port)
+ >>> hdfs = ibis.impala.hdfs_connect(host=hdfs_host, port=hdfs_port)
>>> hdfs # doctest: +ELLIPSIS
<ibis.filesystems.WebHDFS object at 0x...>
>>> client = ibis.impala.connect(
| {"golden_diff": "diff --git a/ibis/backends/impala/__init__.py b/ibis/backends/impala/__init__.py\n--- a/ibis/backends/impala/__init__.py\n+++ b/ibis/backends/impala/__init__.py\n@@ -103,7 +103,7 @@\n >>> hdfs_port = int(os.environ.get('IBIS_TEST_NN_PORT', 50070))\n >>> impala_host = os.environ.get('IBIS_TEST_IMPALA_HOST', 'localhost')\n >>> impala_port = int(os.environ.get('IBIS_TEST_IMPALA_PORT', 21050))\n- >>> hdfs = ibis.hdfs_connect(host=hdfs_host, port=hdfs_port)\n+ >>> hdfs = ibis.impala.hdfs_connect(host=hdfs_host, port=hdfs_port)\n >>> hdfs # doctest: +ELLIPSIS\n <ibis.filesystems.WebHDFS object at 0x...>\n >>> client = ibis.impala.connect(\n", "issue": "link to documentation on http://ibis-project.org/ is broken\nEverything under /docs/ (including the tutorial) 404's as of 2020-12-02.\n", "before_files": [{"content": "\"\"\"Impala backend\"\"\"\nimport ibis.common.exceptions as com\nimport ibis.config\nfrom ibis.config import options\n\n# these objects are exposed in the public API and are not used in the module\nfrom .client import ( # noqa: F401\n ImpalaClient,\n ImpalaConnection,\n ImpalaDatabase,\n ImpalaTable,\n)\nfrom .compiler import dialect # noqa: F401\nfrom .hdfs import HDFS, WebHDFS, hdfs_connect # noqa: F401\nfrom .udf import * # noqa: F401,F403\n\nwith ibis.config.config_prefix('impala'):\n ibis.config.register_option(\n 'temp_db',\n '__ibis_tmp',\n 'Database to use for temporary tables, views. functions, etc.',\n )\n ibis.config.register_option(\n 'temp_hdfs_path',\n '/tmp/ibis',\n 'HDFS path for storage of temporary data',\n )\n\n\ndef compile(expr, params=None):\n \"\"\"Force compilation of expression.\n\n Returns\n -------\n str\n\n \"\"\"\n from .compiler import to_sql\n\n return to_sql(expr, dialect.make_context(params=params))\n\n\ndef verify(expr, params=None):\n \"\"\"\n Determine if expression can be successfully translated to execute on Impala\n \"\"\"\n try:\n compile(expr, params=params)\n return True\n except com.TranslationError:\n return False\n\n\ndef connect(\n host='localhost',\n port=21050,\n database='default',\n timeout=45,\n use_ssl=False,\n ca_cert=None,\n user=None,\n password=None,\n auth_mechanism='NOSASL',\n kerberos_service_name='impala',\n pool_size=8,\n hdfs_client=None,\n):\n \"\"\"Create an ImpalaClient for use with Ibis.\n\n Parameters\n ----------\n host : str, optional\n Host name of the impalad or HiveServer2 in Hive\n port : int, optional\n Impala's HiveServer2 port\n database : str, optional\n Default database when obtaining new cursors\n timeout : int, optional\n Connection timeout in seconds when communicating with HiveServer2\n use_ssl : bool, optional\n Use SSL when connecting to HiveServer2\n ca_cert : str, optional\n Local path to 3rd party CA certificate or copy of server certificate\n for self-signed certificates. If SSL is enabled, but this argument is\n ``None``, then certificate validation is skipped.\n user : str, optional\n LDAP user to authenticate\n password : str, optional\n LDAP password to authenticate\n auth_mechanism : str, optional\n {'NOSASL' <- default, 'PLAIN', 'GSSAPI', 'LDAP'}.\n Use NOSASL for non-secured Impala connections. Use PLAIN for\n non-secured Hive clusters. Use LDAP for LDAP authenticated\n connections. 
Use GSSAPI for Kerberos-secured clusters.\n kerberos_service_name : str, optional\n Specify particular impalad service principal.\n\n Examples\n --------\n >>> import ibis\n >>> import os\n >>> hdfs_host = os.environ.get('IBIS_TEST_NN_HOST', 'localhost')\n >>> hdfs_port = int(os.environ.get('IBIS_TEST_NN_PORT', 50070))\n >>> impala_host = os.environ.get('IBIS_TEST_IMPALA_HOST', 'localhost')\n >>> impala_port = int(os.environ.get('IBIS_TEST_IMPALA_PORT', 21050))\n >>> hdfs = ibis.hdfs_connect(host=hdfs_host, port=hdfs_port)\n >>> hdfs # doctest: +ELLIPSIS\n <ibis.filesystems.WebHDFS object at 0x...>\n >>> client = ibis.impala.connect(\n ... host=impala_host,\n ... port=impala_port,\n ... hdfs_client=hdfs,\n ... )\n >>> client # doctest: +ELLIPSIS\n <ibis.impala.client.ImpalaClient object at 0x...>\n\n Returns\n -------\n ImpalaClient\n \"\"\"\n params = {\n 'host': host,\n 'port': port,\n 'database': database,\n 'timeout': timeout,\n 'use_ssl': use_ssl,\n 'ca_cert': ca_cert,\n 'user': user,\n 'password': password,\n 'auth_mechanism': auth_mechanism,\n 'kerberos_service_name': kerberos_service_name,\n }\n\n con = ImpalaConnection(pool_size=pool_size, **params)\n try:\n client = ImpalaClient(con, hdfs_client=hdfs_client)\n except Exception:\n con.close()\n raise\n else:\n if options.default_backend is None:\n options.default_backend = client\n\n return client\n", "path": "ibis/backends/impala/__init__.py"}], "after_files": [{"content": "\"\"\"Impala backend\"\"\"\nimport ibis.common.exceptions as com\nimport ibis.config\nfrom ibis.config import options\n\n# these objects are exposed in the public API and are not used in the module\nfrom .client import ( # noqa: F401\n ImpalaClient,\n ImpalaConnection,\n ImpalaDatabase,\n ImpalaTable,\n)\nfrom .compiler import dialect # noqa: F401\nfrom .hdfs import HDFS, WebHDFS, hdfs_connect # noqa: F401\nfrom .udf import * # noqa: F401,F403\n\nwith ibis.config.config_prefix('impala'):\n ibis.config.register_option(\n 'temp_db',\n '__ibis_tmp',\n 'Database to use for temporary tables, views. functions, etc.',\n )\n ibis.config.register_option(\n 'temp_hdfs_path',\n '/tmp/ibis',\n 'HDFS path for storage of temporary data',\n )\n\n\ndef compile(expr, params=None):\n \"\"\"Force compilation of expression.\n\n Returns\n -------\n str\n\n \"\"\"\n from .compiler import to_sql\n\n return to_sql(expr, dialect.make_context(params=params))\n\n\ndef verify(expr, params=None):\n \"\"\"\n Determine if expression can be successfully translated to execute on Impala\n \"\"\"\n try:\n compile(expr, params=params)\n return True\n except com.TranslationError:\n return False\n\n\ndef connect(\n host='localhost',\n port=21050,\n database='default',\n timeout=45,\n use_ssl=False,\n ca_cert=None,\n user=None,\n password=None,\n auth_mechanism='NOSASL',\n kerberos_service_name='impala',\n pool_size=8,\n hdfs_client=None,\n):\n \"\"\"Create an ImpalaClient for use with Ibis.\n\n Parameters\n ----------\n host : str, optional\n Host name of the impalad or HiveServer2 in Hive\n port : int, optional\n Impala's HiveServer2 port\n database : str, optional\n Default database when obtaining new cursors\n timeout : int, optional\n Connection timeout in seconds when communicating with HiveServer2\n use_ssl : bool, optional\n Use SSL when connecting to HiveServer2\n ca_cert : str, optional\n Local path to 3rd party CA certificate or copy of server certificate\n for self-signed certificates. 
If SSL is enabled, but this argument is\n ``None``, then certificate validation is skipped.\n user : str, optional\n LDAP user to authenticate\n password : str, optional\n LDAP password to authenticate\n auth_mechanism : str, optional\n {'NOSASL' <- default, 'PLAIN', 'GSSAPI', 'LDAP'}.\n Use NOSASL for non-secured Impala connections. Use PLAIN for\n non-secured Hive clusters. Use LDAP for LDAP authenticated\n connections. Use GSSAPI for Kerberos-secured clusters.\n kerberos_service_name : str, optional\n Specify particular impalad service principal.\n\n Examples\n --------\n >>> import ibis\n >>> import os\n >>> hdfs_host = os.environ.get('IBIS_TEST_NN_HOST', 'localhost')\n >>> hdfs_port = int(os.environ.get('IBIS_TEST_NN_PORT', 50070))\n >>> impala_host = os.environ.get('IBIS_TEST_IMPALA_HOST', 'localhost')\n >>> impala_port = int(os.environ.get('IBIS_TEST_IMPALA_PORT', 21050))\n >>> hdfs = ibis.impala.hdfs_connect(host=hdfs_host, port=hdfs_port)\n >>> hdfs # doctest: +ELLIPSIS\n <ibis.filesystems.WebHDFS object at 0x...>\n >>> client = ibis.impala.connect(\n ... host=impala_host,\n ... port=impala_port,\n ... hdfs_client=hdfs,\n ... )\n >>> client # doctest: +ELLIPSIS\n <ibis.impala.client.ImpalaClient object at 0x...>\n\n Returns\n -------\n ImpalaClient\n \"\"\"\n params = {\n 'host': host,\n 'port': port,\n 'database': database,\n 'timeout': timeout,\n 'use_ssl': use_ssl,\n 'ca_cert': ca_cert,\n 'user': user,\n 'password': password,\n 'auth_mechanism': auth_mechanism,\n 'kerberos_service_name': kerberos_service_name,\n }\n\n con = ImpalaConnection(pool_size=pool_size, **params)\n try:\n client = ImpalaClient(con, hdfs_client=hdfs_client)\n except Exception:\n con.close()\n raise\n else:\n if options.default_backend is None:\n options.default_backend = client\n\n return client\n", "path": "ibis/backends/impala/__init__.py"}]} | 1,710 | 230 |
gh_patches_debug_26835 | rasdani/github-patches | git_diff | azavea__raster-vision-536 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Avoid error when working with subtypes for SemanticSegmentationRasterSource
Here: https://github.com/azavea/raster-vision/blob/f6ea64a37fd4d09375da1838cd679e6cbce5b35b/rastervision/data/label_source/semantic_segmentation_raster_source_config.py#L123
We check for the type explicitly. We should use `isinstance` instead to allow subclasses to pass this check - or figure out a more general way of letting other types bypass having to set the rgb class map.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `rastervision/data/label_source/semantic_segmentation_raster_source_config.py`
Content:
```
1 from copy import deepcopy
2
3 import rastervision as rv
4 from rastervision.core.class_map import ClassMap
5 from rastervision.data.label_source import (LabelSourceConfig,
6 LabelSourceConfigBuilder,
7 SemanticSegmentationRasterSource)
8 from rastervision.protos.label_source_pb2 import LabelSourceConfig as LabelSourceConfigMsg
9 from rastervision.data.raster_source import RasterSourceConfig, GeoJSONSourceConfig
10
11
12 class SemanticSegmentationRasterSourceConfig(LabelSourceConfig):
13 def __init__(self, source, rgb_class_map=None):
14 super().__init__(source_type=rv.SEMANTIC_SEGMENTATION_RASTER)
15 self.source = source
16 self.rgb_class_map = rgb_class_map
17
18 def to_proto(self):
19 msg = super().to_proto()
20
21 rgb_class_items = None
22 if self.rgb_class_map is not None:
23 rgb_class_items = self.rgb_class_map.to_proto()
24 opts = LabelSourceConfigMsg.SemanticSegmentationRasterSource(
25 source=self.source.to_proto(), rgb_class_items=rgb_class_items)
26 msg.semantic_segmentation_raster_source.CopyFrom(opts)
27 return msg
28
29 def create_source(self, task_config, extent, crs_transformer, tmp_dir):
30 return SemanticSegmentationRasterSource(
31 self.source.create_source(tmp_dir, extent, crs_transformer),
32 self.rgb_class_map)
33
34 def update_for_command(self, command_type, experiment_config, context=[]):
35 if context is None:
36 context = []
37 context = context + [self]
38 io_def = rv.core.CommandIODefinition()
39
40 b = self.to_builder()
41
42 (new_raster_source, sub_io_def) = self.source.update_for_command(
43 command_type, experiment_config, context)
44
45 io_def.merge(sub_io_def)
46 b = b.with_raster_source(new_raster_source)
47
48 return (b.build(), io_def)
49
50
51 class SemanticSegmentationRasterSourceConfigBuilder(LabelSourceConfigBuilder):
52 def __init__(self, prev=None):
53 config = {}
54 if prev:
55 config = {
56 'source': prev.source,
57 'rgb_class_map': prev.rgb_class_map
58 }
59
60 super().__init__(SemanticSegmentationRasterSourceConfig, config)
61
62 def from_proto(self, msg):
63 b = SemanticSegmentationRasterSourceConfigBuilder()
64
65 raster_source_config = rv.RasterSourceConfig.from_proto(
66 msg.semantic_segmentation_raster_source.source)
67
68 b = b.with_raster_source(raster_source_config)
69 rgb_class_items = msg.semantic_segmentation_raster_source.rgb_class_items
70 if rgb_class_items:
71 b = b.with_rgb_class_map(
72 ClassMap.construct_from(list(rgb_class_items)))
73
74 return b
75
76 def with_raster_source(self, source, channel_order=None):
77 """Set raster_source.
78
79 Args:
80 source: (RasterSourceConfig) A RasterSource assumed to have RGB values that
81 are mapped to class_ids using the rgb_class_map.
82
83 Returns:
84 SemanticSegmentationRasterSourceConfigBuilder
85 """
86 b = deepcopy(self)
87 if isinstance(source, RasterSourceConfig):
88 b.config['source'] = source
89 elif isinstance(source, str):
90 provider = rv._registry.get_raster_source_default_provider(source)
91 source = provider.construct(source, channel_order=channel_order)
92 b.config['source'] = source
93 else:
94 raise rv.ConfigError(
95 'source must be either string or RasterSourceConfig, '
96 ' not {}'.format(str(type(source))))
97
98 return b
99
100 def with_rgb_class_map(self, rgb_class_map):
101 """Set rgb_class_map.
102
103 Args:
104 rgb_class_map: (something accepted by ClassMap.construct_from) a class
105 map with color values used to map RGB values to class ids
106
107 Returns:
108 SemanticSegmentationRasterSourceConfigBuilder
109 """
110 b = deepcopy(self)
111 b.config['rgb_class_map'] = ClassMap.construct_from(rgb_class_map)
112 return b
113
114 def validate(self):
115 source = self.config.get('source')
116 rgb_class_map = self.config.get('rgb_class_map')
117
118 if source is None:
119 raise rv.ConfigError(
120 'You must set the source for SemanticSegmentationRasterSourceConfig'
121 ' Use "with_raster_source".')
122
123 if type(source) != GeoJSONSourceConfig and rgb_class_map is None:
124 raise rv.ConfigError(
125 'You must set the rgb_class_map for '
126 'SemanticSegmentationRasterSourceConfig. Use "with_rgb_class_map".'
127 )
128
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/rastervision/data/label_source/semantic_segmentation_raster_source_config.py b/rastervision/data/label_source/semantic_segmentation_raster_source_config.py
--- a/rastervision/data/label_source/semantic_segmentation_raster_source_config.py
+++ b/rastervision/data/label_source/semantic_segmentation_raster_source_config.py
@@ -6,7 +6,7 @@
LabelSourceConfigBuilder,
SemanticSegmentationRasterSource)
from rastervision.protos.label_source_pb2 import LabelSourceConfig as LabelSourceConfigMsg
-from rastervision.data.raster_source import RasterSourceConfig, GeoJSONSourceConfig
+from rastervision.data.raster_source import RasterSourceConfig
class SemanticSegmentationRasterSourceConfig(LabelSourceConfig):
@@ -113,15 +113,8 @@
def validate(self):
source = self.config.get('source')
- rgb_class_map = self.config.get('rgb_class_map')
if source is None:
raise rv.ConfigError(
'You must set the source for SemanticSegmentationRasterSourceConfig'
' Use "with_raster_source".')
-
- if type(source) != GeoJSONSourceConfig and rgb_class_map is None:
- raise rv.ConfigError(
- 'You must set the rgb_class_map for '
- 'SemanticSegmentationRasterSourceConfig. Use "with_rgb_class_map".'
- )
| {"golden_diff": "diff --git a/rastervision/data/label_source/semantic_segmentation_raster_source_config.py b/rastervision/data/label_source/semantic_segmentation_raster_source_config.py\n--- a/rastervision/data/label_source/semantic_segmentation_raster_source_config.py\n+++ b/rastervision/data/label_source/semantic_segmentation_raster_source_config.py\n@@ -6,7 +6,7 @@\n LabelSourceConfigBuilder,\n SemanticSegmentationRasterSource)\n from rastervision.protos.label_source_pb2 import LabelSourceConfig as LabelSourceConfigMsg\n-from rastervision.data.raster_source import RasterSourceConfig, GeoJSONSourceConfig\n+from rastervision.data.raster_source import RasterSourceConfig\n \n \n class SemanticSegmentationRasterSourceConfig(LabelSourceConfig):\n@@ -113,15 +113,8 @@\n \n def validate(self):\n source = self.config.get('source')\n- rgb_class_map = self.config.get('rgb_class_map')\n \n if source is None:\n raise rv.ConfigError(\n 'You must set the source for SemanticSegmentationRasterSourceConfig'\n ' Use \"with_raster_source\".')\n-\n- if type(source) != GeoJSONSourceConfig and rgb_class_map is None:\n- raise rv.ConfigError(\n- 'You must set the rgb_class_map for '\n- 'SemanticSegmentationRasterSourceConfig. Use \"with_rgb_class_map\".'\n- )\n", "issue": "Avoid error when working with subtypes for SemanticSegmentationRasterSource\nHere: https://github.com/azavea/raster-vision/blob/f6ea64a37fd4d09375da1838cd679e6cbce5b35b/rastervision/data/label_source/semantic_segmentation_raster_source_config.py#L123\r\n\r\nWe check for the type explicitly. We should use `isinstance` instead to allow for subclasses to pass this check - or figure out a more general way of not having other types and allowing them to bypass having to set the rgb class map.\n", "before_files": [{"content": "from copy import deepcopy\n\nimport rastervision as rv\nfrom rastervision.core.class_map import ClassMap\nfrom rastervision.data.label_source import (LabelSourceConfig,\n LabelSourceConfigBuilder,\n SemanticSegmentationRasterSource)\nfrom rastervision.protos.label_source_pb2 import LabelSourceConfig as LabelSourceConfigMsg\nfrom rastervision.data.raster_source import RasterSourceConfig, GeoJSONSourceConfig\n\n\nclass SemanticSegmentationRasterSourceConfig(LabelSourceConfig):\n def __init__(self, source, rgb_class_map=None):\n super().__init__(source_type=rv.SEMANTIC_SEGMENTATION_RASTER)\n self.source = source\n self.rgb_class_map = rgb_class_map\n\n def to_proto(self):\n msg = super().to_proto()\n\n rgb_class_items = None\n if self.rgb_class_map is not None:\n rgb_class_items = self.rgb_class_map.to_proto()\n opts = LabelSourceConfigMsg.SemanticSegmentationRasterSource(\n source=self.source.to_proto(), rgb_class_items=rgb_class_items)\n msg.semantic_segmentation_raster_source.CopyFrom(opts)\n return msg\n\n def create_source(self, task_config, extent, crs_transformer, tmp_dir):\n return SemanticSegmentationRasterSource(\n self.source.create_source(tmp_dir, extent, crs_transformer),\n self.rgb_class_map)\n\n def update_for_command(self, command_type, experiment_config, context=[]):\n if context is None:\n context = []\n context = context + [self]\n io_def = rv.core.CommandIODefinition()\n\n b = self.to_builder()\n\n (new_raster_source, sub_io_def) = self.source.update_for_command(\n command_type, experiment_config, context)\n\n io_def.merge(sub_io_def)\n b = b.with_raster_source(new_raster_source)\n\n return (b.build(), io_def)\n\n\nclass SemanticSegmentationRasterSourceConfigBuilder(LabelSourceConfigBuilder):\n def 
__init__(self, prev=None):\n config = {}\n if prev:\n config = {\n 'source': prev.source,\n 'rgb_class_map': prev.rgb_class_map\n }\n\n super().__init__(SemanticSegmentationRasterSourceConfig, config)\n\n def from_proto(self, msg):\n b = SemanticSegmentationRasterSourceConfigBuilder()\n\n raster_source_config = rv.RasterSourceConfig.from_proto(\n msg.semantic_segmentation_raster_source.source)\n\n b = b.with_raster_source(raster_source_config)\n rgb_class_items = msg.semantic_segmentation_raster_source.rgb_class_items\n if rgb_class_items:\n b = b.with_rgb_class_map(\n ClassMap.construct_from(list(rgb_class_items)))\n\n return b\n\n def with_raster_source(self, source, channel_order=None):\n \"\"\"Set raster_source.\n\n Args:\n source: (RasterSourceConfig) A RasterSource assumed to have RGB values that\n are mapped to class_ids using the rgb_class_map.\n\n Returns:\n SemanticSegmentationRasterSourceConfigBuilder\n \"\"\"\n b = deepcopy(self)\n if isinstance(source, RasterSourceConfig):\n b.config['source'] = source\n elif isinstance(source, str):\n provider = rv._registry.get_raster_source_default_provider(source)\n source = provider.construct(source, channel_order=channel_order)\n b.config['source'] = source\n else:\n raise rv.ConfigError(\n 'source must be either string or RasterSourceConfig, '\n ' not {}'.format(str(type(source))))\n\n return b\n\n def with_rgb_class_map(self, rgb_class_map):\n \"\"\"Set rgb_class_map.\n\n Args:\n rgb_class_map: (something accepted by ClassMap.construct_from) a class\n map with color values used to map RGB values to class ids\n\n Returns:\n SemanticSegmentationRasterSourceConfigBuilder\n \"\"\"\n b = deepcopy(self)\n b.config['rgb_class_map'] = ClassMap.construct_from(rgb_class_map)\n return b\n\n def validate(self):\n source = self.config.get('source')\n rgb_class_map = self.config.get('rgb_class_map')\n\n if source is None:\n raise rv.ConfigError(\n 'You must set the source for SemanticSegmentationRasterSourceConfig'\n ' Use \"with_raster_source\".')\n\n if type(source) != GeoJSONSourceConfig and rgb_class_map is None:\n raise rv.ConfigError(\n 'You must set the rgb_class_map for '\n 'SemanticSegmentationRasterSourceConfig. 
Use \"with_rgb_class_map\".'\n )\n", "path": "rastervision/data/label_source/semantic_segmentation_raster_source_config.py"}], "after_files": [{"content": "from copy import deepcopy\n\nimport rastervision as rv\nfrom rastervision.core.class_map import ClassMap\nfrom rastervision.data.label_source import (LabelSourceConfig,\n LabelSourceConfigBuilder,\n SemanticSegmentationRasterSource)\nfrom rastervision.protos.label_source_pb2 import LabelSourceConfig as LabelSourceConfigMsg\nfrom rastervision.data.raster_source import RasterSourceConfig\n\n\nclass SemanticSegmentationRasterSourceConfig(LabelSourceConfig):\n def __init__(self, source, rgb_class_map=None):\n super().__init__(source_type=rv.SEMANTIC_SEGMENTATION_RASTER)\n self.source = source\n self.rgb_class_map = rgb_class_map\n\n def to_proto(self):\n msg = super().to_proto()\n\n rgb_class_items = None\n if self.rgb_class_map is not None:\n rgb_class_items = self.rgb_class_map.to_proto()\n opts = LabelSourceConfigMsg.SemanticSegmentationRasterSource(\n source=self.source.to_proto(), rgb_class_items=rgb_class_items)\n msg.semantic_segmentation_raster_source.CopyFrom(opts)\n return msg\n\n def create_source(self, task_config, extent, crs_transformer, tmp_dir):\n return SemanticSegmentationRasterSource(\n self.source.create_source(tmp_dir, extent, crs_transformer),\n self.rgb_class_map)\n\n def update_for_command(self, command_type, experiment_config, context=[]):\n if context is None:\n context = []\n context = context + [self]\n io_def = rv.core.CommandIODefinition()\n\n b = self.to_builder()\n\n (new_raster_source, sub_io_def) = self.source.update_for_command(\n command_type, experiment_config, context)\n\n io_def.merge(sub_io_def)\n b = b.with_raster_source(new_raster_source)\n\n return (b.build(), io_def)\n\n\nclass SemanticSegmentationRasterSourceConfigBuilder(LabelSourceConfigBuilder):\n def __init__(self, prev=None):\n config = {}\n if prev:\n config = {\n 'source': prev.source,\n 'rgb_class_map': prev.rgb_class_map\n }\n\n super().__init__(SemanticSegmentationRasterSourceConfig, config)\n\n def from_proto(self, msg):\n b = SemanticSegmentationRasterSourceConfigBuilder()\n\n raster_source_config = rv.RasterSourceConfig.from_proto(\n msg.semantic_segmentation_raster_source.source)\n\n b = b.with_raster_source(raster_source_config)\n rgb_class_items = msg.semantic_segmentation_raster_source.rgb_class_items\n if rgb_class_items:\n b = b.with_rgb_class_map(\n ClassMap.construct_from(list(rgb_class_items)))\n\n return b\n\n def with_raster_source(self, source, channel_order=None):\n \"\"\"Set raster_source.\n\n Args:\n source: (RasterSourceConfig) A RasterSource assumed to have RGB values that\n are mapped to class_ids using the rgb_class_map.\n\n Returns:\n SemanticSegmentationRasterSourceConfigBuilder\n \"\"\"\n b = deepcopy(self)\n if isinstance(source, RasterSourceConfig):\n b.config['source'] = source\n elif isinstance(source, str):\n provider = rv._registry.get_raster_source_default_provider(source)\n source = provider.construct(source, channel_order=channel_order)\n b.config['source'] = source\n else:\n raise rv.ConfigError(\n 'source must be either string or RasterSourceConfig, '\n ' not {}'.format(str(type(source))))\n\n return b\n\n def with_rgb_class_map(self, rgb_class_map):\n \"\"\"Set rgb_class_map.\n\n Args:\n rgb_class_map: (something accepted by ClassMap.construct_from) a class\n map with color values used to map RGB values to class ids\n\n Returns:\n SemanticSegmentationRasterSourceConfigBuilder\n \"\"\"\n b = 
deepcopy(self)\n b.config['rgb_class_map'] = ClassMap.construct_from(rgb_class_map)\n return b\n\n def validate(self):\n source = self.config.get('source')\n\n if source is None:\n raise rv.ConfigError(\n 'You must set the source for SemanticSegmentationRasterSourceConfig'\n ' Use \"with_raster_source\".')\n", "path": "rastervision/data/label_source/semantic_segmentation_raster_source_config.py"}]} | 1,660 | 313 |
gh_patches_debug_28879 | rasdani/github-patches | git_diff | python-geeks__Automation-scripts-885 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
yaml_to_json add command line interface
**Describe the bug**
yaml_to_json currently only allows entering a filename. It is not convenient and cannot be used with bash autocomplete.
**To Reproduce**
**Expected behavior**
Application should accept command line arguments with filenames
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Additional context**
Add any other context about the problem here.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `yaml_to_json/yaml_to_json.py`
Content:
```
1 from ruyaml import YAML
2 import json
3
4
5 def get_yaml_data():
6 yaml_name = input("Enter the yaml file name: ")
7
8 try:
9 with open(yaml_name, "r+") as f:
10 yaml_data = YAML().load(f)
11 return yaml_data
12 except: # noqa
13 print("Invalid input enter a valid yaml file name e.g. example.yaml")
14 yaml_data = get_yaml_data()
15
16
17 def convert_to_json(yaml_data):
18 json_name = input("Enter the name of output json file: ")
19
20 try:
21 with open(json_name, "w+") as o:
22 o.write(json.dumps(yaml_data))
23 except: # noqa
24 print("Invalid input enter a valid json file name e.g. example.json")
25 convert_to_json(yaml_data)
26
27
28 yaml_data = get_yaml_data()
29 convert_to_json(yaml_data)
30
31 print("Your yaml file has been converted and saved as json")
32
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/yaml_to_json/yaml_to_json.py b/yaml_to_json/yaml_to_json.py
--- a/yaml_to_json/yaml_to_json.py
+++ b/yaml_to_json/yaml_to_json.py
@@ -1,9 +1,11 @@
from ruyaml import YAML
+import argparse
import json
-def get_yaml_data():
- yaml_name = input("Enter the yaml file name: ")
+def get_yaml_data(yaml_name=None):
+ if not yaml_name:
+ yaml_name = input("Enter the yaml file name: ")
try:
with open(yaml_name, "r+") as f:
@@ -14,18 +16,34 @@
yaml_data = get_yaml_data()
-def convert_to_json(yaml_data):
- json_name = input("Enter the name of output json file: ")
+def convert_to_json(yaml_data, json_name=None, intent=None):
+ if not json_name:
+ json_name = input("Enter the name of output json file: ")
try:
with open(json_name, "w+") as o:
- o.write(json.dumps(yaml_data))
+ o.write(json.dumps(yaml_data, indent=intent))
except: # noqa
print("Invalid input enter a valid json file name e.g. example.json")
convert_to_json(yaml_data)
-yaml_data = get_yaml_data()
-convert_to_json(yaml_data)
+def main():
+ parser = argparse.ArgumentParser(description='Convert YAML file to JSON')
+ parser.add_argument('--yaml', type=str, help='YAML filename')
+ parser.add_argument('--json', type=str, help='JSON filename')
+ parser.add_argument('--intent', type=int, help="intent value for JSON")
+ args = parser.parse_args()
-print("Your yaml file has been converted and saved as json")
+ yaml_name = args.yaml
+ json_name = args.json
+ intent = args.intent
+
+ yaml_data = get_yaml_data(yaml_name)
+ convert_to_json(yaml_data, json_name, intent=intent)
+
+ print("Your yaml file has been converted and saved as json")
+
+
+if __name__ == "__main__":
+ main()
| {"golden_diff": "diff --git a/yaml_to_json/yaml_to_json.py b/yaml_to_json/yaml_to_json.py\n--- a/yaml_to_json/yaml_to_json.py\n+++ b/yaml_to_json/yaml_to_json.py\n@@ -1,9 +1,11 @@\n from ruyaml import YAML\n+import argparse\n import json\n \n \n-def get_yaml_data():\n- yaml_name = input(\"Enter the yaml file name: \")\n+def get_yaml_data(yaml_name=None):\n+ if not yaml_name:\n+ yaml_name = input(\"Enter the yaml file name: \")\n \n try:\n with open(yaml_name, \"r+\") as f:\n@@ -14,18 +16,34 @@\n yaml_data = get_yaml_data()\n \n \n-def convert_to_json(yaml_data):\n- json_name = input(\"Enter the name of output json file: \")\n+def convert_to_json(yaml_data, json_name=None, intent=None):\n+ if not json_name:\n+ json_name = input(\"Enter the name of output json file: \")\n \n try:\n with open(json_name, \"w+\") as o:\n- o.write(json.dumps(yaml_data))\n+ o.write(json.dumps(yaml_data, indent=intent))\n except: # noqa\n print(\"Invalid input enter a valid json file name e.g. example.json\")\n convert_to_json(yaml_data)\n \n \n-yaml_data = get_yaml_data()\n-convert_to_json(yaml_data)\n+def main():\n+ parser = argparse.ArgumentParser(description='Convert YAML file to JSON')\n+ parser.add_argument('--yaml', type=str, help='YAML filename')\n+ parser.add_argument('--json', type=str, help='JSON filename')\n+ parser.add_argument('--intent', type=int, help=\"intent value for JSON\")\n+ args = parser.parse_args()\n \n-print(\"Your yaml file has been converted and saved as json\")\n+ yaml_name = args.yaml\n+ json_name = args.json\n+ intent = args.intent\n+\n+ yaml_data = get_yaml_data(yaml_name)\n+ convert_to_json(yaml_data, json_name, intent=intent)\n+\n+ print(\"Your yaml file has been converted and saved as json\")\n+\n+\n+if __name__ == \"__main__\":\n+ main()\n", "issue": "yaml_to_json add command line interface\n**Describe the bug**\r\nyaml_to_json currently only allow to enter filename. It is not convenient and cannot be used with bash autocomplete \r\n\r\n**To Reproduce**\r\n\r\n**Expected behavior**\r\nApplication should accept command line arguments with filenames\r\n\r\n**Screenshots**\r\nIf applicable, add screenshots to help explain your problem.\r\n\r\n**Additional context**\r\nAdd any other context about the problem here.\n", "before_files": [{"content": "from ruyaml import YAML\nimport json\n\n\ndef get_yaml_data():\n yaml_name = input(\"Enter the yaml file name: \")\n\n try:\n with open(yaml_name, \"r+\") as f:\n yaml_data = YAML().load(f)\n return yaml_data\n except: # noqa\n print(\"Invalid input enter a valid yaml file name e.g. example.yaml\")\n yaml_data = get_yaml_data()\n\n\ndef convert_to_json(yaml_data):\n json_name = input(\"Enter the name of output json file: \")\n\n try:\n with open(json_name, \"w+\") as o:\n o.write(json.dumps(yaml_data))\n except: # noqa\n print(\"Invalid input enter a valid json file name e.g. example.json\")\n convert_to_json(yaml_data)\n\n\nyaml_data = get_yaml_data()\nconvert_to_json(yaml_data)\n\nprint(\"Your yaml file has been converted and saved as json\")\n", "path": "yaml_to_json/yaml_to_json.py"}], "after_files": [{"content": "from ruyaml import YAML\nimport argparse\nimport json\n\n\ndef get_yaml_data(yaml_name=None):\n if not yaml_name:\n yaml_name = input(\"Enter the yaml file name: \")\n\n try:\n with open(yaml_name, \"r+\") as f:\n yaml_data = YAML().load(f)\n return yaml_data\n except: # noqa\n print(\"Invalid input enter a valid yaml file name e.g. 
example.yaml\")\n yaml_data = get_yaml_data()\n\n\ndef convert_to_json(yaml_data, json_name=None, intent=None):\n if not json_name:\n json_name = input(\"Enter the name of output json file: \")\n\n try:\n with open(json_name, \"w+\") as o:\n o.write(json.dumps(yaml_data, indent=intent))\n except: # noqa\n print(\"Invalid input enter a valid json file name e.g. example.json\")\n convert_to_json(yaml_data)\n\n\ndef main():\n parser = argparse.ArgumentParser(description='Convert YAML file to JSON')\n parser.add_argument('--yaml', type=str, help='YAML filename')\n parser.add_argument('--json', type=str, help='JSON filename')\n parser.add_argument('--intent', type=int, help=\"intent value for JSON\")\n args = parser.parse_args()\n\n yaml_name = args.yaml\n json_name = args.json\n intent = args.intent\n\n yaml_data = get_yaml_data(yaml_name)\n convert_to_json(yaml_data, json_name, intent=intent)\n\n print(\"Your yaml file has been converted and saved as json\")\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "yaml_to_json/yaml_to_json.py"}]} | 604 | 490 |
gh_patches_debug_12937 | rasdani/github-patches | git_diff | conan-io__conan-center-index-1973 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[package] xorg/system: Can you add support for dnf package manager?
<!--
Please don't forget to update the issue title.
Include all applicable information to help us reproduce your problem.
-->
Fedora uses the dnf package manager instead of yum, although yum exists in Fedora too. Also, dnf uses the same package names as yum, so maybe you could change line 42 like this:
```python
elif tools.os_info.with_yum or tools.os_info.with_dnf:
...
```
In addition, could you also add support for `FreeBSD pkg`? I think in `pkg` this package name is just `xorg`.
### Package and Environment Details (include every applicable attribute)
* Package Name/Version: **xorg/system**
* Operating System+version: **Fedora 32**
* Compiler+version: **GCC 10**
* Conan version: **conan 1.26.0**
* Python version: **Python 3.8.3**
### Conan profile (output of `conan profile show default` or `conan profile show <profile>` if custom profile is in use)
```
arch=x86_64
arch_build=x86_64
build_type=Release
compiler=gcc
compiler.libcxx=libstdc++
compiler.version=10
os=Linux
os_build=Linux
```
### Steps to reproduce (Include if Applicable)
When I try to install xorg/system
`conan install xorg/system@ --build missing`
### Logs (Include/Attach if Applicable)
<details><summary>Click to expand log</summary>
```
Configuration:
[settings]
arch=x86_64
arch_build=x86_64
build_type=Release
compiler=gcc
compiler.libcxx=libstdc++
compiler.version=10
os=Linux
os_build=Linux
[options]
[build_requires]
[env]
Installing package: xorg/system
Requirements
xorg/system from 'conan-center' - Cache
Packages
xorg/system:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9 - Cache
Installing (downloading, building) binaries...
xorg/system: Already installed!
ERROR: xorg/system: Error in package_info() method, line 57
self._fill_cppinfo_from_pkgconfig(name)
while calling '_fill_cppinfo_from_pkgconfig', line 18
if not pkg_config.provides:
ConanException: pkg-config command ['pkg-config', '--print-provides', 'sm', '--print-errors'] failed with error: Command 'pkg-config --print-provides sm --print-errors' returned non-zero exit status 1.
Package sm was not found in the pkg-config search path.
Perhaps you should add the directory containing `sm.pc'
to the PKG_CONFIG_PATH environment variable
Package 'sm', required by 'virtual:world', not found
```
</details>
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `recipes/xorg/all/conanfile.py`
Content:
```
1 from conans import ConanFile, tools
2 from conans.errors import ConanException
3
4
5 class ConanXOrg(ConanFile):
6 name = "xorg"
7 url = "https://github.com/conan-io/conan-center-index"
8 license = "MIT"
9 homepage = "https://www.x.org/wiki/"
10 description = "The X.Org project provides an open source implementation of the X Window System."
11 settings = {"os": "Linux"}
12
13 def package_id(self):
14 self.info.header_only()
15
16 def _fill_cppinfo_from_pkgconfig(self, name):
17 pkg_config = tools.PkgConfig(name)
18 if not pkg_config.provides:
19 raise ConanException("OpenGL development files aren't available, give up")
20 libs = [lib[2:] for lib in pkg_config.libs_only_l]
21 lib_dirs = [lib[2:] for lib in pkg_config.libs_only_L]
22 ldflags = [flag for flag in pkg_config.libs_only_other]
23 include_dirs = [include[2:] for include in pkg_config.cflags_only_I]
24 cflags = [flag for flag in pkg_config.cflags_only_other if not flag.startswith("-D")]
25 defines = [flag[2:] for flag in pkg_config.cflags_only_other if flag.startswith("-D")]
26
27 self.cpp_info.system_libs.extend(libs)
28 self.cpp_info.libdirs.extend(lib_dirs)
29 self.cpp_info.sharedlinkflags.extend(ldflags)
30 self.cpp_info.exelinkflags.extend(ldflags)
31 self.cpp_info.defines.extend(defines)
32 self.cpp_info.includedirs.extend(include_dirs)
33 self.cpp_info.cflags.extend(cflags)
34 self.cpp_info.cxxflags.extend(cflags)
35
36
37 def system_requirements(self):
38 if tools.os_info.is_linux and self.settings.os == "Linux":
39 package_tool = tools.SystemPackageTool(conanfile=self, default_mode="verify")
40 if tools.os_info.with_apt:
41 packages = ["xorg-dev", "libx11-xcb-dev", "libxcb-render0-dev", "libxcb-render-util0-dev"]
42 elif tools.os_info.with_yum:
43 packages = ["xorg-x11-server-devel"]
44 elif tools.os_info.with_pacman:
45 packages = ["xorg-server-devel"]
46 elif tools.os_info.with_zypper:
47 packages = ["Xorg-x11-devel"]
48 else:
49 self.output.warn("Do not know how to install 'xorg' for {}.".format(tools.os_info.linux_distro))
50 for p in packages:
51 package_tool.install(update=True, packages=p)
52
53 def package_info(self):
54 for name in ["x11", "x11-xcb", "dmx", "fontenc", "libfs", "ice", "sm", "xau", "xaw7",
55 "xcomposite","xcursor", "xdamage", "xdmcp", "xext", "xfixes", "xft", "xi",
56 "xinerama", "xkbfile", "xmu", "xmuu", "xpm", "xrandr", "xrender", "xres",
57 "xscrnsaver", "xt", "xtst", "xv", "xvmc", "xxf86dga", "xxf86vm", "xtrans"]:
58 self._fill_cppinfo_from_pkgconfig(name)
59
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/recipes/xorg/all/conanfile.py b/recipes/xorg/all/conanfile.py
--- a/recipes/xorg/all/conanfile.py
+++ b/recipes/xorg/all/conanfile.py
@@ -39,7 +39,7 @@
package_tool = tools.SystemPackageTool(conanfile=self, default_mode="verify")
if tools.os_info.with_apt:
packages = ["xorg-dev", "libx11-xcb-dev", "libxcb-render0-dev", "libxcb-render-util0-dev"]
- elif tools.os_info.with_yum:
+ elif tools.os_info.with_yum or tools.os_info.with_dnf:
packages = ["xorg-x11-server-devel"]
elif tools.os_info.with_pacman:
packages = ["xorg-server-devel"]
| {"golden_diff": "diff --git a/recipes/xorg/all/conanfile.py b/recipes/xorg/all/conanfile.py\n--- a/recipes/xorg/all/conanfile.py\n+++ b/recipes/xorg/all/conanfile.py\n@@ -39,7 +39,7 @@\n package_tool = tools.SystemPackageTool(conanfile=self, default_mode=\"verify\")\n if tools.os_info.with_apt:\n packages = [\"xorg-dev\", \"libx11-xcb-dev\", \"libxcb-render0-dev\", \"libxcb-render-util0-dev\"]\n- elif tools.os_info.with_yum:\n+ elif tools.os_info.with_yum or tools.os_info.with_dnf:\n packages = [\"xorg-x11-server-devel\"]\n elif tools.os_info.with_pacman:\n packages = [\"xorg-server-devel\"]\n", "issue": "[package] xorg/system: Can you add support for dnf package manager?\n<!-- \r\n Please don't forget to update the issue title.\r\n Include all applicable information to help us reproduce your problem.\r\n-->\r\n\r\nFedora uses the dnf package manager instead of yum, although yum exists in Fedora too. Also, dnf uses the same package names as yum, so maybe you could change line 42 like this:\r\n```python\r\nelif tools.os_info.with_yum or tools.os_info.with_dnf:\r\n ...\r\n```\r\nIn addition, could you also add support for `FreeBSD pkg`? I think in `pkg` this package name is just `xorg`.\r\n\r\n### Package and Environment Details (include every applicable attribute)\r\n * Package Name/Version: **xorg/system**\r\n * Operating System+version: **Fedora 32**\r\n * Compiler+version: **GCC 10**\r\n * Conan version: **conan 1.26.0**\r\n * Python version: **Python 3.8.3**\r\n\r\n\r\n### Conan profile (output of `conan profile show default` or `conan profile show <profile>` if custom profile is in use)\r\n```\r\narch=x86_64\r\narch_build=x86_64\r\nbuild_type=Release\r\ncompiler=gcc\r\ncompiler.libcxx=libstdc++\r\ncompiler.version=10\r\nos=Linux\r\nos_build=Linux\r\n```\r\n\r\n\r\n### Steps to reproduce (Include if Applicable)\r\nWhen I try to install xorg/system\r\n`conan install xorg/system@ --build missing`\r\n\r\n\r\n### Logs (Include/Attach if Applicable)\r\n<details><summary>Click to expand log</summary>\r\n\r\n```\r\nConfiguration:\r\n[settings]\r\narch=x86_64\r\narch_build=x86_64\r\nbuild_type=Release\r\ncompiler=gcc\r\ncompiler.libcxx=libstdc++\r\ncompiler.version=10\r\nos=Linux\r\nos_build=Linux\r\n[options]\r\n[build_requires]\r\n[env]\r\n\r\nInstalling package: xorg/system\r\nRequirements\r\n xorg/system from 'conan-center' - Cache\r\nPackages\r\n xorg/system:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9 - Cache\r\n\r\nInstalling (downloading, building) binaries...\r\nxorg/system: Already installed!\r\nERROR: xorg/system: Error in package_info() method, line 57\r\n\tself._fill_cppinfo_from_pkgconfig(name)\r\nwhile calling '_fill_cppinfo_from_pkgconfig', line 18\r\n\tif not pkg_config.provides:\r\n\tConanException: pkg-config command ['pkg-config', '--print-provides', 'sm', '--print-errors'] failed with error: Command 'pkg-config --print-provides sm --print-errors' returned non-zero exit status 1.\r\nPackage sm was not found in the pkg-config search path.\r\nPerhaps you should add the directory containing `sm.pc'\r\nto the PKG_CONFIG_PATH environment variable\r\nPackage 'sm', required by 'virtual:world', not found\r\n```\r\n\r\n</details>\r\n\n", "before_files": [{"content": "from conans import ConanFile, tools\nfrom conans.errors import ConanException\n\n\nclass ConanXOrg(ConanFile):\n name = \"xorg\"\n url = \"https://github.com/conan-io/conan-center-index\"\n license = \"MIT\"\n homepage = \"https://www.x.org/wiki/\"\n description = \"The X.Org project provides an open source implementation 
of the X Window System.\"\n settings = {\"os\": \"Linux\"}\n\n def package_id(self):\n self.info.header_only()\n\n def _fill_cppinfo_from_pkgconfig(self, name):\n pkg_config = tools.PkgConfig(name)\n if not pkg_config.provides:\n raise ConanException(\"OpenGL development files aren't available, give up\")\n libs = [lib[2:] for lib in pkg_config.libs_only_l]\n lib_dirs = [lib[2:] for lib in pkg_config.libs_only_L]\n ldflags = [flag for flag in pkg_config.libs_only_other]\n include_dirs = [include[2:] for include in pkg_config.cflags_only_I]\n cflags = [flag for flag in pkg_config.cflags_only_other if not flag.startswith(\"-D\")]\n defines = [flag[2:] for flag in pkg_config.cflags_only_other if flag.startswith(\"-D\")]\n\n self.cpp_info.system_libs.extend(libs)\n self.cpp_info.libdirs.extend(lib_dirs)\n self.cpp_info.sharedlinkflags.extend(ldflags)\n self.cpp_info.exelinkflags.extend(ldflags)\n self.cpp_info.defines.extend(defines)\n self.cpp_info.includedirs.extend(include_dirs)\n self.cpp_info.cflags.extend(cflags)\n self.cpp_info.cxxflags.extend(cflags)\n\n\n def system_requirements(self):\n if tools.os_info.is_linux and self.settings.os == \"Linux\":\n package_tool = tools.SystemPackageTool(conanfile=self, default_mode=\"verify\")\n if tools.os_info.with_apt:\n packages = [\"xorg-dev\", \"libx11-xcb-dev\", \"libxcb-render0-dev\", \"libxcb-render-util0-dev\"]\n elif tools.os_info.with_yum:\n packages = [\"xorg-x11-server-devel\"]\n elif tools.os_info.with_pacman:\n packages = [\"xorg-server-devel\"]\n elif tools.os_info.with_zypper:\n packages = [\"Xorg-x11-devel\"]\n else:\n self.output.warn(\"Do not know how to install 'xorg' for {}.\".format(tools.os_info.linux_distro))\n for p in packages:\n package_tool.install(update=True, packages=p)\n\n def package_info(self):\n for name in [\"x11\", \"x11-xcb\", \"dmx\", \"fontenc\", \"libfs\", \"ice\", \"sm\", \"xau\", \"xaw7\",\n \"xcomposite\",\"xcursor\", \"xdamage\", \"xdmcp\", \"xext\", \"xfixes\", \"xft\", \"xi\",\n \"xinerama\", \"xkbfile\", \"xmu\", \"xmuu\", \"xpm\", \"xrandr\", \"xrender\", \"xres\",\n \"xscrnsaver\", \"xt\", \"xtst\", \"xv\", \"xvmc\", \"xxf86dga\", \"xxf86vm\", \"xtrans\"]:\n self._fill_cppinfo_from_pkgconfig(name)\n", "path": "recipes/xorg/all/conanfile.py"}], "after_files": [{"content": "from conans import ConanFile, tools\nfrom conans.errors import ConanException\n\n\nclass ConanXOrg(ConanFile):\n name = \"xorg\"\n url = \"https://github.com/conan-io/conan-center-index\"\n license = \"MIT\"\n homepage = \"https://www.x.org/wiki/\"\n description = \"The X.Org project provides an open source implementation of the X Window System.\"\n settings = {\"os\": \"Linux\"}\n\n def package_id(self):\n self.info.header_only()\n\n def _fill_cppinfo_from_pkgconfig(self, name):\n pkg_config = tools.PkgConfig(name)\n if not pkg_config.provides:\n raise ConanException(\"OpenGL development files aren't available, give up\")\n libs = [lib[2:] for lib in pkg_config.libs_only_l]\n lib_dirs = [lib[2:] for lib in pkg_config.libs_only_L]\n ldflags = [flag for flag in pkg_config.libs_only_other]\n include_dirs = [include[2:] for include in pkg_config.cflags_only_I]\n cflags = [flag for flag in pkg_config.cflags_only_other if not flag.startswith(\"-D\")]\n defines = [flag[2:] for flag in pkg_config.cflags_only_other if flag.startswith(\"-D\")]\n\n self.cpp_info.system_libs.extend(libs)\n self.cpp_info.libdirs.extend(lib_dirs)\n self.cpp_info.sharedlinkflags.extend(ldflags)\n self.cpp_info.exelinkflags.extend(ldflags)\n 
self.cpp_info.defines.extend(defines)\n self.cpp_info.includedirs.extend(include_dirs)\n self.cpp_info.cflags.extend(cflags)\n self.cpp_info.cxxflags.extend(cflags)\n\n\n def system_requirements(self):\n if tools.os_info.is_linux and self.settings.os == \"Linux\":\n package_tool = tools.SystemPackageTool(conanfile=self, default_mode=\"verify\")\n if tools.os_info.with_apt:\n packages = [\"xorg-dev\", \"libx11-xcb-dev\", \"libxcb-render0-dev\", \"libxcb-render-util0-dev\"]\n elif tools.os_info.with_yum or tools.os_info.with_dnf:\n packages = [\"xorg-x11-server-devel\"]\n elif tools.os_info.with_pacman:\n packages = [\"xorg-server-devel\"]\n elif tools.os_info.with_zypper:\n packages = [\"Xorg-x11-devel\"]\n else:\n self.output.warn(\"Do not know how to install 'xorg' for {}.\".format(tools.os_info.linux_distro))\n for p in packages:\n package_tool.install(update=True, packages=p)\n\n def package_info(self):\n for name in [\"x11\", \"x11-xcb\", \"dmx\", \"fontenc\", \"libfs\", \"ice\", \"sm\", \"xau\", \"xaw7\",\n \"xcomposite\",\"xcursor\", \"xdamage\", \"xdmcp\", \"xext\", \"xfixes\", \"xft\", \"xi\",\n \"xinerama\", \"xkbfile\", \"xmu\", \"xmuu\", \"xpm\", \"xrandr\", \"xrender\", \"xres\",\n \"xscrnsaver\", \"xt\", \"xtst\", \"xv\", \"xvmc\", \"xxf86dga\", \"xxf86vm\", \"xtrans\"]:\n self._fill_cppinfo_from_pkgconfig(name)\n", "path": "recipes/xorg/all/conanfile.py"}]} | 1,748 | 176 |
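A note on the `xorg/system` record that ends here: the one-line fix widens the yum branch to `tools.os_info.with_yum or tools.os_info.with_dnf`, so Fedora hosts install `xorg-x11-server-devel` instead of falling through to the warning. The failure in the log can be replayed outside Conan with the same pkg-config call the recipe makes; a minimal sketch:

```python
# Sketch only: replays the check behind _fill_cppinfo_from_pkgconfig for one library.
import subprocess

result = subprocess.run(
    ["pkg-config", "--print-provides", "sm", "--print-errors"],
    capture_output=True, text=True,
)
# Non-zero exit plus "Package sm was not found" while sm.pc is missing;
# returns 0 once the SM development files are present on the system.
print(result.returncode, result.stderr.strip())
```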
gh_patches_debug_12110 | rasdani/github-patches | git_diff | getnikola__nikola-492 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Only render the last index page
The render_indexes plugin only renders the last index page.
To reproduce, set the 'use_in_feed' parameter to True in more than one directory of posts.
I provide the patch in a pull request (it's a little indentation modification)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nikola/plugins/task_indexes.py`
Content:
```
1 # Copyright (c) 2012 Roberto Alsina y otros.
2
3 # Permission is hereby granted, free of charge, to any
4 # person obtaining a copy of this software and associated
5 # documentation files (the "Software"), to deal in the
6 # Software without restriction, including without limitation
7 # the rights to use, copy, modify, merge, publish,
8 # distribute, sublicense, and/or sell copies of the
9 # Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
11 #
12 # The above copyright notice and this permission notice
13 # shall be included in all copies or substantial portions of
14 # the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
17 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
18 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
19 # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
20 # OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
21 # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
22 # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
23 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
24
25 from __future__ import unicode_literals
26 import glob
27 import os
28
29 from nikola.plugin_categories import Task
30 from nikola.utils import config_changed
31
32
33 class Indexes(Task):
34 """Render the blog indexes."""
35
36 name = "render_indexes"
37
38 def gen_tasks(self):
39 self.site.scan_posts()
40
41 kw = {
42 "translations": self.site.config['TRANSLATIONS'],
43 "index_display_post_count":
44 self.site.config['INDEX_DISPLAY_POST_COUNT'],
45 "messages": self.site.MESSAGES,
46 "index_teasers": self.site.config['INDEX_TEASERS'],
47 "output_folder": self.site.config['OUTPUT_FOLDER'],
48 "filters": self.site.config['FILTERS'],
49 "hide_untranslated_posts": self.site.config['HIDE_UNTRANSLATED_POSTS'],
50 "indexes_title": self.site.config['INDEXES_TITLE'],
51 "indexes_pages": self.site.config['INDEXES_PAGES'],
52 "blog_title": self.site.config["BLOG_TITLE"],
53 }
54
55 template_name = "index.tmpl"
56 posts = [x for x in self.site.timeline if x.use_in_feeds]
57 if not posts:
58 yield {'basename': 'render_indexes', 'actions': []}
59 for lang in kw["translations"]:
60 # Split in smaller lists
61 lists = []
62 if kw["hide_untranslated_posts"]:
63 filtered_posts = [x for x in posts if x.is_translation_available(lang)]
64 else:
65 filtered_posts = posts
66 while filtered_posts:
67 lists.append(filtered_posts[:kw["index_display_post_count"]])
68 filtered_posts = filtered_posts[kw["index_display_post_count"]:]
69 num_pages = len(lists)
70 for i, post_list in enumerate(lists):
71 context = {}
72 indexes_title = kw['indexes_title'] or kw['blog_title']
73 if not i:
74 context["title"] = indexes_title
75 else:
76 if kw["indexes_pages"]:
77 indexes_pages = kw["indexes_pages"] % i
78 else:
79 indexes_pages = " (" + \
80 kw["messages"][lang]["old posts page %d"] % i + ")"
81 context["title"] = indexes_title + indexes_pages
82 context["prevlink"] = None
83 context["nextlink"] = None
84 context['index_teasers'] = kw['index_teasers']
85 if i > 1:
86 context["prevlink"] = "index-{0}.html".format(i - 1)
87 if i == 1:
88 context["prevlink"] = self.site.config["INDEX_FILE"]
89 if i < num_pages - 1:
90 context["nextlink"] = "index-{0}.html".format(i + 1)
91 context["permalink"] = self.site.link("index", i, lang)
92 output_name = os.path.join(
93 kw['output_folder'], self.site.path("index", i,
94 lang))
95 task = self.site.generic_post_list_renderer(
96 lang,
97 post_list,
98 output_name,
99 template_name,
100 kw['filters'],
101 context,
102 )
103 task_cfg = {1: task['uptodate'][0].config, 2: kw}
104 task['uptodate'] = [config_changed(task_cfg)]
105 task['basename'] = 'render_indexes'
106 yield task
107
108 if not self.site.config["STORY_INDEX"]:
109 return
110 kw = {
111 "translations": self.site.config['TRANSLATIONS'],
112 "post_pages": self.site.config["post_pages"],
113 "output_folder": self.site.config['OUTPUT_FOLDER'],
114 "filters": self.site.config['FILTERS'],
115 }
116 template_name = "list.tmpl"
117 for lang in kw["translations"]:
118 for wildcard, dest, _, is_post in kw["post_pages"]:
119 if is_post:
120 continue
121 context = {}
122 # vim/pyflakes thinks it's unused
123 # src_dir = os.path.dirname(wildcard)
124 files = glob.glob(wildcard)
125 post_list = [self.site.global_data[os.path.splitext(p)[0]] for
126 p in files]
127 output_name = os.path.join(kw["output_folder"],
128 self.site.path("post_path",
129 wildcard,
130 lang)).encode('utf8')
131 context["items"] = [(post.title(lang), post.permalink(lang))
132 for post in post_list]
133 task = self.site.generic_post_list_renderer(lang, post_list,
134 output_name,
135 template_name,
136 kw['filters'],
137 context)
138 task_cfg = {1: task['uptodate'][0].config, 2: kw}
139 task['uptodate'] = [config_changed(task_cfg)]
140 task['basename'] = self.name
141 yield task
142
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nikola/plugins/task_indexes.py b/nikola/plugins/task_indexes.py
--- a/nikola/plugins/task_indexes.py
+++ b/nikola/plugins/task_indexes.py
@@ -135,7 +135,7 @@
template_name,
kw['filters'],
context)
- task_cfg = {1: task['uptodate'][0].config, 2: kw}
- task['uptodate'] = [config_changed(task_cfg)]
- task['basename'] = self.name
- yield task
+ task_cfg = {1: task['uptodate'][0].config, 2: kw}
+ task['uptodate'] = [config_changed(task_cfg)]
+ task['basename'] = self.name
+ yield task
| {"golden_diff": "diff --git a/nikola/plugins/task_indexes.py b/nikola/plugins/task_indexes.py\n--- a/nikola/plugins/task_indexes.py\n+++ b/nikola/plugins/task_indexes.py\n@@ -135,7 +135,7 @@\n template_name,\n kw['filters'],\n context)\n- task_cfg = {1: task['uptodate'][0].config, 2: kw}\n- task['uptodate'] = [config_changed(task_cfg)]\n- task['basename'] = self.name\n- yield task\n+ task_cfg = {1: task['uptodate'][0].config, 2: kw}\n+ task['uptodate'] = [config_changed(task_cfg)]\n+ task['basename'] = self.name\n+ yield task\n", "issue": "Only render the last index page\nThe render_indexes plugin only renders the last index page.\n\nTo reproduce, set the 'use_in_feed' parameter to True in more than one directory of posts.\n\nI provide the patch in a pull request (it's a little identation modification)\n\n", "before_files": [{"content": "# Copyright (c) 2012 Roberto Alsina y otros.\n\n# Permission is hereby granted, free of charge, to any\n# person obtaining a copy of this software and associated\n# documentation files (the \"Software\"), to deal in the\n# Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the\n# Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice\n# shall be included in all copies or substantial portions of\n# the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS\n# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR\n# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nfrom __future__ import unicode_literals\nimport glob\nimport os\n\nfrom nikola.plugin_categories import Task\nfrom nikola.utils import config_changed\n\n\nclass Indexes(Task):\n \"\"\"Render the blog indexes.\"\"\"\n\n name = \"render_indexes\"\n\n def gen_tasks(self):\n self.site.scan_posts()\n\n kw = {\n \"translations\": self.site.config['TRANSLATIONS'],\n \"index_display_post_count\":\n self.site.config['INDEX_DISPLAY_POST_COUNT'],\n \"messages\": self.site.MESSAGES,\n \"index_teasers\": self.site.config['INDEX_TEASERS'],\n \"output_folder\": self.site.config['OUTPUT_FOLDER'],\n \"filters\": self.site.config['FILTERS'],\n \"hide_untranslated_posts\": self.site.config['HIDE_UNTRANSLATED_POSTS'],\n \"indexes_title\": self.site.config['INDEXES_TITLE'],\n \"indexes_pages\": self.site.config['INDEXES_PAGES'],\n \"blog_title\": self.site.config[\"BLOG_TITLE\"],\n }\n\n template_name = \"index.tmpl\"\n posts = [x for x in self.site.timeline if x.use_in_feeds]\n if not posts:\n yield {'basename': 'render_indexes', 'actions': []}\n for lang in kw[\"translations\"]:\n # Split in smaller lists\n lists = []\n if kw[\"hide_untranslated_posts\"]:\n filtered_posts = [x for x in posts if x.is_translation_available(lang)]\n else:\n filtered_posts = posts\n while filtered_posts:\n lists.append(filtered_posts[:kw[\"index_display_post_count\"]])\n filtered_posts = filtered_posts[kw[\"index_display_post_count\"]:]\n num_pages = len(lists)\n for i, post_list in enumerate(lists):\n context = {}\n indexes_title = kw['indexes_title'] or kw['blog_title']\n if 
not i:\n context[\"title\"] = indexes_title\n else:\n if kw[\"indexes_pages\"]:\n indexes_pages = kw[\"indexes_pages\"] % i\n else:\n indexes_pages = \" (\" + \\\n kw[\"messages\"][lang][\"old posts page %d\"] % i + \")\"\n context[\"title\"] = indexes_title + indexes_pages\n context[\"prevlink\"] = None\n context[\"nextlink\"] = None\n context['index_teasers'] = kw['index_teasers']\n if i > 1:\n context[\"prevlink\"] = \"index-{0}.html\".format(i - 1)\n if i == 1:\n context[\"prevlink\"] = self.site.config[\"INDEX_FILE\"]\n if i < num_pages - 1:\n context[\"nextlink\"] = \"index-{0}.html\".format(i + 1)\n context[\"permalink\"] = self.site.link(\"index\", i, lang)\n output_name = os.path.join(\n kw['output_folder'], self.site.path(\"index\", i,\n lang))\n task = self.site.generic_post_list_renderer(\n lang,\n post_list,\n output_name,\n template_name,\n kw['filters'],\n context,\n )\n task_cfg = {1: task['uptodate'][0].config, 2: kw}\n task['uptodate'] = [config_changed(task_cfg)]\n task['basename'] = 'render_indexes'\n yield task\n\n if not self.site.config[\"STORY_INDEX\"]:\n return\n kw = {\n \"translations\": self.site.config['TRANSLATIONS'],\n \"post_pages\": self.site.config[\"post_pages\"],\n \"output_folder\": self.site.config['OUTPUT_FOLDER'],\n \"filters\": self.site.config['FILTERS'],\n }\n template_name = \"list.tmpl\"\n for lang in kw[\"translations\"]:\n for wildcard, dest, _, is_post in kw[\"post_pages\"]:\n if is_post:\n continue\n context = {}\n # vim/pyflakes thinks it's unused\n # src_dir = os.path.dirname(wildcard)\n files = glob.glob(wildcard)\n post_list = [self.site.global_data[os.path.splitext(p)[0]] for\n p in files]\n output_name = os.path.join(kw[\"output_folder\"],\n self.site.path(\"post_path\",\n wildcard,\n lang)).encode('utf8')\n context[\"items\"] = [(post.title(lang), post.permalink(lang))\n for post in post_list]\n task = self.site.generic_post_list_renderer(lang, post_list,\n output_name,\n template_name,\n kw['filters'],\n context)\n task_cfg = {1: task['uptodate'][0].config, 2: kw}\n task['uptodate'] = [config_changed(task_cfg)]\n task['basename'] = self.name\n yield task\n", "path": "nikola/plugins/task_indexes.py"}], "after_files": [{"content": "# Copyright (c) 2012 Roberto Alsina y otros.\n\n# Permission is hereby granted, free of charge, to any\n# person obtaining a copy of this software and associated\n# documentation files (the \"Software\"), to deal in the\n# Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the\n# Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice\n# shall be included in all copies or substantial portions of\n# the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n# PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS\n# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR\n# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nfrom __future__ import unicode_literals\nimport glob\nimport os\n\nfrom nikola.plugin_categories import Task\nfrom nikola.utils import config_changed\n\n\nclass Indexes(Task):\n \"\"\"Render the blog indexes.\"\"\"\n\n name = \"render_indexes\"\n\n def gen_tasks(self):\n self.site.scan_posts()\n\n kw = {\n \"translations\": self.site.config['TRANSLATIONS'],\n \"index_display_post_count\":\n self.site.config['INDEX_DISPLAY_POST_COUNT'],\n \"messages\": self.site.MESSAGES,\n \"index_teasers\": self.site.config['INDEX_TEASERS'],\n \"output_folder\": self.site.config['OUTPUT_FOLDER'],\n \"filters\": self.site.config['FILTERS'],\n \"hide_untranslated_posts\": self.site.config['HIDE_UNTRANSLATED_POSTS'],\n \"indexes_title\": self.site.config['INDEXES_TITLE'],\n \"indexes_pages\": self.site.config['INDEXES_PAGES'],\n \"blog_title\": self.site.config[\"BLOG_TITLE\"],\n }\n\n template_name = \"index.tmpl\"\n posts = [x for x in self.site.timeline if x.use_in_feeds]\n if not posts:\n yield {'basename': 'render_indexes', 'actions': []}\n for lang in kw[\"translations\"]:\n # Split in smaller lists\n lists = []\n if kw[\"hide_untranslated_posts\"]:\n filtered_posts = [x for x in posts if x.is_translation_available(lang)]\n else:\n filtered_posts = posts\n while filtered_posts:\n lists.append(filtered_posts[:kw[\"index_display_post_count\"]])\n filtered_posts = filtered_posts[kw[\"index_display_post_count\"]:]\n num_pages = len(lists)\n for i, post_list in enumerate(lists):\n context = {}\n indexes_title = kw['indexes_title'] or kw['blog_title']\n if not i:\n context[\"title\"] = indexes_title\n else:\n if kw[\"indexes_pages\"]:\n indexes_pages = kw[\"indexes_pages\"] % i\n else:\n indexes_pages = \" (\" + \\\n kw[\"messages\"][lang][\"old posts page %d\"] % i + \")\"\n context[\"title\"] = indexes_title + indexes_pages\n context[\"prevlink\"] = None\n context[\"nextlink\"] = None\n context['index_teasers'] = kw['index_teasers']\n if i > 1:\n context[\"prevlink\"] = \"index-{0}.html\".format(i - 1)\n if i == 1:\n context[\"prevlink\"] = self.site.config[\"INDEX_FILE\"]\n if i < num_pages - 1:\n context[\"nextlink\"] = \"index-{0}.html\".format(i + 1)\n context[\"permalink\"] = self.site.link(\"index\", i, lang)\n output_name = os.path.join(\n kw['output_folder'], self.site.path(\"index\", i,\n lang))\n task = self.site.generic_post_list_renderer(\n lang,\n post_list,\n output_name,\n template_name,\n kw['filters'],\n context,\n )\n task_cfg = {1: task['uptodate'][0].config, 2: kw}\n task['uptodate'] = [config_changed(task_cfg)]\n task['basename'] = 'render_indexes'\n yield task\n\n if not self.site.config[\"STORY_INDEX\"]:\n return\n kw = {\n \"translations\": self.site.config['TRANSLATIONS'],\n \"post_pages\": self.site.config[\"post_pages\"],\n \"output_folder\": self.site.config['OUTPUT_FOLDER'],\n \"filters\": self.site.config['FILTERS'],\n }\n template_name = \"list.tmpl\"\n for lang in kw[\"translations\"]:\n for wildcard, dest, _, is_post in kw[\"post_pages\"]:\n if is_post:\n continue\n context = {}\n # vim/pyflakes thinks it's unused\n # src_dir = os.path.dirname(wildcard)\n files = glob.glob(wildcard)\n post_list = [self.site.global_data[os.path.splitext(p)[0]] for\n p in files]\n output_name = 
os.path.join(kw[\"output_folder\"],\n self.site.path(\"post_path\",\n wildcard,\n lang)).encode('utf8')\n context[\"items\"] = [(post.title(lang), post.permalink(lang))\n for post in post_list]\n task = self.site.generic_post_list_renderer(lang, post_list,\n output_name,\n template_name,\n kw['filters'],\n context)\n task_cfg = {1: task['uptodate'][0].config, 2: kw}\n task['uptodate'] = [config_changed(task_cfg)]\n task['basename'] = self.name\n yield task\n", "path": "nikola/plugins/task_indexes.py"}]} | 1,871 | 168 |
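A note on the Nikola `render_indexes` record that ends here: the fix is purely an indentation change, but it matters because `gen_tasks` is a generator. With the `task_cfg`/`yield task` block one level too far left, the `yield` only runs after the inner `for wildcard, ...` loop has finished, so only the task for the last post directory is ever emitted. The pitfall in miniature (illustrative names, not Nikola code):

```python
def broken():
    for name in ("blog", "docs", "news"):
        task = {"basename": name}
    yield task                      # runs once, after the loop -> only "news"

def fixed():
    for name in ("blog", "docs", "news"):
        task = {"basename": name}
        yield task                  # runs on every iteration

assert [t["basename"] for t in broken()] == ["news"]
assert [t["basename"] for t in fixed()] == ["blog", "docs", "news"]
```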
gh_patches_debug_29497 | rasdani/github-patches | git_diff | python__peps-2533 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Waste less vertical space at top of rendered PEP
This is about usability of peps rendered on peps.python.org.
At the top of a PEP (e.g. https://peps.python.org/pep-0687/) there's a table with metadata. Most of it I ignore, or it is even duplicated (the title). I usually have to scroll right past that to the Abstract. Maybe the metadata could be collapsed, like the ToC? Or moved to the sidebar, like the ToC?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pep_sphinx_extensions/pep_processor/transforms/pep_title.py`
Content:
```
1 from pathlib import Path
2
3 from docutils import nodes
4 from docutils import transforms
5 from docutils import utils
6 from docutils.parsers.rst import roles
7 from docutils.parsers.rst import states
8
9
10 class PEPTitle(transforms.Transform):
11 """Add PEP title and organise document hierarchy."""
12
13 # needs to run before docutils.transforms.frontmatter.DocInfo and after
14 # pep_processor.transforms.pep_title.PEPTitle
15 default_priority = 335
16
17 def apply(self) -> None:
18 if not Path(self.document["source"]).match("pep-*"):
19 return # not a PEP file, exit early
20
21 # Directory to hold the PEP's RFC2822 header details, to extract a title string
22 pep_header_details = {}
23
24 # Iterate through the header fields, which are the first section of the document
25 for field in self.document[0]:
26 # Hold details of the attribute's tag against its details
27 row_attributes = {sub.tagname: sub.rawsource for sub in field}
28 pep_header_details[row_attributes["field_name"]] = row_attributes["field_body"]
29
30 # We only need the PEP number and title
31 if pep_header_details.keys() >= {"PEP", "Title"}:
32 break
33
34 # Create the title string for the PEP
35 pep_number = int(pep_header_details["PEP"])
36 pep_title = pep_header_details["Title"]
37 pep_title_string = f"PEP {pep_number} -- {pep_title}" # double hyphen for en dash
38
39 # Generate the title section node and its properties
40 title_nodes = _line_to_nodes(pep_title_string)
41 pep_title_node = nodes.section("", nodes.title("", "", *title_nodes, classes=["page-title"]), names=["pep-content"])
42
43 # Insert the title node as the root element, move children down
44 document_children = self.document.children
45 self.document.children = [pep_title_node]
46 pep_title_node.extend(document_children)
47 self.document.note_implicit_target(pep_title_node, pep_title_node)
48
49
50 def _line_to_nodes(text: str) -> list[nodes.Node]:
51 """Parse RST string to nodes."""
52 document = utils.new_document("<inline-rst>")
53 document.settings.pep_references = document.settings.rfc_references = False # patch settings
54 states.RSTStateMachine(state_classes=states.state_classes, initial_state="Body").run([text], document) # do parsing
55 roles._roles.pop("", None) # restore the "default" default role after parsing a document
56 return document[0].children
57
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pep_sphinx_extensions/pep_processor/transforms/pep_title.py b/pep_sphinx_extensions/pep_processor/transforms/pep_title.py
--- a/pep_sphinx_extensions/pep_processor/transforms/pep_title.py
+++ b/pep_sphinx_extensions/pep_processor/transforms/pep_title.py
@@ -22,13 +22,19 @@
pep_header_details = {}
# Iterate through the header fields, which are the first section of the document
+ desired_fields = {"PEP", "Title"}
+ fields_to_remove = []
for field in self.document[0]:
# Hold details of the attribute's tag against its details
row_attributes = {sub.tagname: sub.rawsource for sub in field}
pep_header_details[row_attributes["field_name"]] = row_attributes["field_body"]
+ # Store the redundant fields in the table for removal
+ if row_attributes["field_name"] in desired_fields:
+ fields_to_remove.append(field)
+
# We only need the PEP number and title
- if pep_header_details.keys() >= {"PEP", "Title"}:
+ if pep_header_details.keys() >= desired_fields:
break
# Create the title string for the PEP
@@ -46,6 +52,10 @@
pep_title_node.extend(document_children)
self.document.note_implicit_target(pep_title_node, pep_title_node)
+ # Remove the now-redundant fields
+ for field in fields_to_remove:
+ field.parent.remove(field)
+
def _line_to_nodes(text: str) -> list[nodes.Node]:
"""Parse RST string to nodes."""
| {"golden_diff": "diff --git a/pep_sphinx_extensions/pep_processor/transforms/pep_title.py b/pep_sphinx_extensions/pep_processor/transforms/pep_title.py\n--- a/pep_sphinx_extensions/pep_processor/transforms/pep_title.py\n+++ b/pep_sphinx_extensions/pep_processor/transforms/pep_title.py\n@@ -22,13 +22,19 @@\n pep_header_details = {}\n \n # Iterate through the header fields, which are the first section of the document\n+ desired_fields = {\"PEP\", \"Title\"}\n+ fields_to_remove = []\n for field in self.document[0]:\n # Hold details of the attribute's tag against its details\n row_attributes = {sub.tagname: sub.rawsource for sub in field}\n pep_header_details[row_attributes[\"field_name\"]] = row_attributes[\"field_body\"]\n \n+ # Store the redundant fields in the table for removal\n+ if row_attributes[\"field_name\"] in desired_fields:\n+ fields_to_remove.append(field)\n+\n # We only need the PEP number and title\n- if pep_header_details.keys() >= {\"PEP\", \"Title\"}:\n+ if pep_header_details.keys() >= desired_fields:\n break\n \n # Create the title string for the PEP\n@@ -46,6 +52,10 @@\n pep_title_node.extend(document_children)\n self.document.note_implicit_target(pep_title_node, pep_title_node)\n \n+ # Remove the now-redundant fields\n+ for field in fields_to_remove:\n+ field.parent.remove(field)\n+\n \n def _line_to_nodes(text: str) -> list[nodes.Node]:\n \"\"\"Parse RST string to nodes.\"\"\"\n", "issue": "Waste less vertical space at top of rendered PEP\nThis is about usability of peps rendered on peps.python.org.\r\n\r\nAt the top of a PEP (e.g. https://peps.python.org/pep-0687/) there's a table with metadata. Most of that I ignore or is even duplicate (the title). I usually have to scroll right past that to the Abstract. Maybe the metadata could be collapsed, like the ToC? 
Or moved to the sidebar, like the ToC?\n", "before_files": [{"content": "from pathlib import Path\n\nfrom docutils import nodes\nfrom docutils import transforms\nfrom docutils import utils\nfrom docutils.parsers.rst import roles\nfrom docutils.parsers.rst import states\n\n\nclass PEPTitle(transforms.Transform):\n \"\"\"Add PEP title and organise document hierarchy.\"\"\"\n\n # needs to run before docutils.transforms.frontmatter.DocInfo and after\n # pep_processor.transforms.pep_title.PEPTitle\n default_priority = 335\n\n def apply(self) -> None:\n if not Path(self.document[\"source\"]).match(\"pep-*\"):\n return # not a PEP file, exit early\n\n # Directory to hold the PEP's RFC2822 header details, to extract a title string\n pep_header_details = {}\n\n # Iterate through the header fields, which are the first section of the document\n for field in self.document[0]:\n # Hold details of the attribute's tag against its details\n row_attributes = {sub.tagname: sub.rawsource for sub in field}\n pep_header_details[row_attributes[\"field_name\"]] = row_attributes[\"field_body\"]\n\n # We only need the PEP number and title\n if pep_header_details.keys() >= {\"PEP\", \"Title\"}:\n break\n\n # Create the title string for the PEP\n pep_number = int(pep_header_details[\"PEP\"])\n pep_title = pep_header_details[\"Title\"]\n pep_title_string = f\"PEP {pep_number} -- {pep_title}\" # double hyphen for en dash\n\n # Generate the title section node and its properties\n title_nodes = _line_to_nodes(pep_title_string)\n pep_title_node = nodes.section(\"\", nodes.title(\"\", \"\", *title_nodes, classes=[\"page-title\"]), names=[\"pep-content\"])\n\n # Insert the title node as the root element, move children down\n document_children = self.document.children\n self.document.children = [pep_title_node]\n pep_title_node.extend(document_children)\n self.document.note_implicit_target(pep_title_node, pep_title_node)\n\n\ndef _line_to_nodes(text: str) -> list[nodes.Node]:\n \"\"\"Parse RST string to nodes.\"\"\"\n document = utils.new_document(\"<inline-rst>\")\n document.settings.pep_references = document.settings.rfc_references = False # patch settings\n states.RSTStateMachine(state_classes=states.state_classes, initial_state=\"Body\").run([text], document) # do parsing\n roles._roles.pop(\"\", None) # restore the \"default\" default role after parsing a document\n return document[0].children\n", "path": "pep_sphinx_extensions/pep_processor/transforms/pep_title.py"}], "after_files": [{"content": "from pathlib import Path\n\nfrom docutils import nodes\nfrom docutils import transforms\nfrom docutils import utils\nfrom docutils.parsers.rst import roles\nfrom docutils.parsers.rst import states\n\n\nclass PEPTitle(transforms.Transform):\n \"\"\"Add PEP title and organise document hierarchy.\"\"\"\n\n # needs to run before docutils.transforms.frontmatter.DocInfo and after\n # pep_processor.transforms.pep_title.PEPTitle\n default_priority = 335\n\n def apply(self) -> None:\n if not Path(self.document[\"source\"]).match(\"pep-*\"):\n return # not a PEP file, exit early\n\n # Directory to hold the PEP's RFC2822 header details, to extract a title string\n pep_header_details = {}\n\n # Iterate through the header fields, which are the first section of the document\n desired_fields = {\"PEP\", \"Title\"}\n fields_to_remove = []\n for field in self.document[0]:\n # Hold details of the attribute's tag against its details\n row_attributes = {sub.tagname: sub.rawsource for sub in field}\n 
pep_header_details[row_attributes[\"field_name\"]] = row_attributes[\"field_body\"]\n\n # Store the redundant fields in the table for removal\n if row_attributes[\"field_name\"] in desired_fields:\n fields_to_remove.append(field)\n\n # We only need the PEP number and title\n if pep_header_details.keys() >= desired_fields:\n break\n\n # Create the title string for the PEP\n pep_number = int(pep_header_details[\"PEP\"])\n pep_title = pep_header_details[\"Title\"]\n pep_title_string = f\"PEP {pep_number} -- {pep_title}\" # double hyphen for en dash\n\n # Generate the title section node and its properties\n title_nodes = _line_to_nodes(pep_title_string)\n pep_title_node = nodes.section(\"\", nodes.title(\"\", \"\", *title_nodes, classes=[\"page-title\"]), names=[\"pep-content\"])\n\n # Insert the title node as the root element, move children down\n document_children = self.document.children\n self.document.children = [pep_title_node]\n pep_title_node.extend(document_children)\n self.document.note_implicit_target(pep_title_node, pep_title_node)\n\n # Remove the now-redundant fields\n for field in fields_to_remove:\n field.parent.remove(field)\n\n\ndef _line_to_nodes(text: str) -> list[nodes.Node]:\n \"\"\"Parse RST string to nodes.\"\"\"\n document = utils.new_document(\"<inline-rst>\")\n document.settings.pep_references = document.settings.rfc_references = False # patch settings\n states.RSTStateMachine(state_classes=states.state_classes, initial_state=\"Body\").run([text], document) # do parsing\n roles._roles.pop(\"\", None) # restore the \"default\" default role after parsing a document\n return document[0].children\n", "path": "pep_sphinx_extensions/pep_processor/transforms/pep_title.py"}]} | 1,046 | 376 |
gh_patches_debug_13421 | rasdani/github-patches | git_diff | napari__napari-2413 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Points layer allows panning when in Add mode
## 🐛 Bug
When in the `Add Points` mode on a points layer, panning is not prevented. This can make it tricky to label a point, as you have to hold the mouse perfectly still.

In contrast, the `pick` mode of a labels layer prevents panning.
## To Reproduce
1. Open napari
2. Add a points layer
3. Move mouse around and then click
Alternatively:
`python examples/nD_points.py`
## Expected behavior
Clicking when in `Add Points` mode will not allow panning, the same as when in `Pick` mode for a labels layer.
## Environment
```
napari: 0.4.7.dev14+gc473058
Platform: Linux-5.8.0-7630-generic-x86_64-with-glibc2.31
System: Pop!_OS 20.04 LTS
Python: 3.9.1 | packaged by conda-forge | (default, Jan 26 2021, 01:34:10) [GCC 9.3.0]
Qt: 5.15.2
PyQt5: 5.15.2
NumPy: 1.19.5
SciPy: 1.6.0
Dask: 2021.01.1
VisPy: 0.6.6
OpenGL:
- GL version: 4.6 (Compatibility Profile) Mesa 20.2.6
- MAX_TEXTURE_SIZE: 16384
Screens:
- screen 1: resolution 1920x1080, scale 1.0
Plugins:
- console: 0.0.3
- svg: 0.1.4
```
## Additional context
<!-- Add any other context about the problem here. -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `napari/layers/points/_points_mouse_bindings.py`
Content:
```
1 import numpy as np
2
3 from ._points_utils import points_in_box
4
5
6 def select(layer, event):
7 """Select points.
8
9 Clicking on a point will select that point. If holding shift while clicking
10 that point will be added to or removed from the existing selection
11 depending on whether it is selected or not.
12
13 Clicking and dragging a point that is already selected will drag all the
14 currently selected points.
15
16 Clicking and dragging on an empty part of the canvas (i.e. not on a point)
17 will create a drag box that will select all points inside it when finished.
18 Holding shift throughout the entirety of this process will add those points
19 to any existing selection, otherwise these will become the only selected
20 points.
21 """
22 # on press
23 modify_selection = (
24 'Shift' in event.modifiers or 'Control' in event.modifiers
25 )
26
27 # Get value under the cursor, for points, this is the index of the highlighted
28 # if any, or None.
29 value = layer.get_value(event.position, world=True)
30 # if modifying selection add / remove any from existing selection
31 if modify_selection:
32 if value is not None:
33 layer.selected_data = _toggle_selected(layer.selected_data, value)
34 else:
35 if value is not None:
36 # If the current index is not in the current list make it the only
37 # index selected, otherwise don't change the selection so that
38 # the current selection can be dragged together.
39 if value not in layer.selected_data:
40 layer.selected_data = {value}
41 else:
42 layer.selected_data = set()
43 layer._set_highlight()
44
45 yield
46
47 # on move
48 while event.type == 'mouse_move':
49 coordinates = layer.world_to_data(event.position)
50 # If not holding modifying selection and points selected then drag them
51 if not modify_selection and len(layer.selected_data) > 0:
52 layer._move(layer.selected_data, coordinates)
53 else:
54 coord = [coordinates[i] for i in layer._dims_displayed]
55 layer._is_selecting = True
56 if layer._drag_start is None:
57 layer._drag_start = coord
58 layer._drag_box = np.array([layer._drag_start, coord])
59 layer._set_highlight()
60 yield
61
62 # on release
63 layer._drag_start = None
64 if layer._is_selecting:
65 layer._is_selecting = False
66 if len(layer._view_data) > 0:
67 selection = points_in_box(
68 layer._drag_box, layer._view_data, layer._view_size
69 )
70 # If shift combine drag selection with existing selected ones
71 if modify_selection:
72 new_selected = layer._indices_view[selection]
73 target = set(layer.selected_data).symmetric_difference(
74 set(new_selected)
75 )
76 layer.selected_data = list(target)
77 else:
78 layer.selected_data = layer._indices_view[selection]
79 else:
80 layer.selected_data = set()
81 layer._set_highlight(force=True)
82
83
84 def add(layer, event):
85 """Add a new point at the clicked position."""
86 # on press
87 dragged = False
88 yield
89
90 # on move
91 while event.type == 'mouse_move':
92 dragged = True
93 yield
94
95 # on release
96 if not dragged:
97 coordinates = layer.world_to_data(event.position)
98 layer.add(coordinates)
99
100
101 def highlight(layer, event):
102 """Highlight hovered points."""
103 layer._set_highlight()
104
105
106 def _toggle_selected(selected_data, value):
107 """Add or remove value from the selected data set.
108
109 Parameters
110 ----------
111 selected_data : set
112 Set of selected data points to be modified.
113 value : int
114 Index of point to add or remove from selected data set.
115
116 Returns
117 -------
118 set
119 Modified selected_data set.
120 """
121 if value in selected_data:
122 selected_data.remove(value)
123 else:
124 selected_data.add(value)
125
126 return selected_data
127
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/napari/layers/points/_points_mouse_bindings.py b/napari/layers/points/_points_mouse_bindings.py
--- a/napari/layers/points/_points_mouse_bindings.py
+++ b/napari/layers/points/_points_mouse_bindings.py
@@ -81,19 +81,20 @@
layer._set_highlight(force=True)
+DRAG_DIST_THRESHOLD = 5
+
+
def add(layer, event):
"""Add a new point at the clicked position."""
- # on press
- dragged = False
- yield
- # on move
- while event.type == 'mouse_move':
- dragged = True
+ if event.type == 'mouse_press':
+ start_pos = event.pos
+
+ while event.type != 'mouse_release':
yield
- # on release
- if not dragged:
+ dist = np.linalg.norm(start_pos - event.pos)
+ if dist < DRAG_DIST_THRESHOLD:
coordinates = layer.world_to_data(event.position)
layer.add(coordinates)
| {"golden_diff": "diff --git a/napari/layers/points/_points_mouse_bindings.py b/napari/layers/points/_points_mouse_bindings.py\n--- a/napari/layers/points/_points_mouse_bindings.py\n+++ b/napari/layers/points/_points_mouse_bindings.py\n@@ -81,19 +81,20 @@\n layer._set_highlight(force=True)\n \n \n+DRAG_DIST_THRESHOLD = 5\n+\n+\n def add(layer, event):\n \"\"\"Add a new point at the clicked position.\"\"\"\n- # on press\n- dragged = False\n- yield\n \n- # on move\n- while event.type == 'mouse_move':\n- dragged = True\n+ if event.type == 'mouse_press':\n+ start_pos = event.pos\n+\n+ while event.type != 'mouse_release':\n yield\n \n- # on release\n- if not dragged:\n+ dist = np.linalg.norm(start_pos - event.pos)\n+ if dist < DRAG_DIST_THRESHOLD:\n coordinates = layer.world_to_data(event.position)\n layer.add(coordinates)\n", "issue": "Points layer allows panning when in Add mode\n## \ud83d\udc1b Bug\r\nWhen in the `Add Points` mode on a points layer panning is not prevented. This can make it tricky to label a point as you have to hold the mouse perfectly still.\r\n\r\n\r\n\r\nIn contrast the `pick` mode of a labels layer prevents panning.\r\n\r\n## To Reproduce\r\n1. Open napari\r\n2. Add a points layer\r\n3. Move mouse around and then click\r\n\r\nAlternatively:\r\n`python examples/nD_points.py`\r\n\r\n## Expected behavior\r\nClicking when in `Add Points` mode will not allow panning, the same as when when in `Pick` mode for a labels layer.\r\n\r\n\r\n## Environment\r\n\r\n```\r\nnapari: 0.4.7.dev14+gc473058\r\nPlatform: Linux-5.8.0-7630-generic-x86_64-with-glibc2.31\r\nSystem: Pop!_OS 20.04 LTS\r\nPython: 3.9.1 | packaged by conda-forge | (default, Jan 26 2021, 01:34:10) [GCC 9.3.0]\r\nQt: 5.15.2\r\nPyQt5: 5.15.2\r\nNumPy: 1.19.5\r\nSciPy: 1.6.0\r\nDask: 2021.01.1\r\nVisPy: 0.6.6\r\n\r\nOpenGL:\r\n- GL version: 4.6 (Compatibility Profile) Mesa 20.2.6\r\n- MAX_TEXTURE_SIZE: 16384\r\n\r\nScreens:\r\n- screen 1: resolution 1920x1080, scale 1.0\r\n\r\nPlugins:\r\n- console: 0.0.3\r\n- svg: 0.1.4\r\n```\r\n\r\n## Additional context\r\n\r\n<!-- Add any other context about the problem here. -->\r\n\n", "before_files": [{"content": "import numpy as np\n\nfrom ._points_utils import points_in_box\n\n\ndef select(layer, event):\n \"\"\"Select points.\n\n Clicking on a point will select that point. If holding shift while clicking\n that point will be added to or removed from the existing selection\n depending on whether it is selected or not.\n\n Clicking and dragging a point that is already selected will drag all the\n currently selected points.\n\n Clicking and dragging on an empty part of the canvas (i.e. 
not on a point)\n will create a drag box that will select all points inside it when finished.\n Holding shift throughout the entirety of this process will add those points\n to any existing selection, otherwise these will become the only selected\n points.\n \"\"\"\n # on press\n modify_selection = (\n 'Shift' in event.modifiers or 'Control' in event.modifiers\n )\n\n # Get value under the cursor, for points, this is the index of the highlighted\n # if any, or None.\n value = layer.get_value(event.position, world=True)\n # if modifying selection add / remove any from existing selection\n if modify_selection:\n if value is not None:\n layer.selected_data = _toggle_selected(layer.selected_data, value)\n else:\n if value is not None:\n # If the current index is not in the current list make it the only\n # index selected, otherwise don't change the selection so that\n # the current selection can be dragged together.\n if value not in layer.selected_data:\n layer.selected_data = {value}\n else:\n layer.selected_data = set()\n layer._set_highlight()\n\n yield\n\n # on move\n while event.type == 'mouse_move':\n coordinates = layer.world_to_data(event.position)\n # If not holding modifying selection and points selected then drag them\n if not modify_selection and len(layer.selected_data) > 0:\n layer._move(layer.selected_data, coordinates)\n else:\n coord = [coordinates[i] for i in layer._dims_displayed]\n layer._is_selecting = True\n if layer._drag_start is None:\n layer._drag_start = coord\n layer._drag_box = np.array([layer._drag_start, coord])\n layer._set_highlight()\n yield\n\n # on release\n layer._drag_start = None\n if layer._is_selecting:\n layer._is_selecting = False\n if len(layer._view_data) > 0:\n selection = points_in_box(\n layer._drag_box, layer._view_data, layer._view_size\n )\n # If shift combine drag selection with existing selected ones\n if modify_selection:\n new_selected = layer._indices_view[selection]\n target = set(layer.selected_data).symmetric_difference(\n set(new_selected)\n )\n layer.selected_data = list(target)\n else:\n layer.selected_data = layer._indices_view[selection]\n else:\n layer.selected_data = set()\n layer._set_highlight(force=True)\n\n\ndef add(layer, event):\n \"\"\"Add a new point at the clicked position.\"\"\"\n # on press\n dragged = False\n yield\n\n # on move\n while event.type == 'mouse_move':\n dragged = True\n yield\n\n # on release\n if not dragged:\n coordinates = layer.world_to_data(event.position)\n layer.add(coordinates)\n\n\ndef highlight(layer, event):\n \"\"\"Highlight hovered points.\"\"\"\n layer._set_highlight()\n\n\ndef _toggle_selected(selected_data, value):\n \"\"\"Add or remove value from the selected data set.\n\n Parameters\n ----------\n selected_data : set\n Set of selected data points to be modified.\n value : int\n Index of point to add or remove from selected data set.\n\n Returns\n -------\n set\n Modified selected_data set.\n \"\"\"\n if value in selected_data:\n selected_data.remove(value)\n else:\n selected_data.add(value)\n\n return selected_data\n", "path": "napari/layers/points/_points_mouse_bindings.py"}], "after_files": [{"content": "import numpy as np\n\nfrom ._points_utils import points_in_box\n\n\ndef select(layer, event):\n \"\"\"Select points.\n\n Clicking on a point will select that point. 
If holding shift while clicking\n that point will be added to or removed from the existing selection\n depending on whether it is selected or not.\n\n Clicking and dragging a point that is already selected will drag all the\n currently selected points.\n\n Clicking and dragging on an empty part of the canvas (i.e. not on a point)\n will create a drag box that will select all points inside it when finished.\n Holding shift throughout the entirety of this process will add those points\n to any existing selection, otherwise these will become the only selected\n points.\n \"\"\"\n # on press\n modify_selection = (\n 'Shift' in event.modifiers or 'Control' in event.modifiers\n )\n\n # Get value under the cursor, for points, this is the index of the highlighted\n # if any, or None.\n value = layer.get_value(event.position, world=True)\n # if modifying selection add / remove any from existing selection\n if modify_selection:\n if value is not None:\n layer.selected_data = _toggle_selected(layer.selected_data, value)\n else:\n if value is not None:\n # If the current index is not in the current list make it the only\n # index selected, otherwise don't change the selection so that\n # the current selection can be dragged together.\n if value not in layer.selected_data:\n layer.selected_data = {value}\n else:\n layer.selected_data = set()\n layer._set_highlight()\n\n yield\n\n # on move\n while event.type == 'mouse_move':\n coordinates = layer.world_to_data(event.position)\n # If not holding modifying selection and points selected then drag them\n if not modify_selection and len(layer.selected_data) > 0:\n layer._move(layer.selected_data, coordinates)\n else:\n coord = [coordinates[i] for i in layer._dims_displayed]\n layer._is_selecting = True\n if layer._drag_start is None:\n layer._drag_start = coord\n layer._drag_box = np.array([layer._drag_start, coord])\n layer._set_highlight()\n yield\n\n # on release\n layer._drag_start = None\n if layer._is_selecting:\n layer._is_selecting = False\n if len(layer._view_data) > 0:\n selection = points_in_box(\n layer._drag_box, layer._view_data, layer._view_size\n )\n # If shift combine drag selection with existing selected ones\n if modify_selection:\n new_selected = layer._indices_view[selection]\n target = set(layer.selected_data).symmetric_difference(\n set(new_selected)\n )\n layer.selected_data = list(target)\n else:\n layer.selected_data = layer._indices_view[selection]\n else:\n layer.selected_data = set()\n layer._set_highlight(force=True)\n\n\nDRAG_DIST_THRESHOLD = 5\n\n\ndef add(layer, event):\n \"\"\"Add a new point at the clicked position.\"\"\"\n\n if event.type == 'mouse_press':\n start_pos = event.pos\n\n while event.type != 'mouse_release':\n yield\n\n dist = np.linalg.norm(start_pos - event.pos)\n if dist < DRAG_DIST_THRESHOLD:\n coordinates = layer.world_to_data(event.position)\n layer.add(coordinates)\n\n\ndef highlight(layer, event):\n \"\"\"Highlight hovered points.\"\"\"\n layer._set_highlight()\n\n\ndef _toggle_selected(selected_data, value):\n \"\"\"Add or remove value from the selected data set.\n\n Parameters\n ----------\n selected_data : set\n Set of selected data points to be modified.\n value : int\n Index of point to add or remove from selected data set.\n\n Returns\n -------\n set\n Modified selected_data set.\n \"\"\"\n if value in selected_data:\n selected_data.remove(value)\n else:\n selected_data.add(value)\n\n return selected_data\n", "path": "napari/layers/points/_points_mouse_bindings.py"}]} | 1,904 | 231 |
gh_patches_debug_4941 | rasdani/github-patches | git_diff | Parsl__parsl-1156 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Monitoring should be an optional extra
We should push this out as 0.8.1. This is needed by the NERSC DESC stack.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup, find_packages
2
3 with open('parsl/version.py') as f:
4 exec(f.read())
5
6 with open('requirements.txt') as f:
7 install_requires = f.readlines()
8
9 extras_require = {
10 'aws' : ['boto3'],
11 'kubernetes' : ['kubernetes'],
12 'oauth_ssh' : ['oauth-ssh>=0.9'],
13 'extreme_scale' : ['mpi4py'],
14 'docs' : ['nbsphinx', 'sphinx_rtd_theme'],
15 'google_cloud' : ['google-auth', 'google-api-python-client'],
16 'gssapi' : ['python-gssapi'],
17 'azure' : ['azure', 'msrestazure'],
18 'workqueue': ['work_queue'],
19 }
20 extras_require['all'] = sum(extras_require.values(), [])
21
22 setup(
23 name='parsl',
24 version=VERSION,
25 description='Simple data dependent workflows in Python',
26 long_description='Simple parallel workflows system for Python',
27 url='https://github.com/Parsl/parsl',
28 author='The Parsl Team',
29 author_email='[email protected]',
30 license='Apache 2.0',
31 download_url='https://github.com/Parsl/parsl/archive/{}.tar.gz'.format(VERSION),
32 include_package_data=True,
33 packages=find_packages(),
34 install_requires=install_requires,
35 scripts = ['parsl/executors/high_throughput/process_worker_pool.py',
36 'parsl/executors/extreme_scale/mpi_worker_pool.py',
37 'parsl/executors/low_latency/lowlatency_worker.py',
38 'parsl/executors/workqueue/workqueue_worker.py',
39 ],
40
41 extras_require=extras_require,
42 classifiers=[
43 # Maturity
44 'Development Status :: 3 - Alpha',
45 # Intended audience
46 'Intended Audience :: Developers',
47 # Licence, must match with licence above
48 'License :: OSI Approved :: Apache Software License',
49 # Python versions supported
50 'Programming Language :: Python :: 3.5',
51 'Programming Language :: Python :: 3.6',
52 ],
53 keywords=['Workflows', 'Scientific computing'],
54 entry_points={'console_scripts':
55 [
56 'parsl-globus-auth=parsl.data_provider.globus:cli_run',
57 'parsl-visualize=parsl.monitoring.visualization.app:cli_run',
58 ]}
59 )
60
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -7,6 +7,17 @@
install_requires = f.readlines()
extras_require = {
+ 'monitoring' : [
+ 'sqlalchemy>=1.3.0,!=1.3.4',
+ 'sqlalchemy_utils',
+ 'pydot',
+ 'networkx',
+ 'Flask>=1.0.2',
+ 'flask_sqlalchemy',
+ 'pandas',
+ 'plotly',
+ 'python-daemon'
+ ],
'aws' : ['boto3'],
'kubernetes' : ['kubernetes'],
'oauth_ssh' : ['oauth-ssh>=0.9'],
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -7,6 +7,17 @@\n install_requires = f.readlines()\n \n extras_require = {\n+ 'monitoring' : [\n+ 'sqlalchemy>=1.3.0,!=1.3.4',\n+ 'sqlalchemy_utils',\n+ 'pydot',\n+ 'networkx',\n+ 'Flask>=1.0.2',\n+ 'flask_sqlalchemy',\n+ 'pandas',\n+ 'plotly',\n+ 'python-daemon'\n+ ],\n 'aws' : ['boto3'],\n 'kubernetes' : ['kubernetes'],\n 'oauth_ssh' : ['oauth-ssh>=0.9'],\n", "issue": "Monitoring should be an optional extra\nWe should push this out as 0.8.1. This is needed by the nersc DESC stack.\n", "before_files": [{"content": "from setuptools import setup, find_packages\n\nwith open('parsl/version.py') as f:\n exec(f.read())\n\nwith open('requirements.txt') as f:\n install_requires = f.readlines()\n\nextras_require = {\n 'aws' : ['boto3'],\n 'kubernetes' : ['kubernetes'],\n 'oauth_ssh' : ['oauth-ssh>=0.9'],\n 'extreme_scale' : ['mpi4py'],\n 'docs' : ['nbsphinx', 'sphinx_rtd_theme'],\n 'google_cloud' : ['google-auth', 'google-api-python-client'],\n 'gssapi' : ['python-gssapi'],\n 'azure' : ['azure', 'msrestazure'],\n 'workqueue': ['work_queue'],\n}\nextras_require['all'] = sum(extras_require.values(), [])\n\nsetup(\n name='parsl',\n version=VERSION,\n description='Simple data dependent workflows in Python',\n long_description='Simple parallel workflows system for Python',\n url='https://github.com/Parsl/parsl',\n author='The Parsl Team',\n author_email='[email protected]',\n license='Apache 2.0',\n download_url='https://github.com/Parsl/parsl/archive/{}.tar.gz'.format(VERSION),\n include_package_data=True,\n packages=find_packages(),\n install_requires=install_requires,\n scripts = ['parsl/executors/high_throughput/process_worker_pool.py',\n 'parsl/executors/extreme_scale/mpi_worker_pool.py',\n 'parsl/executors/low_latency/lowlatency_worker.py',\n 'parsl/executors/workqueue/workqueue_worker.py',\n ],\n\n extras_require=extras_require,\n classifiers=[\n # Maturity\n 'Development Status :: 3 - Alpha',\n # Intended audience\n 'Intended Audience :: Developers',\n # Licence, must match with licence above\n 'License :: OSI Approved :: Apache Software License',\n # Python versions supported\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n ],\n keywords=['Workflows', 'Scientific computing'],\n entry_points={'console_scripts':\n [\n 'parsl-globus-auth=parsl.data_provider.globus:cli_run',\n 'parsl-visualize=parsl.monitoring.visualization.app:cli_run',\n ]}\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup, find_packages\n\nwith open('parsl/version.py') as f:\n exec(f.read())\n\nwith open('requirements.txt') as f:\n install_requires = f.readlines()\n\nextras_require = {\n 'monitoring' : [\n 'sqlalchemy>=1.3.0,!=1.3.4',\n 'sqlalchemy_utils',\n 'pydot',\n 'networkx',\n 'Flask>=1.0.2',\n 'flask_sqlalchemy',\n 'pandas',\n 'plotly',\n 'python-daemon'\n ],\n 'aws' : ['boto3'],\n 'kubernetes' : ['kubernetes'],\n 'oauth_ssh' : ['oauth-ssh>=0.9'],\n 'extreme_scale' : ['mpi4py'],\n 'docs' : ['nbsphinx', 'sphinx_rtd_theme'],\n 'google_cloud' : ['google-auth', 'google-api-python-client'],\n 'gssapi' : ['python-gssapi'],\n 'azure' : ['azure', 'msrestazure'],\n 'workqueue': ['work_queue'],\n}\nextras_require['all'] = sum(extras_require.values(), [])\n\nsetup(\n name='parsl',\n version=VERSION,\n description='Simple data dependent workflows in Python',\n long_description='Simple parallel workflows system for Python',\n url='https://github.com/Parsl/parsl',\n 
author='The Parsl Team',\n author_email='[email protected]',\n license='Apache 2.0',\n download_url='https://github.com/Parsl/parsl/archive/{}.tar.gz'.format(VERSION),\n include_package_data=True,\n packages=find_packages(),\n install_requires=install_requires,\n scripts = ['parsl/executors/high_throughput/process_worker_pool.py',\n 'parsl/executors/extreme_scale/mpi_worker_pool.py',\n 'parsl/executors/low_latency/lowlatency_worker.py',\n 'parsl/executors/workqueue/workqueue_worker.py',\n ],\n\n extras_require=extras_require,\n classifiers=[\n # Maturity\n 'Development Status :: 3 - Alpha',\n # Intended audience\n 'Intended Audience :: Developers',\n # Licence, must match with licence above\n 'License :: OSI Approved :: Apache Software License',\n # Python versions supported\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n ],\n keywords=['Workflows', 'Scientific computing'],\n entry_points={'console_scripts':\n [\n 'parsl-globus-auth=parsl.data_provider.globus:cli_run',\n 'parsl-visualize=parsl.monitoring.visualization.app:cli_run',\n ]}\n)\n", "path": "setup.py"}]} | 909 | 166 |
gh_patches_debug_2799 | rasdani/github-patches | git_diff | kartoza__prj.app-199 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Atom feed gets a 500 error
See http://sentry.kartoza.com/kartoza/projecta-live/group/5846/
Problem is caused by the models/entry.py get_absolute_url() method, which still uses the old slug system to resolve the path to an entry.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django_project/changes/models/entry.py`
Content:
```
1 # coding=utf-8
2 """Models for changelog entries."""
3 from django.core.urlresolvers import reverse
4 from django.utils.text import slugify
5 import os
6 import logging
7 from core.settings.contrib import STOP_WORDS
8 from django.conf.global_settings import MEDIA_ROOT
9 from django.db import models
10 from embed_video.fields import EmbedVideoField
11 from django.contrib.auth.models import User
12
13 logger = logging.getLogger(__name__)
14
15
16 class ApprovedEntryManager(models.Manager):
17 """Custom entry manager that shows only approved records."""
18
19 def get_queryset(self):
20 """Query set generator"""
21 return super(
22 ApprovedEntryManager, self).get_queryset().filter(
23 approved=True)
24
25
26 class UnapprovedEntryManager(models.Manager):
27 """Custom entry manager that shows only unapproved records."""
28
29 def get_queryset(self):
30 """Query set generator"""
31 return super(
32 UnapprovedEntryManager, self).get_queryset().filter(
33 approved=False)
34
35
36 class Entry(models.Model):
37 """An entry is the basic unit of a changelog."""
38 title = models.CharField(
39 help_text='Feature title for this changelog entry.',
40 max_length=255,
41 null=False,
42 blank=False,
43 unique=False) # Unique together rule applies in meta class
44
45 description = models.TextField(
46 null=True,
47 blank=True,
48 help_text='Describe the new feature. Markdown is supported.')
49
50 image_file = models.ImageField(
51 help_text=(
52 'A image that is related to this visual changelog entry. '
53 'Most browsers support dragging the image directly on to the '
54 '"Choose File" button above.'),
55 upload_to=os.path.join(MEDIA_ROOT, 'images/entries'),
56 blank=True)
57
58 image_credits = models.CharField(
59 help_text='Who should be credited for this image?',
60 max_length=255,
61 null=True,
62 blank=True)
63
64 video = EmbedVideoField(
65 verbose_name='Youtube video',
66 help_text='Paste your youtube video link',
67 null=True,
68 blank=True)
69
70 funded_by = models.CharField(
71 help_text='Input the funder name.',
72 max_length=255,
73 null=True,
74 blank=True)
75
76 funder_url = models.CharField(
77 help_text='Input the funder URL.',
78 max_length=255,
79 null=True,
80 blank=True)
81
82 developed_by = models.CharField(
83 help_text='Input the developer name.',
84 max_length=255,
85 null=True,
86 blank=True)
87
88 developer_url = models.CharField(
89 help_text='Input the developer URL.',
90 max_length=255,
91 null=True,
92 blank=True)
93
94 approved = models.BooleanField(
95 help_text=(
96 'Whether this entry has been approved for use by the '
97 'project owner.'),
98 default=False
99 )
100 author = models.ForeignKey(User)
101 slug = models.SlugField()
102 # noinspection PyUnresolvedReferences
103 version = models.ForeignKey('Version')
104 # noinspection PyUnresolvedReferences
105 category = models.ForeignKey('Category')
106 objects = models.Manager()
107 approved_objects = ApprovedEntryManager()
108 unapproved_objects = UnapprovedEntryManager()
109
110 # noinspection PyClassicStyleClass
111 class Meta:
112 """Meta options for the version class."""
113 unique_together = (
114 ('title', 'version', 'category'),
115 ('version', 'slug'),
116 )
117 app_label = 'changes'
118
119 def save(self, *args, **kwargs):
120 if not self.pk:
121 words = self.title.split()
122 filtered_words = [t for t in words if t.lower() not in STOP_WORDS]
123 new_list = ' '.join(filtered_words)
124 self.slug = slugify(new_list)[:50]
125 super(Entry, self).save(*args, **kwargs)
126
127 def __unicode__(self):
128 return u'%s' % self.title
129
130 def get_absolute_url(self):
131 return reverse('entry-detail', kwargs={
132 'slug': self.slug,
133 'version_slug': self.version.slug,
134 'project_slug': self.version.project.slug
135 })
136
137 def funder_info_html(self):
138 string = ""
139 if self.funded_by and self.funder_url is None:
140 string = ""
141 return string
142 elif self.funded_by and not self.funder_url:
143 string = "This feature was funded by %s " % self.funded_by
144 return string
145 elif self.funder_url and not self.funded_by:
146 string = "This feature was funded by [%s](%s)" % (
147 self.funder_url, self.funder_url)
148 return string
149 elif self.funded_by and self.funder_url:
150 string = "This feature was funded by [%s](%s)" % (
151 self.funded_by, self.funder_url)
152 return string
153 else:
154 return string
155
156 def developer_info_html(self):
157 string = ""
158 if self.developed_by and self.developer_url is None:
159 string = ""
160 return string
161 elif self.developed_by and not self.developer_url:
162 string = "This feature was developed by %s " % self.developed_by
163 return string
164 elif self.developer_url and not self.developed_by:
165 string = "This feature was developed by [%s](%s)" % (
166 self.developer_url, self.developer_url)
167 return string
168 elif self.developed_by and self.developer_url:
169 string = "This feature was developed by [%s](%s)" % (
170 self.developed_by, self.developer_url)
171 return string
172 else:
173 return string
174
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/django_project/changes/models/entry.py b/django_project/changes/models/entry.py
--- a/django_project/changes/models/entry.py
+++ b/django_project/changes/models/entry.py
@@ -129,9 +129,7 @@
def get_absolute_url(self):
return reverse('entry-detail', kwargs={
- 'slug': self.slug,
- 'version_slug': self.version.slug,
- 'project_slug': self.version.project.slug
+ 'pk': self.id
})
def funder_info_html(self):
| {"golden_diff": "diff --git a/django_project/changes/models/entry.py b/django_project/changes/models/entry.py\n--- a/django_project/changes/models/entry.py\n+++ b/django_project/changes/models/entry.py\n@@ -129,9 +129,7 @@\n \n def get_absolute_url(self):\n return reverse('entry-detail', kwargs={\n- 'slug': self.slug,\n- 'version_slug': self.version.slug,\n- 'project_slug': self.version.project.slug\n+ 'pk': self.id\n })\n \n def funder_info_html(self):\n", "issue": "Atom feed gets a 500 error\nSee http://sentry.kartoza.com/kartoza/projecta-live/group/5846/\n\nProblem is cause by models/entry.py get_absolute_url() method which still uses old slug system to resolve path to an entry.\n\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"Models for changelog entries.\"\"\"\nfrom django.core.urlresolvers import reverse\nfrom django.utils.text import slugify\nimport os\nimport logging\nfrom core.settings.contrib import STOP_WORDS\nfrom django.conf.global_settings import MEDIA_ROOT\nfrom django.db import models\nfrom embed_video.fields import EmbedVideoField\nfrom django.contrib.auth.models import User\n\nlogger = logging.getLogger(__name__)\n\n\nclass ApprovedEntryManager(models.Manager):\n \"\"\"Custom entry manager that shows only approved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n ApprovedEntryManager, self).get_queryset().filter(\n approved=True)\n\n\nclass UnapprovedEntryManager(models.Manager):\n \"\"\"Custom entry manager that shows only unapproved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n UnapprovedEntryManager, self).get_queryset().filter(\n approved=False)\n\n\nclass Entry(models.Model):\n \"\"\"An entry is the basic unit of a changelog.\"\"\"\n title = models.CharField(\n help_text='Feature title for this changelog entry.',\n max_length=255,\n null=False,\n blank=False,\n unique=False) # Unique together rule applies in meta class\n\n description = models.TextField(\n null=True,\n blank=True,\n help_text='Describe the new feature. Markdown is supported.')\n\n image_file = models.ImageField(\n help_text=(\n 'A image that is related to this visual changelog entry. 
'\n 'Most browsers support dragging the image directly on to the '\n '\"Choose File\" button above.'),\n upload_to=os.path.join(MEDIA_ROOT, 'images/entries'),\n blank=True)\n\n image_credits = models.CharField(\n help_text='Who should be credited for this image?',\n max_length=255,\n null=True,\n blank=True)\n\n video = EmbedVideoField(\n verbose_name='Youtube video',\n help_text='Paste your youtube video link',\n null=True,\n blank=True)\n\n funded_by = models.CharField(\n help_text='Input the funder name.',\n max_length=255,\n null=True,\n blank=True)\n\n funder_url = models.CharField(\n help_text='Input the funder URL.',\n max_length=255,\n null=True,\n blank=True)\n\n developed_by = models.CharField(\n help_text='Input the developer name.',\n max_length=255,\n null=True,\n blank=True)\n\n developer_url = models.CharField(\n help_text='Input the developer URL.',\n max_length=255,\n null=True,\n blank=True)\n\n approved = models.BooleanField(\n help_text=(\n 'Whether this entry has been approved for use by the '\n 'project owner.'),\n default=False\n )\n author = models.ForeignKey(User)\n slug = models.SlugField()\n # noinspection PyUnresolvedReferences\n version = models.ForeignKey('Version')\n # noinspection PyUnresolvedReferences\n category = models.ForeignKey('Category')\n objects = models.Manager()\n approved_objects = ApprovedEntryManager()\n unapproved_objects = UnapprovedEntryManager()\n\n # noinspection PyClassicStyleClass\n class Meta:\n \"\"\"Meta options for the version class.\"\"\"\n unique_together = (\n ('title', 'version', 'category'),\n ('version', 'slug'),\n )\n app_label = 'changes'\n\n def save(self, *args, **kwargs):\n if not self.pk:\n words = self.title.split()\n filtered_words = [t for t in words if t.lower() not in STOP_WORDS]\n new_list = ' '.join(filtered_words)\n self.slug = slugify(new_list)[:50]\n super(Entry, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('entry-detail', kwargs={\n 'slug': self.slug,\n 'version_slug': self.version.slug,\n 'project_slug': self.version.project.slug\n })\n\n def funder_info_html(self):\n string = \"\"\n if self.funded_by and self.funder_url is None:\n string = \"\"\n return string\n elif self.funded_by and not self.funder_url:\n string = \"This feature was funded by %s \" % self.funded_by\n return string\n elif self.funder_url and not self.funded_by:\n string = \"This feature was funded by [%s](%s)\" % (\n self.funder_url, self.funder_url)\n return string\n elif self.funded_by and self.funder_url:\n string = \"This feature was funded by [%s](%s)\" % (\n self.funded_by, self.funder_url)\n return string\n else:\n return string\n\n def developer_info_html(self):\n string = \"\"\n if self.developed_by and self.developer_url is None:\n string = \"\"\n return string\n elif self.developed_by and not self.developer_url:\n string = \"This feature was developed by %s \" % self.developed_by\n return string\n elif self.developer_url and not self.developed_by:\n string = \"This feature was developed by [%s](%s)\" % (\n self.developer_url, self.developer_url)\n return string\n elif self.developed_by and self.developer_url:\n string = \"This feature was developed by [%s](%s)\" % (\n self.developed_by, self.developer_url)\n return string\n else:\n return string\n", "path": "django_project/changes/models/entry.py"}], "after_files": [{"content": "# coding=utf-8\n\"\"\"Models for changelog entries.\"\"\"\nfrom django.core.urlresolvers import reverse\nfrom 
django.utils.text import slugify\nimport os\nimport logging\nfrom core.settings.contrib import STOP_WORDS\nfrom django.conf.global_settings import MEDIA_ROOT\nfrom django.db import models\nfrom embed_video.fields import EmbedVideoField\nfrom django.contrib.auth.models import User\n\nlogger = logging.getLogger(__name__)\n\n\nclass ApprovedEntryManager(models.Manager):\n \"\"\"Custom entry manager that shows only approved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n ApprovedEntryManager, self).get_queryset().filter(\n approved=True)\n\n\nclass UnapprovedEntryManager(models.Manager):\n \"\"\"Custom entry manager that shows only unapproved records.\"\"\"\n\n def get_queryset(self):\n \"\"\"Query set generator\"\"\"\n return super(\n UnapprovedEntryManager, self).get_queryset().filter(\n approved=False)\n\n\nclass Entry(models.Model):\n \"\"\"An entry is the basic unit of a changelog.\"\"\"\n title = models.CharField(\n help_text='Feature title for this changelog entry.',\n max_length=255,\n null=False,\n blank=False,\n unique=False) # Unique together rule applies in meta class\n\n description = models.TextField(\n null=True,\n blank=True,\n help_text='Describe the new feature. Markdown is supported.')\n\n image_file = models.ImageField(\n help_text=(\n 'A image that is related to this visual changelog entry. '\n 'Most browsers support dragging the image directly on to the '\n '\"Choose File\" button above.'),\n upload_to=os.path.join(MEDIA_ROOT, 'images/entries'),\n blank=True)\n\n image_credits = models.CharField(\n help_text='Who should be credited for this image?',\n max_length=255,\n null=True,\n blank=True)\n\n video = EmbedVideoField(\n verbose_name='Youtube video',\n help_text='Paste your youtube video link',\n null=True,\n blank=True)\n\n funded_by = models.CharField(\n help_text='Input the funder name.',\n max_length=255,\n null=True,\n blank=True)\n\n funder_url = models.CharField(\n help_text='Input the funder URL.',\n max_length=255,\n null=True,\n blank=True)\n\n developed_by = models.CharField(\n help_text='Input the developer name.',\n max_length=255,\n null=True,\n blank=True)\n\n developer_url = models.CharField(\n help_text='Input the developer URL.',\n max_length=255,\n null=True,\n blank=True)\n\n approved = models.BooleanField(\n help_text=(\n 'Whether this entry has been approved for use by the '\n 'project owner.'),\n default=False\n )\n author = models.ForeignKey(User)\n slug = models.SlugField()\n # noinspection PyUnresolvedReferences\n version = models.ForeignKey('Version')\n # noinspection PyUnresolvedReferences\n category = models.ForeignKey('Category')\n objects = models.Manager()\n approved_objects = ApprovedEntryManager()\n unapproved_objects = UnapprovedEntryManager()\n\n # noinspection PyClassicStyleClass\n class Meta:\n \"\"\"Meta options for the version class.\"\"\"\n unique_together = (\n ('title', 'version', 'category'),\n ('version', 'slug'),\n )\n app_label = 'changes'\n\n def save(self, *args, **kwargs):\n if not self.pk:\n words = self.title.split()\n filtered_words = [t for t in words if t.lower() not in STOP_WORDS]\n new_list = ' '.join(filtered_words)\n self.slug = slugify(new_list)[:50]\n super(Entry, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return u'%s' % self.title\n\n def get_absolute_url(self):\n return reverse('entry-detail', kwargs={\n 'pk': self.id\n })\n\n def funder_info_html(self):\n string = \"\"\n if self.funded_by and self.funder_url is None:\n string = \"\"\n return 
string\n elif self.funded_by and not self.funder_url:\n string = \"This feature was funded by %s \" % self.funded_by\n return string\n elif self.funder_url and not self.funded_by:\n string = \"This feature was funded by [%s](%s)\" % (\n self.funder_url, self.funder_url)\n return string\n elif self.funded_by and self.funder_url:\n string = \"This feature was funded by [%s](%s)\" % (\n self.funded_by, self.funder_url)\n return string\n else:\n return string\n\n def developer_info_html(self):\n string = \"\"\n if self.developed_by and self.developer_url is None:\n string = \"\"\n return string\n elif self.developed_by and not self.developer_url:\n string = \"This feature was developed by %s \" % self.developed_by\n return string\n elif self.developer_url and not self.developed_by:\n string = \"This feature was developed by [%s](%s)\" % (\n self.developer_url, self.developer_url)\n return string\n elif self.developed_by and self.developer_url:\n string = \"This feature was developed by [%s](%s)\" % (\n self.developed_by, self.developer_url)\n return string\n else:\n return string\n", "path": "django_project/changes/models/entry.py"}]} | 1,938 | 127 |
gh_patches_debug_490 | rasdani/github-patches | git_diff | scikit-hep__awkward-2009 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`ak.type` does not accept "anything `ak.to_layout` recognizes"
### Version of Awkward Array
main
### Description and code to reproduce
```python
>>> import awkward as ak
>>> ak.type([1,2,3])
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[12], line 1
----> 1 ak.type([1,2,3])
File /lib/python3.10/site-packages/awkward/operations/ak_type.py:77, in type(array)
15 """
16 Args:
17 array: Array-like data (anything #ak.to_layout recognizes).
(...)
71 to the language.)
72 """
73 with ak._errors.OperationErrorContext(
74 "ak.type",
75 dict(array=array),
76 ):
---> 77 return _impl(array)
File /lib/python3.10/site-packages/awkward/operations/ak_type.py:144, in _impl(array)
141 return array.form.type
143 else:
--> 144 raise ak._errors.wrap_error(TypeError(f"unrecognized array type: {array!r}"))
TypeError: while calling
ak.type(
array = [1, 2, 3]
)
Error details: unrecognized array type: [1, 2, 3]
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/awkward/operations/ak_type.py`
Content:
```
1 # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
2
3 import builtins
4 import numbers
5 from datetime import datetime, timedelta
6
7 from awkward_cpp.lib import _ext
8
9 import awkward as ak
10
11 np = ak._nplikes.NumpyMetadata.instance()
12
13
14 def type(array):
15 """
16 Args:
17 array: Array-like data (anything #ak.to_layout recognizes).
18
19 The high-level type of an `array` (many types supported, including all
20 Awkward Arrays and Records) as #ak.types.Type objects.
21
22 The high-level type ignores layout differences like
23 #ak.contents.ListArray versus #ak.contents.ListOffsetArray, but
24 not differences like "regular-sized lists" (i.e.
25 #ak.contents.RegularArray) versus "variable-sized lists" (i.e.
26 #ak.contents.ListArray and similar).
27
28 Types are rendered as [Datashape](https://datashape.readthedocs.io/)
29 strings, which makes the same distinctions.
30
31 For example,
32
33 >>> array = ak.Array([[{"x": 1.1, "y": [1]}, {"x": 2.2, "y": [2, 2]}],
34 ... [],
35 ... [{"x": 3.3, "y": [3, 3, 3]}]])
36
37 has type
38
39 >>> ak.type(array).show()
40 3 * var * {
41 x: float64,
42 y: var * int64
43 }
44
45 but
46
47 >>> array = ak.Array(np.arange(2*3*5).reshape(2, 3, 5))
48
49 has type
50
51 >>> ak.type(array).show()
52 2 * 3 * 5 * int64
53
54 Some cases, like heterogeneous data, require [extensions beyond the
55 Datashape specification](https://github.com/blaze/datashape/issues/237).
56 For example,
57
58 >>> array = ak.Array([1, "two", [3, 3, 3]])
59
60 has type
61
62 >>> ak.type(array).show()
63 3 * union[
64 int64,
65 string,
66 var * int64
67 ]
68
69 but "union" is not a Datashape type-constructor. (Its syntax is
70 similar to existing type-constructors, so it's a plausible addition
71 to the language.)
72 """
73 with ak._errors.OperationErrorContext(
74 "ak.type",
75 dict(array=array),
76 ):
77 return _impl(array)
78
79
80 def _impl(array):
81 if array is None:
82 return ak.types.UnknownType()
83
84 elif isinstance(array, np.dtype):
85 return ak.types.NumpyType(ak.types.numpytype.dtype_to_primitive(array))
86
87 elif (
88 isinstance(array, np.generic)
89 or isinstance(array, builtins.type)
90 and issubclass(array, np.generic)
91 ):
92 primitive = ak.types.numpytype.dtype_to_primitive(np.dtype(array))
93 return ak.types.NumpyType(primitive)
94
95 elif isinstance(array, bool): # np.bool_ in np.generic (above)
96 return ak.types.NumpyType("bool")
97
98 elif isinstance(array, numbers.Integral):
99 return ak.types.NumpyType("int64")
100
101 elif isinstance(array, numbers.Real):
102 return ak.types.NumpyType("float64")
103
104 elif isinstance(array, numbers.Complex):
105 return ak.types.NumpyType("complex128")
106
107 elif isinstance(array, datetime): # np.datetime64 in np.generic (above)
108 return ak.types.NumpyType("datetime64")
109
110 elif isinstance(array, timedelta): # np.timedelta64 in np.generic (above)
111 return ak.types.NumpyType("timedelta")
112
113 elif isinstance(
114 array,
115 (
116 ak.highlevel.Array,
117 ak.highlevel.Record,
118 ak.highlevel.ArrayBuilder,
119 ),
120 ):
121 return array.type
122
123 elif isinstance(array, np.ndarray):
124 if len(array.shape) == 0:
125 return _impl(array.reshape((1,))[0])
126 else:
127 primitive = ak.types.numpytype.dtype_to_primitive(array.dtype)
128 out = ak.types.NumpyType(primitive)
129 for x in array.shape[-1:0:-1]:
130 out = ak.types.RegularType(out, x)
131 return ak.types.ArrayType(out, array.shape[0])
132
133 elif isinstance(array, _ext.ArrayBuilder):
134 form = ak.forms.from_json(array.form())
135 return ak.types.ArrayType(form.type_from_behavior(None), len(array))
136
137 elif isinstance(array, ak.record.Record):
138 return array.array.form.type
139
140 elif isinstance(array, ak.contents.Content):
141 return array.form.type
142
143 else:
144 raise ak._errors.wrap_error(TypeError(f"unrecognized array type: {array!r}"))
145
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/awkward/operations/ak_type.py b/src/awkward/operations/ak_type.py
--- a/src/awkward/operations/ak_type.py
+++ b/src/awkward/operations/ak_type.py
@@ -141,4 +141,5 @@
return array.form.type
else:
- raise ak._errors.wrap_error(TypeError(f"unrecognized array type: {array!r}"))
+ layout = ak.to_layout(array, allow_other=False)
+ return _impl(ak._util.wrap(layout))
| {"golden_diff": "diff --git a/src/awkward/operations/ak_type.py b/src/awkward/operations/ak_type.py\n--- a/src/awkward/operations/ak_type.py\n+++ b/src/awkward/operations/ak_type.py\n@@ -141,4 +141,5 @@\n return array.form.type\n \n else:\n- raise ak._errors.wrap_error(TypeError(f\"unrecognized array type: {array!r}\"))\n+ layout = ak.to_layout(array, allow_other=False)\n+ return _impl(ak._util.wrap(layout))\n", "issue": "`ak.type` does not accept \"anything `ak.to_layout` recognizes\"\n### Version of Awkward Array\n\nmain\n\n### Description and code to reproduce\n\n```python\r\n>>> import awkward as ak\r\n>>> ak.type([1,2,3])\r\n\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\nCell In[12], line 1\r\n----> 1 ak.type([1,2,3])\r\n\r\nFile /lib/python3.10/site-packages/awkward/operations/ak_type.py:77, in type(array)\r\n 15 \"\"\"\r\n 16 Args:\r\n 17 array: Array-like data (anything #ak.to_layout recognizes).\r\n (...)\r\n 71 to the language.)\r\n 72 \"\"\"\r\n 73 with ak._errors.OperationErrorContext(\r\n 74 \"ak.type\",\r\n 75 dict(array=array),\r\n 76 ):\r\n---> 77 return _impl(array)\r\n\r\nFile /lib/python3.10/site-packages/awkward/operations/ak_type.py:144, in _impl(array)\r\n 141 return array.form.type\r\n 143 else:\r\n--> 144 raise ak._errors.wrap_error(TypeError(f\"unrecognized array type: {array!r}\"))\r\n\r\nTypeError: while calling\r\n\r\n ak.type(\r\n array = [1, 2, 3]\r\n )\r\n\r\nError details: unrecognized array type: [1, 2, 3]\r\n```\n", "before_files": [{"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport builtins\nimport numbers\nfrom datetime import datetime, timedelta\n\nfrom awkward_cpp.lib import _ext\n\nimport awkward as ak\n\nnp = ak._nplikes.NumpyMetadata.instance()\n\n\ndef type(array):\n \"\"\"\n Args:\n array: Array-like data (anything #ak.to_layout recognizes).\n\n The high-level type of an `array` (many types supported, including all\n Awkward Arrays and Records) as #ak.types.Type objects.\n\n The high-level type ignores layout differences like\n #ak.contents.ListArray versus #ak.contents.ListOffsetArray, but\n not differences like \"regular-sized lists\" (i.e.\n #ak.contents.RegularArray) versus \"variable-sized lists\" (i.e.\n #ak.contents.ListArray and similar).\n\n Types are rendered as [Datashape](https://datashape.readthedocs.io/)\n strings, which makes the same distinctions.\n\n For example,\n\n >>> array = ak.Array([[{\"x\": 1.1, \"y\": [1]}, {\"x\": 2.2, \"y\": [2, 2]}],\n ... [],\n ... [{\"x\": 3.3, \"y\": [3, 3, 3]}]])\n\n has type\n\n >>> ak.type(array).show()\n 3 * var * {\n x: float64,\n y: var * int64\n }\n\n but\n\n >>> array = ak.Array(np.arange(2*3*5).reshape(2, 3, 5))\n\n has type\n\n >>> ak.type(array).show()\n 2 * 3 * 5 * int64\n\n Some cases, like heterogeneous data, require [extensions beyond the\n Datashape specification](https://github.com/blaze/datashape/issues/237).\n For example,\n\n >>> array = ak.Array([1, \"two\", [3, 3, 3]])\n\n has type\n\n >>> ak.type(array).show()\n 3 * union[\n int64,\n string,\n var * int64\n ]\n\n but \"union\" is not a Datashape type-constructor. 
(Its syntax is\n similar to existing type-constructors, so it's a plausible addition\n to the language.)\n \"\"\"\n with ak._errors.OperationErrorContext(\n \"ak.type\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n if array is None:\n return ak.types.UnknownType()\n\n elif isinstance(array, np.dtype):\n return ak.types.NumpyType(ak.types.numpytype.dtype_to_primitive(array))\n\n elif (\n isinstance(array, np.generic)\n or isinstance(array, builtins.type)\n and issubclass(array, np.generic)\n ):\n primitive = ak.types.numpytype.dtype_to_primitive(np.dtype(array))\n return ak.types.NumpyType(primitive)\n\n elif isinstance(array, bool): # np.bool_ in np.generic (above)\n return ak.types.NumpyType(\"bool\")\n\n elif isinstance(array, numbers.Integral):\n return ak.types.NumpyType(\"int64\")\n\n elif isinstance(array, numbers.Real):\n return ak.types.NumpyType(\"float64\")\n\n elif isinstance(array, numbers.Complex):\n return ak.types.NumpyType(\"complex128\")\n\n elif isinstance(array, datetime): # np.datetime64 in np.generic (above)\n return ak.types.NumpyType(\"datetime64\")\n\n elif isinstance(array, timedelta): # np.timedelta64 in np.generic (above)\n return ak.types.NumpyType(\"timedelta\")\n\n elif isinstance(\n array,\n (\n ak.highlevel.Array,\n ak.highlevel.Record,\n ak.highlevel.ArrayBuilder,\n ),\n ):\n return array.type\n\n elif isinstance(array, np.ndarray):\n if len(array.shape) == 0:\n return _impl(array.reshape((1,))[0])\n else:\n primitive = ak.types.numpytype.dtype_to_primitive(array.dtype)\n out = ak.types.NumpyType(primitive)\n for x in array.shape[-1:0:-1]:\n out = ak.types.RegularType(out, x)\n return ak.types.ArrayType(out, array.shape[0])\n\n elif isinstance(array, _ext.ArrayBuilder):\n form = ak.forms.from_json(array.form())\n return ak.types.ArrayType(form.type_from_behavior(None), len(array))\n\n elif isinstance(array, ak.record.Record):\n return array.array.form.type\n\n elif isinstance(array, ak.contents.Content):\n return array.form.type\n\n else:\n raise ak._errors.wrap_error(TypeError(f\"unrecognized array type: {array!r}\"))\n", "path": "src/awkward/operations/ak_type.py"}], "after_files": [{"content": "# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE\n\nimport builtins\nimport numbers\nfrom datetime import datetime, timedelta\n\nfrom awkward_cpp.lib import _ext\n\nimport awkward as ak\n\nnp = ak._nplikes.NumpyMetadata.instance()\n\n\ndef type(array):\n \"\"\"\n Args:\n array: Array-like data (anything #ak.to_layout recognizes).\n\n The high-level type of an `array` (many types supported, including all\n Awkward Arrays and Records) as #ak.types.Type objects.\n\n The high-level type ignores layout differences like\n #ak.contents.ListArray versus #ak.contents.ListOffsetArray, but\n not differences like \"regular-sized lists\" (i.e.\n #ak.contents.RegularArray) versus \"variable-sized lists\" (i.e.\n #ak.contents.ListArray and similar).\n\n Types are rendered as [Datashape](https://datashape.readthedocs.io/)\n strings, which makes the same distinctions.\n\n For example,\n\n >>> array = ak.Array([[{\"x\": 1.1, \"y\": [1]}, {\"x\": 2.2, \"y\": [2, 2]}],\n ... [],\n ... 
[{\"x\": 3.3, \"y\": [3, 3, 3]}]])\n\n has type\n\n >>> ak.type(array).show()\n 3 * var * {\n x: float64,\n y: var * int64\n }\n\n but\n\n >>> array = ak.Array(np.arange(2*3*5).reshape(2, 3, 5))\n\n has type\n\n >>> ak.type(array).show()\n 2 * 3 * 5 * int64\n\n Some cases, like heterogeneous data, require [extensions beyond the\n Datashape specification](https://github.com/blaze/datashape/issues/237).\n For example,\n\n >>> array = ak.Array([1, \"two\", [3, 3, 3]])\n\n has type\n\n >>> ak.type(array).show()\n 3 * union[\n int64,\n string,\n var * int64\n ]\n\n but \"union\" is not a Datashape type-constructor. (Its syntax is\n similar to existing type-constructors, so it's a plausible addition\n to the language.)\n \"\"\"\n with ak._errors.OperationErrorContext(\n \"ak.type\",\n dict(array=array),\n ):\n return _impl(array)\n\n\ndef _impl(array):\n if array is None:\n return ak.types.UnknownType()\n\n elif isinstance(array, np.dtype):\n return ak.types.NumpyType(ak.types.numpytype.dtype_to_primitive(array))\n\n elif (\n isinstance(array, np.generic)\n or isinstance(array, builtins.type)\n and issubclass(array, np.generic)\n ):\n primitive = ak.types.numpytype.dtype_to_primitive(np.dtype(array))\n return ak.types.NumpyType(primitive)\n\n elif isinstance(array, bool): # np.bool_ in np.generic (above)\n return ak.types.NumpyType(\"bool\")\n\n elif isinstance(array, numbers.Integral):\n return ak.types.NumpyType(\"int64\")\n\n elif isinstance(array, numbers.Real):\n return ak.types.NumpyType(\"float64\")\n\n elif isinstance(array, numbers.Complex):\n return ak.types.NumpyType(\"complex128\")\n\n elif isinstance(array, datetime): # np.datetime64 in np.generic (above)\n return ak.types.NumpyType(\"datetime64\")\n\n elif isinstance(array, timedelta): # np.timedelta64 in np.generic (above)\n return ak.types.NumpyType(\"timedelta\")\n\n elif isinstance(\n array,\n (\n ak.highlevel.Array,\n ak.highlevel.Record,\n ak.highlevel.ArrayBuilder,\n ),\n ):\n return array.type\n\n elif isinstance(array, np.ndarray):\n if len(array.shape) == 0:\n return _impl(array.reshape((1,))[0])\n else:\n primitive = ak.types.numpytype.dtype_to_primitive(array.dtype)\n out = ak.types.NumpyType(primitive)\n for x in array.shape[-1:0:-1]:\n out = ak.types.RegularType(out, x)\n return ak.types.ArrayType(out, array.shape[0])\n\n elif isinstance(array, _ext.ArrayBuilder):\n form = ak.forms.from_json(array.form())\n return ak.types.ArrayType(form.type_from_behavior(None), len(array))\n\n elif isinstance(array, ak.record.Record):\n return array.array.form.type\n\n elif isinstance(array, ak.contents.Content):\n return array.form.type\n\n else:\n layout = ak.to_layout(array, allow_other=False)\n return _impl(ak._util.wrap(layout))\n", "path": "src/awkward/operations/ak_type.py"}]} | 1,964 | 122 |