Dataset schema (column statistics as shown in the dataset viewer):

| column            | dtype  | values                  |
|-------------------|--------|-------------------------|
| problem_id        | string | lengths 18 to 22        |
| source            | string | 1 distinct value        |
| task_type         | string | 1 distinct value        |
| in_source_id      | string | lengths 13 to 58        |
| prompt            | string | lengths 1.1k to 10.2k   |
| golden_diff       | string | lengths 151 to 4.94k    |
| verification_info | string | lengths 582 to 21k      |
| num_tokens        | int64  | 271 to 2.05k            |
| num_tokens_diff   | int64  | 47 to 1.02k             |

The sample rows below list these fields in the same order; the long `prompt`, `golden_diff`, and `verification_info` fields appear unlabeled between the labeled short fields.
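
For working with rows of this shape programmatically, a loading sketch follows. The Hub dataset id is inferred from the rows' `source` field and the split name is an assumption; both may need adjusting.

```python
# Loading sketch. The dataset id is taken from the rows' `source`
# field and the split name is an assumption; adjust both as needed.
from datasets import load_dataset

ds = load_dataset("rasdani/github-patches", split="train")
row = ds[0]
print(row["problem_id"], row["in_source_id"], row["num_tokens"])
print(row["prompt"][:200])  # prompts run 1.1k to 10.2k characters
```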

problem_id: gh_patches_debug_8093
source: rasdani/github-patches
task_type: git_diff
in_source_id: scrapy__scrapy-1979
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- empty WARNING message in scrapy.core.downloader.tls (1.1.0rc4/master) Sometimes I'm getting empty warnings now, on 1.1.0rc4 and master branch. (at least on rc3 as well) ``` 2016-05-07 00:33:46 [scrapy.core.downloader.tls] WARNING: 2016-05-07 00:33:47 [scrapy.core.downloader.tls] WARNING: 2016-05-07 00:33:48 [scrapy.core.downloader.tls] WARNING: ``` It happens in a broad linkcheck crawl; so I couldn't pinpoint what URLs might be responsible for that, at this time. The only other observation so far is, that it doesn't happen on a cache-replayed run (which might be obvious, as there is no TLS there). --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `scrapy/core/downloader/tls.py` Content: ``` 1 import logging 2 from OpenSSL import SSL 3 4 5 logger = logging.getLogger(__name__) 6 7 METHOD_SSLv3 = 'SSLv3' 8 METHOD_TLS = 'TLS' 9 METHOD_TLSv10 = 'TLSv1.0' 10 METHOD_TLSv11 = 'TLSv1.1' 11 METHOD_TLSv12 = 'TLSv1.2' 12 13 openssl_methods = { 14 METHOD_TLS: SSL.SSLv23_METHOD, # protocol negotiation (recommended) 15 METHOD_SSLv3: SSL.SSLv3_METHOD, # SSL 3 (NOT recommended) 16 METHOD_TLSv10: SSL.TLSv1_METHOD, # TLS 1.0 only 17 METHOD_TLSv11: getattr(SSL, 'TLSv1_1_METHOD', 5), # TLS 1.1 only 18 METHOD_TLSv12: getattr(SSL, 'TLSv1_2_METHOD', 6), # TLS 1.2 only 19 } 20 21 # ClientTLSOptions requires a recent-enough version of Twisted 22 try: 23 24 # taken from twisted/twisted/internet/_sslverify.py 25 try: 26 from OpenSSL.SSL import SSL_CB_HANDSHAKE_DONE, SSL_CB_HANDSHAKE_START 27 except ImportError: 28 SSL_CB_HANDSHAKE_START = 0x10 29 SSL_CB_HANDSHAKE_DONE = 0x20 30 31 from twisted.internet._sslverify import (ClientTLSOptions, 32 _maybeSetHostNameIndication, 33 verifyHostname, 34 VerificationError) 35 36 class ScrapyClientTLSOptions(ClientTLSOptions): 37 # same as Twisted's ClientTLSOptions, 38 # except that VerificationError is caught 39 # and doesn't close the connection 40 def _identityVerifyingInfoCallback(self, connection, where, ret): 41 if where & SSL_CB_HANDSHAKE_START: 42 _maybeSetHostNameIndication(connection, self._hostnameBytes) 43 elif where & SSL_CB_HANDSHAKE_DONE: 44 try: 45 verifyHostname(connection, self._hostnameASCII) 46 except VerificationError as e: 47 logger.warning(e) 48 49 except ImportError: 50 # ImportError should not matter for older Twisted versions 51 # as the above is not used in the fallback ScrapyClientContextFactory 52 pass 53 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/scrapy/core/downloader/tls.py b/scrapy/core/downloader/tls.py --- a/scrapy/core/downloader/tls.py +++ b/scrapy/core/downloader/tls.py @@ -44,7 +44,9 @@ try: verifyHostname(connection, self._hostnameASCII) except VerificationError as e: - logger.warning(e) + logger.warning( + 'Remote certificate is not valid for hostname "{}"; {}'.format( + self._hostnameASCII, e)) except ImportError: # ImportError should not matter for older Twisted versions
{"golden_diff": "diff --git a/scrapy/core/downloader/tls.py b/scrapy/core/downloader/tls.py\n--- a/scrapy/core/downloader/tls.py\n+++ b/scrapy/core/downloader/tls.py\n@@ -44,7 +44,9 @@\n try:\n verifyHostname(connection, self._hostnameASCII)\n except VerificationError as e:\n- logger.warning(e)\n+ logger.warning(\n+ 'Remote certificate is not valid for hostname \"{}\"; {}'.format(\n+ self._hostnameASCII, e))\n \n except ImportError:\n # ImportError should not matter for older Twisted versions\n", "issue": "empty WARNING message in scrapy.core.downloader.tls (1.1.0rc4/master)\nSometimes I'm getting empty warnings now, on 1.1.0rc4 and master branch.\n(at least on rc3 as well)\n\n```\n2016-05-07 00:33:46 [scrapy.core.downloader.tls] WARNING: \n2016-05-07 00:33:47 [scrapy.core.downloader.tls] WARNING: \n2016-05-07 00:33:48 [scrapy.core.downloader.tls] WARNING: \n```\n\nIt happens in a broad linkcheck crawl; so I couldn't pinpoint what URLs might be responsible for that, at this time. The only other observation so far is, that it doesn't happen on a cache-replayed run (which might be obvious, as there is no TLS there).\n\n", "before_files": [{"content": "import logging\nfrom OpenSSL import SSL\n\n\nlogger = logging.getLogger(__name__)\n\nMETHOD_SSLv3 = 'SSLv3'\nMETHOD_TLS = 'TLS'\nMETHOD_TLSv10 = 'TLSv1.0'\nMETHOD_TLSv11 = 'TLSv1.1'\nMETHOD_TLSv12 = 'TLSv1.2'\n\nopenssl_methods = {\n METHOD_TLS: SSL.SSLv23_METHOD, # protocol negotiation (recommended)\n METHOD_SSLv3: SSL.SSLv3_METHOD, # SSL 3 (NOT recommended)\n METHOD_TLSv10: SSL.TLSv1_METHOD, # TLS 1.0 only\n METHOD_TLSv11: getattr(SSL, 'TLSv1_1_METHOD', 5), # TLS 1.1 only\n METHOD_TLSv12: getattr(SSL, 'TLSv1_2_METHOD', 6), # TLS 1.2 only\n}\n\n# ClientTLSOptions requires a recent-enough version of Twisted\ntry:\n\n # taken from twisted/twisted/internet/_sslverify.py\n try:\n from OpenSSL.SSL import SSL_CB_HANDSHAKE_DONE, SSL_CB_HANDSHAKE_START\n except ImportError:\n SSL_CB_HANDSHAKE_START = 0x10\n SSL_CB_HANDSHAKE_DONE = 0x20\n\n from twisted.internet._sslverify import (ClientTLSOptions,\n _maybeSetHostNameIndication,\n verifyHostname,\n VerificationError)\n\n class ScrapyClientTLSOptions(ClientTLSOptions):\n # same as Twisted's ClientTLSOptions,\n # except that VerificationError is caught\n # and doesn't close the connection\n def _identityVerifyingInfoCallback(self, connection, where, ret):\n if where & SSL_CB_HANDSHAKE_START:\n _maybeSetHostNameIndication(connection, self._hostnameBytes)\n elif where & SSL_CB_HANDSHAKE_DONE:\n try:\n verifyHostname(connection, self._hostnameASCII)\n except VerificationError as e:\n logger.warning(e)\n\nexcept ImportError:\n # ImportError should not matter for older Twisted versions\n # as the above is not used in the fallback ScrapyClientContextFactory\n pass\n", "path": "scrapy/core/downloader/tls.py"}], "after_files": [{"content": "import logging\nfrom OpenSSL import SSL\n\n\nlogger = logging.getLogger(__name__)\n\nMETHOD_SSLv3 = 'SSLv3'\nMETHOD_TLS = 'TLS'\nMETHOD_TLSv10 = 'TLSv1.0'\nMETHOD_TLSv11 = 'TLSv1.1'\nMETHOD_TLSv12 = 'TLSv1.2'\n\nopenssl_methods = {\n METHOD_TLS: SSL.SSLv23_METHOD, # protocol negotiation (recommended)\n METHOD_SSLv3: SSL.SSLv3_METHOD, # SSL 3 (NOT recommended)\n METHOD_TLSv10: SSL.TLSv1_METHOD, # TLS 1.0 only\n METHOD_TLSv11: getattr(SSL, 'TLSv1_1_METHOD', 5), # TLS 1.1 only\n METHOD_TLSv12: getattr(SSL, 'TLSv1_2_METHOD', 6), # TLS 1.2 only\n}\n\n# ClientTLSOptions requires a recent-enough version of Twisted\ntry:\n\n # taken from twisted/twisted/internet/_sslverify.py\n 
try:\n from OpenSSL.SSL import SSL_CB_HANDSHAKE_DONE, SSL_CB_HANDSHAKE_START\n except ImportError:\n SSL_CB_HANDSHAKE_START = 0x10\n SSL_CB_HANDSHAKE_DONE = 0x20\n\n from twisted.internet._sslverify import (ClientTLSOptions,\n _maybeSetHostNameIndication,\n verifyHostname,\n VerificationError)\n\n class ScrapyClientTLSOptions(ClientTLSOptions):\n # same as Twisted's ClientTLSOptions,\n # except that VerificationError is caught\n # and doesn't close the connection\n def _identityVerifyingInfoCallback(self, connection, where, ret):\n if where & SSL_CB_HANDSHAKE_START:\n _maybeSetHostNameIndication(connection, self._hostnameBytes)\n elif where & SSL_CB_HANDSHAKE_DONE:\n try:\n verifyHostname(connection, self._hostnameASCII)\n except VerificationError as e:\n logger.warning(\n 'Remote certificate is not valid for hostname \"{}\"; {}'.format(\n self._hostnameASCII, e))\n\nexcept ImportError:\n # ImportError should not matter for older Twisted versions\n # as the above is not used in the fallback ScrapyClientContextFactory\n pass\n", "path": "scrapy/core/downloader/tls.py"}]}

num_tokens: 1,042
num_tokens_diff: 127
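
A short aside on the failure mode in the row above: `logging.warning(e)` renders `str(e)` as the log message, and an exception that stringifies to an empty string produces exactly the blank WARNING lines quoted in the issue. The sketch below uses a stand-in exception class rather than Twisted's real `VerificationError`:

```python
import logging

logging.basicConfig(format="%(asctime)s [%(name)s] %(levelname)s: %(message)s")
logger = logging.getLogger("scrapy.core.downloader.tls")

class VerificationError(Exception):
    """Stand-in for twisted.internet._sslverify.VerificationError."""

try:
    raise VerificationError()  # str() of a bare exception instance is ""
except VerificationError as e:
    logger.warning(e)  # pre-patch style: renders as an empty message
    logger.warning(
        'Remote certificate is not valid for hostname "{}"; {}'.format(
            "www.example.com", e))  # patched style: always carries context
```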

problem_id: gh_patches_debug_16913
source: rasdani/github-patches
task_type: git_diff
in_source_id: Kinto__kinto-809
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- deadlock on __heartbeat__ If I set up the same postgresql database/user for the storage, cache and permission connectors, I get a thread deadlock on the second call to **heartbeat**, leading to a blocking call, that makes the HB fails. Looks like a DB connector race condition issue --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `kinto/core/views/heartbeat.py` Content: ``` 1 from concurrent.futures import ThreadPoolExecutor, wait 2 from pyramid.security import NO_PERMISSION_REQUIRED 3 4 from kinto import logger 5 from kinto.core import Service 6 7 8 heartbeat = Service(name="heartbeat", path='/__heartbeat__', 9 description="Server health") 10 11 12 @heartbeat.get(permission=NO_PERMISSION_REQUIRED) 13 def get_heartbeat(request): 14 """Return information about server health.""" 15 status = {} 16 17 def heartbeat_check(name, func): 18 status[name] = False 19 status[name] = func(request) 20 21 # Start executing heartbeats concurrently. 22 heartbeats = request.registry.heartbeats 23 pool = ThreadPoolExecutor(max_workers=max(1, len(heartbeats.keys()))) 24 futures = [] 25 for name, func in heartbeats.items(): 26 future = pool.submit(heartbeat_check, name, func) 27 future.__heartbeat_name = name # For logging purposes. 28 futures.append(future) 29 30 # Wait for the results, with timeout. 31 seconds = float(request.registry.settings['heartbeat_timeout_seconds']) 32 done, not_done = wait(futures, timeout=seconds) 33 34 # A heartbeat is supposed to return True or False, and never raise. 35 # Just in case, go though results to spot any potential exception. 36 for future in done: 37 exc = future.exception() 38 if exc is not None: 39 logger.error("%r heartbeat failed." % future.__heartbeat_name) 40 logger.error(exc) 41 42 # Log timed-out heartbeats. 43 for future in not_done: 44 name = future.__heartbeat_name 45 error_msg = "%r heartbeat has exceeded timeout of %s seconds." 46 logger.error(error_msg % (name, seconds)) 47 48 # If any has failed, return a 503 error response. 49 has_error = not all([v or v is None for v in status.values()]) 50 if has_error: 51 request.response.status = 503 52 53 return status 54 55 56 lbheartbeat = Service(name="lbheartbeat", path='/__lbheartbeat__', 57 description="Web head health") 58 59 60 @lbheartbeat.get(permission=NO_PERMISSION_REQUIRED) 61 def get_lbheartbeat(request): 62 """Return successful healthy response. 63 64 If the load-balancer tries to access this URL and fails, this means the 65 Web head is not operational and should be dropped. 66 """ 67 status = {} 68 return status 69 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/kinto/core/views/heartbeat.py b/kinto/core/views/heartbeat.py --- a/kinto/core/views/heartbeat.py +++ b/kinto/core/views/heartbeat.py @@ -1,4 +1,6 @@ from concurrent.futures import ThreadPoolExecutor, wait + +import transaction from pyramid.security import NO_PERMISSION_REQUIRED from kinto import logger @@ -17,6 +19,11 @@ def heartbeat_check(name, func): status[name] = False status[name] = func(request) + # Since the heartbeat checks run concurrently, their transactions + # overlap and might end in shared lock errors. By aborting here + # we clean-up the state on each heartbeat call instead of once at the + # end of the request. See bug Kinto/kinto#804 + transaction.abort() # Start executing heartbeats concurrently. heartbeats = request.registry.heartbeats
{"golden_diff": "diff --git a/kinto/core/views/heartbeat.py b/kinto/core/views/heartbeat.py\n--- a/kinto/core/views/heartbeat.py\n+++ b/kinto/core/views/heartbeat.py\n@@ -1,4 +1,6 @@\n from concurrent.futures import ThreadPoolExecutor, wait\n+\n+import transaction\n from pyramid.security import NO_PERMISSION_REQUIRED\n \n from kinto import logger\n@@ -17,6 +19,11 @@\n def heartbeat_check(name, func):\n status[name] = False\n status[name] = func(request)\n+ # Since the heartbeat checks run concurrently, their transactions\n+ # overlap and might end in shared lock errors. By aborting here\n+ # we clean-up the state on each heartbeat call instead of once at the\n+ # end of the request. See bug Kinto/kinto#804\n+ transaction.abort()\n \n # Start executing heartbeats concurrently.\n heartbeats = request.registry.heartbeats\n", "issue": "deadlock on __heartbeat__\nIf I set up the same postgresql database/user for the storage, cache and permission connectors, I get a thread deadlock on the second call to **heartbeat**, leading to a blocking call, that makes the HB fails.\n\nLooks like a DB connector race condition issue\n\n", "before_files": [{"content": "from concurrent.futures import ThreadPoolExecutor, wait\nfrom pyramid.security import NO_PERMISSION_REQUIRED\n\nfrom kinto import logger\nfrom kinto.core import Service\n\n\nheartbeat = Service(name=\"heartbeat\", path='/__heartbeat__',\n description=\"Server health\")\n\n\[email protected](permission=NO_PERMISSION_REQUIRED)\ndef get_heartbeat(request):\n \"\"\"Return information about server health.\"\"\"\n status = {}\n\n def heartbeat_check(name, func):\n status[name] = False\n status[name] = func(request)\n\n # Start executing heartbeats concurrently.\n heartbeats = request.registry.heartbeats\n pool = ThreadPoolExecutor(max_workers=max(1, len(heartbeats.keys())))\n futures = []\n for name, func in heartbeats.items():\n future = pool.submit(heartbeat_check, name, func)\n future.__heartbeat_name = name # For logging purposes.\n futures.append(future)\n\n # Wait for the results, with timeout.\n seconds = float(request.registry.settings['heartbeat_timeout_seconds'])\n done, not_done = wait(futures, timeout=seconds)\n\n # A heartbeat is supposed to return True or False, and never raise.\n # Just in case, go though results to spot any potential exception.\n for future in done:\n exc = future.exception()\n if exc is not None:\n logger.error(\"%r heartbeat failed.\" % future.__heartbeat_name)\n logger.error(exc)\n\n # Log timed-out heartbeats.\n for future in not_done:\n name = future.__heartbeat_name\n error_msg = \"%r heartbeat has exceeded timeout of %s seconds.\"\n logger.error(error_msg % (name, seconds))\n\n # If any has failed, return a 503 error response.\n has_error = not all([v or v is None for v in status.values()])\n if has_error:\n request.response.status = 503\n\n return status\n\n\nlbheartbeat = Service(name=\"lbheartbeat\", path='/__lbheartbeat__',\n description=\"Web head health\")\n\n\[email protected](permission=NO_PERMISSION_REQUIRED)\ndef get_lbheartbeat(request):\n \"\"\"Return successful healthy response.\n\n If the load-balancer tries to access this URL and fails, this means the\n Web head is not operational and should be dropped.\n \"\"\"\n status = {}\n return status\n", "path": "kinto/core/views/heartbeat.py"}], "after_files": [{"content": "from concurrent.futures import ThreadPoolExecutor, wait\n\nimport transaction\nfrom pyramid.security import NO_PERMISSION_REQUIRED\n\nfrom kinto import logger\nfrom kinto.core import 
Service\n\n\nheartbeat = Service(name=\"heartbeat\", path='/__heartbeat__',\n description=\"Server health\")\n\n\[email protected](permission=NO_PERMISSION_REQUIRED)\ndef get_heartbeat(request):\n \"\"\"Return information about server health.\"\"\"\n status = {}\n\n def heartbeat_check(name, func):\n status[name] = False\n status[name] = func(request)\n # Since the heartbeat checks run concurrently, their transactions\n # overlap and might end in shared lock errors. By aborting here\n # we clean-up the state on each heartbeat call instead of once at the\n # end of the request. See bug Kinto/kinto#804\n transaction.abort()\n\n # Start executing heartbeats concurrently.\n heartbeats = request.registry.heartbeats\n pool = ThreadPoolExecutor(max_workers=max(1, len(heartbeats.keys())))\n futures = []\n for name, func in heartbeats.items():\n future = pool.submit(heartbeat_check, name, func)\n future.__heartbeat_name = name # For logging purposes.\n futures.append(future)\n\n # Wait for the results, with timeout.\n seconds = float(request.registry.settings['heartbeat_timeout_seconds'])\n done, not_done = wait(futures, timeout=seconds)\n\n # A heartbeat is supposed to return True or False, and never raise.\n # Just in case, go though results to spot any potential exception.\n for future in done:\n exc = future.exception()\n if exc is not None:\n logger.error(\"%r heartbeat failed.\" % future.__heartbeat_name)\n logger.error(exc)\n\n # Log timed-out heartbeats.\n for future in not_done:\n name = future.__heartbeat_name\n error_msg = \"%r heartbeat has exceeded timeout of %s seconds.\"\n logger.error(error_msg % (name, seconds))\n\n # If any has failed, return a 503 error response.\n has_error = not all([v or v is None for v in status.values()])\n if has_error:\n request.response.status = 503\n\n return status\n\n\nlbheartbeat = Service(name=\"lbheartbeat\", path='/__lbheartbeat__',\n description=\"Web head health\")\n\n\[email protected](permission=NO_PERMISSION_REQUIRED)\ndef get_lbheartbeat(request):\n \"\"\"Return successful healthy response.\n\n If the load-balancer tries to access this URL and fails, this means the\n Web head is not operational and should be dropped.\n \"\"\"\n status = {}\n return status\n", "path": "kinto/core/views/heartbeat.py"}]}

num_tokens: 945
num_tokens_diff: 209
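
The fix in the row above hinges on `transaction.abort()` being thread-local: each heartbeat worker aborts its own transaction instead of leaving all of them open until the end of the request. A minimal sketch, with placeholder check functions standing in for Kinto's real backend heartbeats (requires the `transaction` package):

```python
# Per-thread clean-up sketch. The check functions are placeholders;
# real Kinto heartbeats touch the storage/cache/permission backends.
from concurrent.futures import ThreadPoolExecutor, wait

import transaction

def heartbeat_check(name, func, status):
    status[name] = False
    status[name] = func()
    # Abort this worker thread's (thread-local) transaction so the
    # concurrent checks do not keep overlapping locks alive until the
    # end of the request, which is the deadlock described in the issue.
    transaction.abort()

status = {}
checks = {"storage": lambda: True, "cache": lambda: True, "permission": lambda: True}
with ThreadPoolExecutor(max_workers=len(checks)) as pool:
    futures = [pool.submit(heartbeat_check, n, f, status) for n, f in checks.items()]
    wait(futures, timeout=10.0)
print(status)
```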

problem_id: gh_patches_debug_6517
source: rasdani/github-patches
task_type: git_diff
in_source_id: ivy-llc__ivy-22309
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- fft2 --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `ivy/functional/frontends/jax/numpy/fft.py` Content: ``` 1 # local 2 import ivy 3 from ivy.functional.frontends.jax.func_wrapper import to_ivy_arrays_and_back 4 from ivy.func_wrapper import with_unsupported_dtypes 5 6 7 @to_ivy_arrays_and_back 8 def fft(a, n=None, axis=-1, norm=None): 9 if norm is None: 10 norm = "backward" 11 return ivy.fft(a, axis, norm=norm, n=n) 12 13 14 @to_ivy_arrays_and_back 15 @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") 16 def fftshift(x, axes=None, name=None): 17 shape = x.shape 18 19 if axes is None: 20 axes = tuple(range(x.ndim)) 21 shifts = [(dim // 2) for dim in shape] 22 elif isinstance(axes, int): 23 shifts = shape[axes] // 2 24 else: 25 shifts = [shape[ax] // 2 for ax in axes] 26 27 roll = ivy.roll(x, shifts, axis=axes) 28 29 return roll 30 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/ivy/functional/frontends/jax/numpy/fft.py b/ivy/functional/frontends/jax/numpy/fft.py --- a/ivy/functional/frontends/jax/numpy/fft.py +++ b/ivy/functional/frontends/jax/numpy/fft.py @@ -11,6 +11,13 @@ return ivy.fft(a, axis, norm=norm, n=n) +@to_ivy_arrays_and_back +def fft2(a, s=None, axes=(-2, -1), norm=None): + if norm is None: + norm = "backward" + return ivy.array(ivy.fft2(a, s=s, dim=axes, norm=norm), dtype=ivy.dtype(a)) + + @to_ivy_arrays_and_back @with_unsupported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle") def fftshift(x, axes=None, name=None):
{"golden_diff": "diff --git a/ivy/functional/frontends/jax/numpy/fft.py b/ivy/functional/frontends/jax/numpy/fft.py\n--- a/ivy/functional/frontends/jax/numpy/fft.py\n+++ b/ivy/functional/frontends/jax/numpy/fft.py\n@@ -11,6 +11,13 @@\n return ivy.fft(a, axis, norm=norm, n=n)\n \n \n+@to_ivy_arrays_and_back\n+def fft2(a, s=None, axes=(-2, -1), norm=None):\n+ if norm is None:\n+ norm = \"backward\"\n+ return ivy.array(ivy.fft2(a, s=s, dim=axes, norm=norm), dtype=ivy.dtype(a))\n+\n+\n @to_ivy_arrays_and_back\n @with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\n def fftshift(x, axes=None, name=None):\n", "issue": "fft2\n\n", "before_files": [{"content": "# local\nimport ivy\nfrom ivy.functional.frontends.jax.func_wrapper import to_ivy_arrays_and_back\nfrom ivy.func_wrapper import with_unsupported_dtypes\n\n\n@to_ivy_arrays_and_back\ndef fft(a, n=None, axis=-1, norm=None):\n if norm is None:\n norm = \"backward\"\n return ivy.fft(a, axis, norm=norm, n=n)\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\ndef fftshift(x, axes=None, name=None):\n shape = x.shape\n\n if axes is None:\n axes = tuple(range(x.ndim))\n shifts = [(dim // 2) for dim in shape]\n elif isinstance(axes, int):\n shifts = shape[axes] // 2\n else:\n shifts = [shape[ax] // 2 for ax in axes]\n\n roll = ivy.roll(x, shifts, axis=axes)\n\n return roll\n", "path": "ivy/functional/frontends/jax/numpy/fft.py"}], "after_files": [{"content": "# local\nimport ivy\nfrom ivy.functional.frontends.jax.func_wrapper import to_ivy_arrays_and_back\nfrom ivy.func_wrapper import with_unsupported_dtypes\n\n\n@to_ivy_arrays_and_back\ndef fft(a, n=None, axis=-1, norm=None):\n if norm is None:\n norm = \"backward\"\n return ivy.fft(a, axis, norm=norm, n=n)\n\n\n@to_ivy_arrays_and_back\ndef fft2(a, s=None, axes=(-2, -1), norm=None):\n if norm is None:\n norm = \"backward\"\n return ivy.array(ivy.fft2(a, s=s, dim=axes, norm=norm), dtype=ivy.dtype(a))\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"2.4.2 and below\": (\"float16\", \"bfloat16\")}, \"paddle\")\ndef fftshift(x, axes=None, name=None):\n shape = x.shape\n\n if axes is None:\n axes = tuple(range(x.ndim))\n shifts = [(dim // 2) for dim in shape]\n elif isinstance(axes, int):\n shifts = shape[axes] // 2\n else:\n shifts = [shape[ax] // 2 for ax in axes]\n\n roll = ivy.roll(x, shifts, axis=axes)\n\n return roll\n", "path": "ivy/functional/frontends/jax/numpy/fft.py"}]}

num_tokens: 554
num_tokens_diff: 217
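
For context on the row above, the patch adds a `fft2` wrapper to Ivy's JAX frontend. A hypothetical usage sketch follows, assuming a NumPy backend is installed; the import path mirrors the patched file's location:

```python
# Hypothetical usage of the new frontend function; it mirrors
# jax.numpy.fft.fft2 defaults (s=None, axes=(-2, -1), norm="backward").
import ivy
from ivy.functional.frontends.jax.numpy import fft

ivy.set_backend("numpy")  # any installed ivy backend should work
x = ivy.random_normal(shape=(4, 4))
out = fft.fft2(x)
print(out.shape)
```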

problem_id: gh_patches_debug_10463
source: rasdani/github-patches
task_type: git_diff
in_source_id: lutris__lutris-3739
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Gamescope failure Spot the issue, I can't. Just enabled gamescope with latest version but it seems there is invalid syntax. Started initial process 34578 from gamescope -f -- gamemoderun /home/theriddick/.local/share/lutris/runners/wine/lutris-ge-6.18-1-x86_64/bin/wine /mnt/GamesNVMe/Games/Encased A Sci-Fi Post-Apocalyptic RPG/Encased.exe Start monitoring process. gamescope: invalid option -- ' ' See --help for a list of options. Monitored process exited. Initial process has exited (return code: 256) All processes have quit Exit with return code 256 --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `lutris/runner_interpreter.py` Content: ``` 1 """Transform runner parameters to data usable for runtime execution""" 2 import os 3 import shlex 4 import stat 5 6 from lutris.util import system 7 from lutris.util.linux import LINUX_SYSTEM 8 from lutris.util.log import logger 9 10 11 def get_mangohud_conf(system_config): 12 """Return correct launch arguments and environment variables for Mangohud.""" 13 env = {"MANGOHUD": "1"} 14 mango_args = [] 15 mangohud = system_config.get("mangohud") or "" 16 if mangohud and system.find_executable("mangohud"): 17 if mangohud == "gl64": 18 mango_args = ["mangohud"] 19 env["MANGOHUD_DLSYM"] = "1" 20 elif mangohud == "gl32": 21 mango_args = ["mangohud.x86"] 22 env["MANGOHUD_DLSYM"] = "1" 23 else: 24 mango_args = ["mangohud"] 25 return mango_args, env 26 27 28 def get_launch_parameters(runner, gameplay_info): 29 system_config = runner.system_config 30 launch_arguments = gameplay_info["command"] 31 env = { 32 "DISABLE_LAYER_AMD_SWITCHABLE_GRAPHICS_1": "1" 33 } 34 35 # Steam compatibility 36 if os.environ.get("SteamAppId"): 37 logger.info("Game launched from steam (AppId: %s)", os.environ["SteamAppId"]) 38 env["LC_ALL"] = "" 39 40 # Optimus 41 optimus = system_config.get("optimus") 42 if optimus == "primusrun" and system.find_executable("primusrun"): 43 launch_arguments.insert(0, "primusrun") 44 elif optimus == "optirun" and system.find_executable("optirun"): 45 launch_arguments.insert(0, "virtualgl") 46 launch_arguments.insert(0, "-b") 47 launch_arguments.insert(0, "optirun") 48 elif optimus == "pvkrun" and system.find_executable("pvkrun"): 49 launch_arguments.insert(0, "pvkrun") 50 51 mango_args, mango_env = get_mangohud_conf(system_config) 52 if mango_args: 53 launch_arguments = mango_args + launch_arguments 54 env.update(mango_env) 55 56 # Libstrangle 57 fps_limit = system_config.get("fps_limit") or "" 58 if fps_limit: 59 strangle_cmd = system.find_executable("strangle") 60 if strangle_cmd: 61 launch_arguments = [strangle_cmd, fps_limit] + launch_arguments 62 else: 63 logger.warning("libstrangle is not available on this system, FPS limiter disabled") 64 65 prefix_command = system_config.get("prefix_command") or "" 66 if prefix_command: 67 launch_arguments = (shlex.split(os.path.expandvars(prefix_command)) + launch_arguments) 68 69 single_cpu = system_config.get("single_cpu") or False 70 if single_cpu: 71 logger.info("The game will run on a single CPU core") 72 launch_arguments.insert(0, "0") 73 launch_arguments.insert(0, "-c") 74 launch_arguments.insert(0, "taskset") 75 76 env.update(runner.get_env()) 77 78 env.update(gameplay_info.get("env") or {}) 79 80 # Set environment variables dependent on gameplay info 81 82 # LD_PRELOAD 83 ld_preload = 
gameplay_info.get("ld_preload") 84 if ld_preload: 85 env["LD_PRELOAD"] = ld_preload 86 87 # LD_LIBRARY_PATH 88 game_ld_libary_path = gameplay_info.get("ld_library_path") 89 if game_ld_libary_path: 90 ld_library_path = env.get("LD_LIBRARY_PATH") 91 if not ld_library_path: 92 ld_library_path = "$LD_LIBRARY_PATH" 93 env["LD_LIBRARY_PATH"] = ":".join([game_ld_libary_path, ld_library_path]) 94 95 # Feral gamemode 96 gamemode = system_config.get("gamemode") and LINUX_SYSTEM.gamemode_available() 97 if gamemode: 98 launch_arguments.insert(0, "gamemoderun") 99 100 # Gamescope 101 gamescope = system_config.get("gamescope") and system.find_executable("gamescope") 102 if gamescope: 103 launch_arguments = get_gamescope_args(launch_arguments, system_config) 104 105 return launch_arguments, env 106 107 108 def get_gamescope_args(launch_arguments, system_config): 109 """Insert gamescope at the start of the launch arguments""" 110 launch_arguments.insert(0, "-f --") 111 if system_config.get("gamescope_output_res"): 112 output_width, output_height = system_config["gamescope_output_res"].lower().split("x") 113 launch_arguments.insert(0, output_height) 114 launch_arguments.insert(0, "-H") 115 launch_arguments.insert(0, output_width) 116 launch_arguments.insert(0, "-W") 117 if system_config.get("gamescope_game_res"): 118 game_width, game_height = system_config["gamescope_game_res"].lower().split("x") 119 launch_arguments.insert(0, game_height) 120 launch_arguments.insert(0, "-h") 121 launch_arguments.insert(0, game_width) 122 launch_arguments.insert(0, "-w") 123 launch_arguments.insert(0, "gamescope") 124 return launch_arguments 125 126 127 def export_bash_script(runner, gameplay_info, script_path): 128 """Convert runner configuration into a bash script""" 129 command, env = get_launch_parameters(runner, gameplay_info) 130 # Override TERM otherwise the script might not run 131 env["TERM"] = "xterm" 132 script_content = "#!/bin/bash\n\n\n" 133 script_content += "# Environment variables\n" 134 for env_var in env: 135 script_content += "export %s=\"%s\"\n" % (env_var, env[env_var]) 136 script_content += "\n# Command\n" 137 script_content += " ".join([shlex.quote(c) for c in command]) 138 with open(script_path, "w") as script_file: 139 script_file.write(script_content) 140 141 os.chmod(script_path, os.stat(script_path).st_mode | stat.S_IEXEC) 142 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/lutris/runner_interpreter.py b/lutris/runner_interpreter.py --- a/lutris/runner_interpreter.py +++ b/lutris/runner_interpreter.py @@ -107,7 +107,8 @@ def get_gamescope_args(launch_arguments, system_config): """Insert gamescope at the start of the launch arguments""" - launch_arguments.insert(0, "-f --") + launch_arguments.insert(0, "--") + launch_arguments.insert(0, "-f") if system_config.get("gamescope_output_res"): output_width, output_height = system_config["gamescope_output_res"].lower().split("x") launch_arguments.insert(0, output_height)
{"golden_diff": "diff --git a/lutris/runner_interpreter.py b/lutris/runner_interpreter.py\n--- a/lutris/runner_interpreter.py\n+++ b/lutris/runner_interpreter.py\n@@ -107,7 +107,8 @@\n \n def get_gamescope_args(launch_arguments, system_config):\n \"\"\"Insert gamescope at the start of the launch arguments\"\"\"\n- launch_arguments.insert(0, \"-f --\")\n+ launch_arguments.insert(0, \"--\")\n+ launch_arguments.insert(0, \"-f\")\n if system_config.get(\"gamescope_output_res\"):\n output_width, output_height = system_config[\"gamescope_output_res\"].lower().split(\"x\")\n launch_arguments.insert(0, output_height)\n", "issue": "Gamescope failure\nSpot the issue, I can't. Just enabled gamescope with latest version but it seems there is invalid syntax. \r\n\r\nStarted initial process 34578 from gamescope -f -- gamemoderun /home/theriddick/.local/share/lutris/runners/wine/lutris-ge-6.18-1-x86_64/bin/wine /mnt/GamesNVMe/Games/Encased A Sci-Fi Post-Apocalyptic RPG/Encased.exe\r\nStart monitoring process.\r\ngamescope: invalid option -- ' '\r\nSee --help for a list of options.\r\nMonitored process exited.\r\nInitial process has exited (return code: 256)\r\nAll processes have quit\r\nExit with return code 256\r\n\n", "before_files": [{"content": "\"\"\"Transform runner parameters to data usable for runtime execution\"\"\"\nimport os\nimport shlex\nimport stat\n\nfrom lutris.util import system\nfrom lutris.util.linux import LINUX_SYSTEM\nfrom lutris.util.log import logger\n\n\ndef get_mangohud_conf(system_config):\n \"\"\"Return correct launch arguments and environment variables for Mangohud.\"\"\"\n env = {\"MANGOHUD\": \"1\"}\n mango_args = []\n mangohud = system_config.get(\"mangohud\") or \"\"\n if mangohud and system.find_executable(\"mangohud\"):\n if mangohud == \"gl64\":\n mango_args = [\"mangohud\"]\n env[\"MANGOHUD_DLSYM\"] = \"1\"\n elif mangohud == \"gl32\":\n mango_args = [\"mangohud.x86\"]\n env[\"MANGOHUD_DLSYM\"] = \"1\"\n else:\n mango_args = [\"mangohud\"]\n return mango_args, env\n\n\ndef get_launch_parameters(runner, gameplay_info):\n system_config = runner.system_config\n launch_arguments = gameplay_info[\"command\"]\n env = {\n \"DISABLE_LAYER_AMD_SWITCHABLE_GRAPHICS_1\": \"1\"\n }\n\n # Steam compatibility\n if os.environ.get(\"SteamAppId\"):\n logger.info(\"Game launched from steam (AppId: %s)\", os.environ[\"SteamAppId\"])\n env[\"LC_ALL\"] = \"\"\n\n # Optimus\n optimus = system_config.get(\"optimus\")\n if optimus == \"primusrun\" and system.find_executable(\"primusrun\"):\n launch_arguments.insert(0, \"primusrun\")\n elif optimus == \"optirun\" and system.find_executable(\"optirun\"):\n launch_arguments.insert(0, \"virtualgl\")\n launch_arguments.insert(0, \"-b\")\n launch_arguments.insert(0, \"optirun\")\n elif optimus == \"pvkrun\" and system.find_executable(\"pvkrun\"):\n launch_arguments.insert(0, \"pvkrun\")\n\n mango_args, mango_env = get_mangohud_conf(system_config)\n if mango_args:\n launch_arguments = mango_args + launch_arguments\n env.update(mango_env)\n\n # Libstrangle\n fps_limit = system_config.get(\"fps_limit\") or \"\"\n if fps_limit:\n strangle_cmd = system.find_executable(\"strangle\")\n if strangle_cmd:\n launch_arguments = [strangle_cmd, fps_limit] + launch_arguments\n else:\n logger.warning(\"libstrangle is not available on this system, FPS limiter disabled\")\n\n prefix_command = system_config.get(\"prefix_command\") or \"\"\n if prefix_command:\n launch_arguments = (shlex.split(os.path.expandvars(prefix_command)) + launch_arguments)\n\n single_cpu = 
system_config.get(\"single_cpu\") or False\n if single_cpu:\n logger.info(\"The game will run on a single CPU core\")\n launch_arguments.insert(0, \"0\")\n launch_arguments.insert(0, \"-c\")\n launch_arguments.insert(0, \"taskset\")\n\n env.update(runner.get_env())\n\n env.update(gameplay_info.get(\"env\") or {})\n\n # Set environment variables dependent on gameplay info\n\n # LD_PRELOAD\n ld_preload = gameplay_info.get(\"ld_preload\")\n if ld_preload:\n env[\"LD_PRELOAD\"] = ld_preload\n\n # LD_LIBRARY_PATH\n game_ld_libary_path = gameplay_info.get(\"ld_library_path\")\n if game_ld_libary_path:\n ld_library_path = env.get(\"LD_LIBRARY_PATH\")\n if not ld_library_path:\n ld_library_path = \"$LD_LIBRARY_PATH\"\n env[\"LD_LIBRARY_PATH\"] = \":\".join([game_ld_libary_path, ld_library_path])\n\n # Feral gamemode\n gamemode = system_config.get(\"gamemode\") and LINUX_SYSTEM.gamemode_available()\n if gamemode:\n launch_arguments.insert(0, \"gamemoderun\")\n\n # Gamescope\n gamescope = system_config.get(\"gamescope\") and system.find_executable(\"gamescope\")\n if gamescope:\n launch_arguments = get_gamescope_args(launch_arguments, system_config)\n\n return launch_arguments, env\n\n\ndef get_gamescope_args(launch_arguments, system_config):\n \"\"\"Insert gamescope at the start of the launch arguments\"\"\"\n launch_arguments.insert(0, \"-f --\")\n if system_config.get(\"gamescope_output_res\"):\n output_width, output_height = system_config[\"gamescope_output_res\"].lower().split(\"x\")\n launch_arguments.insert(0, output_height)\n launch_arguments.insert(0, \"-H\")\n launch_arguments.insert(0, output_width)\n launch_arguments.insert(0, \"-W\")\n if system_config.get(\"gamescope_game_res\"):\n game_width, game_height = system_config[\"gamescope_game_res\"].lower().split(\"x\")\n launch_arguments.insert(0, game_height)\n launch_arguments.insert(0, \"-h\")\n launch_arguments.insert(0, game_width)\n launch_arguments.insert(0, \"-w\")\n launch_arguments.insert(0, \"gamescope\")\n return launch_arguments\n\n\ndef export_bash_script(runner, gameplay_info, script_path):\n \"\"\"Convert runner configuration into a bash script\"\"\"\n command, env = get_launch_parameters(runner, gameplay_info)\n # Override TERM otherwise the script might not run\n env[\"TERM\"] = \"xterm\"\n script_content = \"#!/bin/bash\\n\\n\\n\"\n script_content += \"# Environment variables\\n\"\n for env_var in env:\n script_content += \"export %s=\\\"%s\\\"\\n\" % (env_var, env[env_var])\n script_content += \"\\n# Command\\n\"\n script_content += \" \".join([shlex.quote(c) for c in command])\n with open(script_path, \"w\") as script_file:\n script_file.write(script_content)\n\n os.chmod(script_path, os.stat(script_path).st_mode | stat.S_IEXEC)\n", "path": "lutris/runner_interpreter.py"}], "after_files": [{"content": "\"\"\"Transform runner parameters to data usable for runtime execution\"\"\"\nimport os\nimport shlex\nimport stat\n\nfrom lutris.util import system\nfrom lutris.util.linux import LINUX_SYSTEM\nfrom lutris.util.log import logger\n\n\ndef get_mangohud_conf(system_config):\n \"\"\"Return correct launch arguments and environment variables for Mangohud.\"\"\"\n env = {\"MANGOHUD\": \"1\"}\n mango_args = []\n mangohud = system_config.get(\"mangohud\") or \"\"\n if mangohud and system.find_executable(\"mangohud\"):\n if mangohud == \"gl64\":\n mango_args = [\"mangohud\"]\n env[\"MANGOHUD_DLSYM\"] = \"1\"\n elif mangohud == \"gl32\":\n mango_args = [\"mangohud.x86\"]\n env[\"MANGOHUD_DLSYM\"] = \"1\"\n else:\n mango_args = 
[\"mangohud\"]\n return mango_args, env\n\n\ndef get_launch_parameters(runner, gameplay_info):\n system_config = runner.system_config\n launch_arguments = gameplay_info[\"command\"]\n env = {\n \"DISABLE_LAYER_AMD_SWITCHABLE_GRAPHICS_1\": \"1\"\n }\n\n # Steam compatibility\n if os.environ.get(\"SteamAppId\"):\n logger.info(\"Game launched from steam (AppId: %s)\", os.environ[\"SteamAppId\"])\n env[\"LC_ALL\"] = \"\"\n\n # Optimus\n optimus = system_config.get(\"optimus\")\n if optimus == \"primusrun\" and system.find_executable(\"primusrun\"):\n launch_arguments.insert(0, \"primusrun\")\n elif optimus == \"optirun\" and system.find_executable(\"optirun\"):\n launch_arguments.insert(0, \"virtualgl\")\n launch_arguments.insert(0, \"-b\")\n launch_arguments.insert(0, \"optirun\")\n elif optimus == \"pvkrun\" and system.find_executable(\"pvkrun\"):\n launch_arguments.insert(0, \"pvkrun\")\n\n mango_args, mango_env = get_mangohud_conf(system_config)\n if mango_args:\n launch_arguments = mango_args + launch_arguments\n env.update(mango_env)\n\n # Libstrangle\n fps_limit = system_config.get(\"fps_limit\") or \"\"\n if fps_limit:\n strangle_cmd = system.find_executable(\"strangle\")\n if strangle_cmd:\n launch_arguments = [strangle_cmd, fps_limit] + launch_arguments\n else:\n logger.warning(\"libstrangle is not available on this system, FPS limiter disabled\")\n\n prefix_command = system_config.get(\"prefix_command\") or \"\"\n if prefix_command:\n launch_arguments = (shlex.split(os.path.expandvars(prefix_command)) + launch_arguments)\n\n single_cpu = system_config.get(\"single_cpu\") or False\n if single_cpu:\n logger.info(\"The game will run on a single CPU core\")\n launch_arguments.insert(0, \"0\")\n launch_arguments.insert(0, \"-c\")\n launch_arguments.insert(0, \"taskset\")\n\n env.update(runner.get_env())\n\n env.update(gameplay_info.get(\"env\") or {})\n\n # Set environment variables dependent on gameplay info\n\n # LD_PRELOAD\n ld_preload = gameplay_info.get(\"ld_preload\")\n if ld_preload:\n env[\"LD_PRELOAD\"] = ld_preload\n\n # LD_LIBRARY_PATH\n game_ld_libary_path = gameplay_info.get(\"ld_library_path\")\n if game_ld_libary_path:\n ld_library_path = env.get(\"LD_LIBRARY_PATH\")\n if not ld_library_path:\n ld_library_path = \"$LD_LIBRARY_PATH\"\n env[\"LD_LIBRARY_PATH\"] = \":\".join([game_ld_libary_path, ld_library_path])\n\n # Feral gamemode\n gamemode = system_config.get(\"gamemode\") and LINUX_SYSTEM.gamemode_available()\n if gamemode:\n launch_arguments.insert(0, \"gamemoderun\")\n\n # Gamescope\n gamescope = system_config.get(\"gamescope\") and system.find_executable(\"gamescope\")\n if gamescope:\n launch_arguments = get_gamescope_args(launch_arguments, system_config)\n\n return launch_arguments, env\n\n\ndef get_gamescope_args(launch_arguments, system_config):\n \"\"\"Insert gamescope at the start of the launch arguments\"\"\"\n launch_arguments.insert(0, \"--\")\n launch_arguments.insert(0, \"-f\")\n if system_config.get(\"gamescope_output_res\"):\n output_width, output_height = system_config[\"gamescope_output_res\"].lower().split(\"x\")\n launch_arguments.insert(0, output_height)\n launch_arguments.insert(0, \"-H\")\n launch_arguments.insert(0, output_width)\n launch_arguments.insert(0, \"-W\")\n if system_config.get(\"gamescope_game_res\"):\n game_width, game_height = system_config[\"gamescope_game_res\"].lower().split(\"x\")\n launch_arguments.insert(0, game_height)\n launch_arguments.insert(0, \"-h\")\n launch_arguments.insert(0, game_width)\n launch_arguments.insert(0, 
\"-w\")\n launch_arguments.insert(0, \"gamescope\")\n return launch_arguments\n\n\ndef export_bash_script(runner, gameplay_info, script_path):\n \"\"\"Convert runner configuration into a bash script\"\"\"\n command, env = get_launch_parameters(runner, gameplay_info)\n # Override TERM otherwise the script might not run\n env[\"TERM\"] = \"xterm\"\n script_content = \"#!/bin/bash\\n\\n\\n\"\n script_content += \"# Environment variables\\n\"\n for env_var in env:\n script_content += \"export %s=\\\"%s\\\"\\n\" % (env_var, env[env_var])\n script_content += \"\\n# Command\\n\"\n script_content += \" \".join([shlex.quote(c) for c in command])\n with open(script_path, \"w\") as script_file:\n script_file.write(script_content)\n\n os.chmod(script_path, os.stat(script_path).st_mode | stat.S_IEXEC)\n", "path": "lutris/runner_interpreter.py"}]}

num_tokens: 2,039
num_tokens_diff: 159
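
The bug in the row above is a classic argv mistake: Lutris builds the command as a Python list, and list-form execution passes each element verbatim as one argument, so `"-f --"` reaches gamescope as a single five-character argument whose embedded space trips its option parser (`invalid option -- ' '`). A small demonstration:

```python
# With list-form process execution, each list element is exactly one
# argv entry, so "-f --" is a single argument, not two flags.
import shlex

bad = ["gamescope", "-f --", "gamemoderun", "wine", "game.exe"]
good = ["gamescope", "-f", "--", "gamemoderun", "wine", "game.exe"]

print(shlex.join(bad))   # gamescope '-f --' gamemoderun wine game.exe
print(shlex.join(good))  # gamescope -f -- gamemoderun wine game.exe
```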

problem_id: gh_patches_debug_49166
source: rasdani/github-patches
task_type: git_diff
in_source_id: scoutapp__scout_apm_python-489
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Installation seems to be broken on python3.6.4 <img width="1125" alt="Screen Shot 2020-02-26 at 12 31 00 PM" src="https://user-images.githubusercontent.com/17484350/75380353-e2224900-58a4-11ea-96b3-2629b94c7107.png"> --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 # coding=utf-8 2 from __future__ import absolute_import, division, print_function, unicode_literals 3 4 import sys 5 6 from setuptools import Extension, find_packages, setup 7 8 with open("README.md", "r") as fp: 9 long_description = fp.read() 10 11 packages = find_packages("src") 12 if sys.version_info < (3, 6): 13 packages = [p for p in packages if not p.startswith("scout_apm.async_")] 14 15 compile_extensions = ( 16 # Python 3+ 17 sys.version_info >= (3,) 18 # Not Jython 19 and not sys.platform.startswith("java") 20 # Not PyPy 21 and "__pypy__" not in sys.builtin_module_names 22 ) 23 if compile_extensions: 24 ext_modules = [ 25 Extension( 26 str("scout_apm.core._objtrace"), [str("src/scout_apm/core/_objtrace.c")] 27 ) 28 ] 29 else: 30 ext_modules = [] 31 32 setup( 33 name="scout_apm", 34 version="2.11.0", 35 description="Scout Application Performance Monitoring Agent", 36 long_description=long_description, 37 long_description_content_type="text/markdown", 38 url="https://github.com/scoutapp/scout_apm_python", 39 project_urls={ 40 "Documentation": "https://docs.scoutapm.com/#python-agent", 41 "Changelog": ( 42 "https://github.com/scoutapp/scout_apm_python/blob/master/CHANGELOG.md" 43 ), 44 }, 45 author="Scout", 46 author_email="[email protected]", 47 license="MIT", 48 zip_safe=False, 49 python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4", 50 packages=packages, 51 package_dir={str(""): str("src")}, 52 ext_modules=ext_modules, 53 entry_points={ 54 "console_scripts": [ 55 "core-agent-manager = scout_apm.core.cli.core_agent_manager:main" 56 ] 57 }, 58 install_requires=[ 59 'asgiref ; python_version >= "3.5"', 60 'importlib-metadata ; python_version < "3.8"', 61 "psutil>=5,<6", 62 'urllib3[secure] < 1.25 ; python_version < "3.5"', 63 'urllib3[secure] < 2 ; python_version >= "3.5"', 64 "wrapt>=1.10,<2.0", 65 ], 66 keywords="apm performance monitoring development", 67 classifiers=[ 68 "Development Status :: 5 - Production/Stable", 69 "Framework :: Bottle", 70 "Framework :: Django", 71 "Framework :: Django :: 1.8", 72 "Framework :: Django :: 1.9", 73 "Framework :: Django :: 1.10", 74 "Framework :: Django :: 1.11", 75 "Framework :: Django :: 2.0", 76 "Framework :: Django :: 2.1", 77 "Framework :: Django :: 2.2", 78 "Framework :: Django :: 3.0", 79 "Framework :: Flask", 80 "Framework :: Pyramid", 81 "Intended Audience :: Developers", 82 "Topic :: System :: Monitoring", 83 "License :: OSI Approved :: MIT License", 84 "Operating System :: MacOS", 85 "Operating System :: POSIX", 86 "Operating System :: POSIX :: Linux", 87 "Programming Language :: Python :: 2", 88 "Programming Language :: Python :: 2.7", 89 "Programming Language :: Python :: 3", 90 "Programming Language :: Python :: 3.4", 91 "Programming Language :: Python :: 3.5", 92 "Programming Language :: Python :: 3.6", 93 "Programming Language :: Python :: 3.7", 94 "Programming Language :: Python :: 3.8", 95 ], 96 ) 97 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the 
`git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -23,7 +23,9 @@ if compile_extensions: ext_modules = [ Extension( - str("scout_apm.core._objtrace"), [str("src/scout_apm/core/_objtrace.c")] + name=str("scout_apm.core._objtrace"), + sources=[str("src/scout_apm/core/_objtrace.c")], + optional=True, ) ] else:
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -23,7 +23,9 @@\n if compile_extensions:\n ext_modules = [\n Extension(\n- str(\"scout_apm.core._objtrace\"), [str(\"src/scout_apm/core/_objtrace.c\")]\n+ name=str(\"scout_apm.core._objtrace\"),\n+ sources=[str(\"src/scout_apm/core/_objtrace.c\")],\n+ optional=True,\n )\n ]\n else:\n", "issue": "Installation seems to be broken on python3.6.4\n<img width=\"1125\" alt=\"Screen Shot 2020-02-26 at 12 31 00 PM\" src=\"https://user-images.githubusercontent.com/17484350/75380353-e2224900-58a4-11ea-96b3-2629b94c7107.png\">\r\n\n", "before_files": [{"content": "# coding=utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport sys\n\nfrom setuptools import Extension, find_packages, setup\n\nwith open(\"README.md\", \"r\") as fp:\n long_description = fp.read()\n\npackages = find_packages(\"src\")\nif sys.version_info < (3, 6):\n packages = [p for p in packages if not p.startswith(\"scout_apm.async_\")]\n\ncompile_extensions = (\n # Python 3+\n sys.version_info >= (3,)\n # Not Jython\n and not sys.platform.startswith(\"java\")\n # Not PyPy\n and \"__pypy__\" not in sys.builtin_module_names\n)\nif compile_extensions:\n ext_modules = [\n Extension(\n str(\"scout_apm.core._objtrace\"), [str(\"src/scout_apm/core/_objtrace.c\")]\n )\n ]\nelse:\n ext_modules = []\n\nsetup(\n name=\"scout_apm\",\n version=\"2.11.0\",\n description=\"Scout Application Performance Monitoring Agent\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/scoutapp/scout_apm_python\",\n project_urls={\n \"Documentation\": \"https://docs.scoutapm.com/#python-agent\",\n \"Changelog\": (\n \"https://github.com/scoutapp/scout_apm_python/blob/master/CHANGELOG.md\"\n ),\n },\n author=\"Scout\",\n author_email=\"[email protected]\",\n license=\"MIT\",\n zip_safe=False,\n python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4\",\n packages=packages,\n package_dir={str(\"\"): str(\"src\")},\n ext_modules=ext_modules,\n entry_points={\n \"console_scripts\": [\n \"core-agent-manager = scout_apm.core.cli.core_agent_manager:main\"\n ]\n },\n install_requires=[\n 'asgiref ; python_version >= \"3.5\"',\n 'importlib-metadata ; python_version < \"3.8\"',\n \"psutil>=5,<6\",\n 'urllib3[secure] < 1.25 ; python_version < \"3.5\"',\n 'urllib3[secure] < 2 ; python_version >= \"3.5\"',\n \"wrapt>=1.10,<2.0\",\n ],\n keywords=\"apm performance monitoring development\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Framework :: Bottle\",\n \"Framework :: Django\",\n \"Framework :: Django :: 1.8\",\n \"Framework :: Django :: 1.9\",\n \"Framework :: Django :: 1.10\",\n \"Framework :: Django :: 1.11\",\n \"Framework :: Django :: 2.0\",\n \"Framework :: Django :: 2.1\",\n \"Framework :: Django :: 2.2\",\n \"Framework :: Django :: 3.0\",\n \"Framework :: Flask\",\n \"Framework :: Pyramid\",\n \"Intended Audience :: Developers\",\n \"Topic :: System :: Monitoring\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: MacOS\",\n \"Operating System :: POSIX\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming 
Language :: Python :: 3.8\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "# coding=utf-8\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport sys\n\nfrom setuptools import Extension, find_packages, setup\n\nwith open(\"README.md\", \"r\") as fp:\n long_description = fp.read()\n\npackages = find_packages(\"src\")\nif sys.version_info < (3, 6):\n packages = [p for p in packages if not p.startswith(\"scout_apm.async_\")]\n\ncompile_extensions = (\n # Python 3+\n sys.version_info >= (3,)\n # Not Jython\n and not sys.platform.startswith(\"java\")\n # Not PyPy\n and \"__pypy__\" not in sys.builtin_module_names\n)\nif compile_extensions:\n ext_modules = [\n Extension(\n name=str(\"scout_apm.core._objtrace\"),\n sources=[str(\"src/scout_apm/core/_objtrace.c\")],\n optional=True,\n )\n ]\nelse:\n ext_modules = []\n\nsetup(\n name=\"scout_apm\",\n version=\"2.11.0\",\n description=\"Scout Application Performance Monitoring Agent\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/scoutapp/scout_apm_python\",\n project_urls={\n \"Documentation\": \"https://docs.scoutapm.com/#python-agent\",\n \"Changelog\": (\n \"https://github.com/scoutapp/scout_apm_python/blob/master/CHANGELOG.md\"\n ),\n },\n author=\"Scout\",\n author_email=\"[email protected]\",\n license=\"MIT\",\n zip_safe=False,\n python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4\",\n packages=packages,\n package_dir={str(\"\"): str(\"src\")},\n ext_modules=ext_modules,\n entry_points={\n \"console_scripts\": [\n \"core-agent-manager = scout_apm.core.cli.core_agent_manager:main\"\n ]\n },\n install_requires=[\n 'asgiref ; python_version >= \"3.5\"',\n 'importlib-metadata ; python_version < \"3.8\"',\n \"psutil>=5,<6\",\n 'urllib3[secure] < 1.25 ; python_version < \"3.5\"',\n 'urllib3[secure] < 2 ; python_version >= \"3.5\"',\n \"wrapt>=1.10,<2.0\",\n ],\n keywords=\"apm performance monitoring development\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Framework :: Bottle\",\n \"Framework :: Django\",\n \"Framework :: Django :: 1.8\",\n \"Framework :: Django :: 1.9\",\n \"Framework :: Django :: 1.10\",\n \"Framework :: Django :: 1.11\",\n \"Framework :: Django :: 2.0\",\n \"Framework :: Django :: 2.1\",\n \"Framework :: Django :: 2.2\",\n \"Framework :: Django :: 3.0\",\n \"Framework :: Flask\",\n \"Framework :: Pyramid\",\n \"Intended Audience :: Developers\",\n \"Topic :: System :: Monitoring\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: MacOS\",\n \"Operating System :: POSIX\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n ],\n)\n", "path": "setup.py"}]}

num_tokens: 1,369
num_tokens_diff: 114
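
The patch in the row above relies on the documented `optional` flag of `setuptools.Extension`: when the C extension fails to compile, `build_ext` logs a warning and skips it instead of failing the whole install, which matches the broken-install report. A sketch:

```python
# setuptools treats optional=True as "a failed compile should warn,
# not abort", so installs on hosts without a working C toolchain fall
# back to the pure-Python code path instead of erroring out.
from setuptools import Extension

ext = Extension(
    name="scout_apm.core._objtrace",
    sources=["src/scout_apm/core/_objtrace.c"],
    optional=True,
)
print(ext.optional)  # True
```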

problem_id: gh_patches_debug_19929
source: rasdani/github-patches
task_type: git_diff
in_source_id: OpenEnergyPlatform__oeplatform-1173
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- clear sanbox command doesnot remove tables in `_sandbox` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `dataedit/management/commands/clear_sandbox.py` Content: ``` 1 from typing import List 2 3 import sqlalchemy as sqla 4 from django.core.management.base import BaseCommand 5 6 from api.connection import _get_engine 7 from dataedit.models import Table 8 from oeplatform.securitysettings import PLAYGROUNDS 9 10 SANDBOX_SCHEMA = "sandbox" 11 assert SANDBOX_SCHEMA in PLAYGROUNDS, f"{SANDBOX_SCHEMA} not in playground schemas" 12 13 14 def get_sandbox_tables_django() -> List[Table]: 15 """ 16 Returns: 17 List[Table]: list of table objects in django db in sandbox schema 18 """ 19 return Table.objects.filter(schema__name=SANDBOX_SCHEMA).all() 20 21 22 def get_sandbox_table_names_oedb() -> List[str]: 23 """ 24 Returns: 25 List[str]: list of table names in oedb in sandbox schema 26 """ 27 engine = _get_engine() 28 return sqla.inspect(engine).get_table_names(schema=SANDBOX_SCHEMA) 29 30 31 def clear_sandbox(output: bool = False) -> None: 32 """delete all tables from the sandbox schema. 33 34 Maybe we should use the API (not just django objects) 35 so all the other actions like deleting the meta tables 36 are also performed properly 37 38 For now, we delete tables in oedb and django individually 39 40 !!! DANGER ZONE !!! MAKE SURE YOU KNOW WHAT YOU ARE DOING! 41 42 43 Args: 44 output: if True, print actions 45 46 """ 47 48 # delete all from oedb 49 engine = _get_engine() 50 for table_name in get_sandbox_table_names_oedb(): 51 sql = f'DROP TABLE "{SANDBOX_SCHEMA}"."{table_name}" CASCADE;' 52 if output: 53 print(f"oedb: {sql}") 54 engine.execute(sql) 55 56 # delete all from django 57 for table in get_sandbox_tables_django(): 58 if output: 59 print(f"django: delete {table.schema.name}.{table.name}") 60 table.delete() 61 62 63 class Command(BaseCommand): 64 def handle(self, *args, **options): 65 # ask for confirmation 66 answ = input(f"Delete all tables from {SANDBOX_SCHEMA} [y|n]: ") 67 if not answ == "y": 68 print("Abort") 69 return 70 71 clear_sandbox(output=True) 72 73 print("Done") 74 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/dataedit/management/commands/clear_sandbox.py b/dataedit/management/commands/clear_sandbox.py --- a/dataedit/management/commands/clear_sandbox.py +++ b/dataedit/management/commands/clear_sandbox.py @@ -28,6 +28,15 @@ return sqla.inspect(engine).get_table_names(schema=SANDBOX_SCHEMA) +def get_sandbox_meta_table_names_oedb() -> List[str]: + """ + Returns: + List[str]: list of table names in oedb in sandbox meta schema + """ + engine = _get_engine() + return sqla.inspect(engine).get_table_names(schema="_" + SANDBOX_SCHEMA) + + def clear_sandbox(output: bool = False) -> None: """delete all tables from the sandbox schema. @@ -53,6 +62,12 @@ print(f"oedb: {sql}") engine.execute(sql) + for table_name in get_sandbox_meta_table_names_oedb(): + sql = f'DROP TABLE "_{SANDBOX_SCHEMA}"."{table_name}" CASCADE;' + if output: + print(f"oedb: {sql}") + engine.execute(sql) + # delete all from django for table in get_sandbox_tables_django(): if output:
{"golden_diff": "diff --git a/dataedit/management/commands/clear_sandbox.py b/dataedit/management/commands/clear_sandbox.py\n--- a/dataedit/management/commands/clear_sandbox.py\n+++ b/dataedit/management/commands/clear_sandbox.py\n@@ -28,6 +28,15 @@\n return sqla.inspect(engine).get_table_names(schema=SANDBOX_SCHEMA)\n \n \n+def get_sandbox_meta_table_names_oedb() -> List[str]:\n+ \"\"\"\n+ Returns:\n+ List[str]: list of table names in oedb in sandbox meta schema\n+ \"\"\"\n+ engine = _get_engine()\n+ return sqla.inspect(engine).get_table_names(schema=\"_\" + SANDBOX_SCHEMA)\n+\n+\n def clear_sandbox(output: bool = False) -> None:\n \"\"\"delete all tables from the sandbox schema.\n \n@@ -53,6 +62,12 @@\n print(f\"oedb: {sql}\")\n engine.execute(sql)\n \n+ for table_name in get_sandbox_meta_table_names_oedb():\n+ sql = f'DROP TABLE \"_{SANDBOX_SCHEMA}\".\"{table_name}\" CASCADE;'\n+ if output:\n+ print(f\"oedb: {sql}\")\n+ engine.execute(sql)\n+\n # delete all from django\n for table in get_sandbox_tables_django():\n if output:\n", "issue": "clear sanbox command doesnot remove tables in `_sandbox`\n\n", "before_files": [{"content": "from typing import List\n\nimport sqlalchemy as sqla\nfrom django.core.management.base import BaseCommand\n\nfrom api.connection import _get_engine\nfrom dataedit.models import Table\nfrom oeplatform.securitysettings import PLAYGROUNDS\n\nSANDBOX_SCHEMA = \"sandbox\"\nassert SANDBOX_SCHEMA in PLAYGROUNDS, f\"{SANDBOX_SCHEMA} not in playground schemas\"\n\n\ndef get_sandbox_tables_django() -> List[Table]:\n \"\"\"\n Returns:\n List[Table]: list of table objects in django db in sandbox schema\n \"\"\"\n return Table.objects.filter(schema__name=SANDBOX_SCHEMA).all()\n\n\ndef get_sandbox_table_names_oedb() -> List[str]:\n \"\"\"\n Returns:\n List[str]: list of table names in oedb in sandbox schema\n \"\"\"\n engine = _get_engine()\n return sqla.inspect(engine).get_table_names(schema=SANDBOX_SCHEMA)\n\n\ndef clear_sandbox(output: bool = False) -> None:\n \"\"\"delete all tables from the sandbox schema.\n\n Maybe we should use the API (not just django objects)\n so all the other actions like deleting the meta tables\n are also performed properly\n\n For now, we delete tables in oedb and django individually\n\n !!! DANGER ZONE !!! 
MAKE SURE YOU KNOW WHAT YOU ARE DOING!\n\n\n Args:\n output: if True, print actions\n\n \"\"\"\n\n # delete all from oedb\n engine = _get_engine()\n for table_name in get_sandbox_table_names_oedb():\n sql = f'DROP TABLE \"{SANDBOX_SCHEMA}\".\"{table_name}\" CASCADE;'\n if output:\n print(f\"oedb: {sql}\")\n engine.execute(sql)\n\n # delete all from django\n for table in get_sandbox_tables_django():\n if output:\n print(f\"django: delete {table.schema.name}.{table.name}\")\n table.delete()\n\n\nclass Command(BaseCommand):\n def handle(self, *args, **options):\n # ask for confirmation\n answ = input(f\"Delete all tables from {SANDBOX_SCHEMA} [y|n]: \")\n if not answ == \"y\":\n print(\"Abort\")\n return\n\n clear_sandbox(output=True)\n\n print(\"Done\")\n", "path": "dataedit/management/commands/clear_sandbox.py"}], "after_files": [{"content": "from typing import List\n\nimport sqlalchemy as sqla\nfrom django.core.management.base import BaseCommand\n\nfrom api.connection import _get_engine\nfrom dataedit.models import Table\nfrom oeplatform.securitysettings import PLAYGROUNDS\n\nSANDBOX_SCHEMA = \"sandbox\"\nassert SANDBOX_SCHEMA in PLAYGROUNDS, f\"{SANDBOX_SCHEMA} not in playground schemas\"\n\n\ndef get_sandbox_tables_django() -> List[Table]:\n \"\"\"\n Returns:\n List[Table]: list of table objects in django db in sandbox schema\n \"\"\"\n return Table.objects.filter(schema__name=SANDBOX_SCHEMA).all()\n\n\ndef get_sandbox_table_names_oedb() -> List[str]:\n \"\"\"\n Returns:\n List[str]: list of table names in oedb in sandbox schema\n \"\"\"\n engine = _get_engine()\n return sqla.inspect(engine).get_table_names(schema=SANDBOX_SCHEMA)\n\n\ndef get_sandbox_meta_table_names_oedb() -> List[str]:\n \"\"\"\n Returns:\n List[str]: list of table names in oedb in sandbox meta schema\n \"\"\"\n engine = _get_engine()\n return sqla.inspect(engine).get_table_names(schema=\"_\" + SANDBOX_SCHEMA)\n\n\ndef clear_sandbox(output: bool = False) -> None:\n \"\"\"delete all tables from the sandbox schema.\n\n Maybe we should use the API (not just django objects)\n so all the other actions like deleting the meta tables\n are also performed properly\n\n For now, we delete tables in oedb and django individually\n\n !!! DANGER ZONE !!! MAKE SURE YOU KNOW WHAT YOU ARE DOING!\n\n\n Args:\n output: if True, print actions\n\n \"\"\"\n\n # delete all from oedb\n engine = _get_engine()\n for table_name in get_sandbox_table_names_oedb():\n sql = f'DROP TABLE \"{SANDBOX_SCHEMA}\".\"{table_name}\" CASCADE;'\n if output:\n print(f\"oedb: {sql}\")\n engine.execute(sql)\n\n for table_name in get_sandbox_meta_table_names_oedb():\n sql = f'DROP TABLE \"_{SANDBOX_SCHEMA}\".\"{table_name}\" CASCADE;'\n if output:\n print(f\"oedb: {sql}\")\n engine.execute(sql)\n\n # delete all from django\n for table in get_sandbox_tables_django():\n if output:\n print(f\"django: delete {table.schema.name}.{table.name}\")\n table.delete()\n\n\nclass Command(BaseCommand):\n def handle(self, *args, **options):\n # ask for confirmation\n answ = input(f\"Delete all tables from {SANDBOX_SCHEMA} [y|n]: \")\n if not answ == \"y\":\n print(\"Abort\")\n return\n\n clear_sandbox(output=True)\n\n print(\"Done\")\n", "path": "dataedit/management/commands/clear_sandbox.py"}]}
910
294
gh_patches_debug_26574
rasdani/github-patches
git_diff
streamlink__streamlink-3205
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- cdnbg can't open new BNT links ## Bug Report - [x] This is a bug report and I have read the contribution guidelines. ### Description There have been changes to the bnt.bg live channel links, which have made them unrecognizable by the cdnbg plugin. **Note:** Streamlink can still open these links, which are now hidden away in a small part of the website and are not protected by an SSL certificate: ``` http://tv.bnt.bg/bnt1 http://tv.bnt.bg/bnt2 http://tv.bnt.bg/bnt3 http://tv.bnt.bg/bnt4 ``` **Other plugin issues:** 1. https://mmtvmusic.com/live/ has moved away to another service provider and hence can be deleted from cdnbg. Can't be opened with anything else atm. 2. https://chernomore.bg/ can be removed - the owner of the media group closed down the newspaper and television and converted the website into an information agency. ### Expected / Actual behavior When I input them through CLI, they should open. ### Reproduction steps / Explicit stream URLs to test ``` streamlink https://bnt.bg/live best streamlink https://bnt.bg/live/bnt1 best streamlink https://bnt.bg/live/bnt2 best streamlink https://bnt.bg/live/bnt3 best streamlink https://bnt.bg/live/bnt4 best ``` ### Log output ``` C:\Users\XXXX> streamlink https://bnt.bg/live/bnt1 best --loglevel debug [cli][debug] OS: Windows 7 [cli][debug] Python: 3.6.6 [cli][debug] Streamlink: 1.6.0 [cli][debug] Requests(2.24.0), Socks(1.7.1), Websocket(0.57.0) error: No plugin can handle URL: https://bnt.bg/live/bnt1 ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `src/streamlink/plugins/cdnbg.py` Content: ``` 1 import logging 2 import re 3 4 from streamlink.compat import urlparse 5 from streamlink.plugin import Plugin 6 from streamlink.plugin.api import useragents 7 from streamlink.plugin.api import validate 8 from streamlink.stream import HLSStream 9 from streamlink.utils import update_scheme 10 11 log = logging.getLogger(__name__) 12 13 14 class CDNBG(Plugin): 15 url_re = re.compile(r""" 16 https?://(?:www\.)?(?: 17 tv\.bnt\.bg/\w+(?:/\w+)?| 18 nova\.bg/live| 19 bgonair\.bg/tvonline| 20 mmtvmusic\.com/live| 21 mu-vi\.tv/LiveStreams/pages/Live\.aspx| 22 live\.bstv\.bg| 23 bloombergtv.bg/video| 24 armymedia.bg| 25 chernomore.bg| 26 i.cdn.bg/live/ 27 )/? 
28 """, re.VERBOSE) 29 iframe_re = re.compile(r"iframe .*?src=\"((?:https?(?::|&#58;))?//(?:\w+\.)?cdn.bg/live[^\"]+)\"", re.DOTALL) 30 sdata_re = re.compile(r"sdata\.src.*?=.*?(?P<q>[\"'])(?P<url>http.*?)(?P=q)") 31 hls_file_re = re.compile(r"(src|file): (?P<q>[\"'])(?P<url>(https?:)?//.+?m3u8.*?)(?P=q)") 32 hls_src_re = re.compile(r"video src=(?P<url>http[^ ]+m3u8[^ ]*)") 33 34 stream_schema = validate.Schema( 35 validate.any( 36 validate.all(validate.transform(sdata_re.search), validate.get("url")), 37 validate.all(validate.transform(hls_file_re.search), validate.get("url")), 38 validate.all(validate.transform(hls_src_re.search), validate.get("url")), 39 ) 40 ) 41 42 @classmethod 43 def can_handle_url(cls, url): 44 return cls.url_re.match(url) is not None 45 46 def find_iframe(self, url): 47 self.session.http.headers.update({"User-Agent": useragents.CHROME}) 48 res = self.session.http.get(self.url) 49 p = urlparse(url) 50 for iframe_url in self.iframe_re.findall(res.text): 51 if "googletagmanager" not in iframe_url: 52 log.debug("Found iframe: {0}", iframe_url) 53 iframe_url = iframe_url.replace("&#58;", ":") 54 if iframe_url.startswith("//"): 55 return "{0}:{1}".format(p.scheme, iframe_url) 56 else: 57 return iframe_url 58 59 def _get_streams(self): 60 if "i.cdn.bg/live/" in self.url: 61 iframe_url = self.url 62 else: 63 iframe_url = self.find_iframe(self.url) 64 65 if iframe_url: 66 res = self.session.http.get(iframe_url, headers={"Referer": self.url}) 67 stream_url = update_scheme(self.url, self.stream_schema.validate(res.text)) 68 log.warning("SSL Verification disabled.") 69 return HLSStream.parse_variant_playlist(self.session, 70 stream_url, 71 verify=False) 72 73 74 __plugin__ = CDNBG 75 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/src/streamlink/plugins/cdnbg.py b/src/streamlink/plugins/cdnbg.py --- a/src/streamlink/plugins/cdnbg.py +++ b/src/streamlink/plugins/cdnbg.py @@ -14,16 +14,14 @@ class CDNBG(Plugin): url_re = re.compile(r""" https?://(?:www\.)?(?: - tv\.bnt\.bg/\w+(?:/\w+)?| - nova\.bg/live| + armymedia\.bg| bgonair\.bg/tvonline| - mmtvmusic\.com/live| - mu-vi\.tv/LiveStreams/pages/Live\.aspx| + bloombergtv\.bg/video| + (?:tv\.)?bnt\.bg/\w+(?:/\w+)?| live\.bstv\.bg| - bloombergtv.bg/video| - armymedia.bg| - chernomore.bg| - i.cdn.bg/live/ + i\.cdn\.bg/live/| + nova\.bg/live| + mu-vi\.tv/LiveStreams/pages/Live\.aspx )/? """, re.VERBOSE) iframe_re = re.compile(r"iframe .*?src=\"((?:https?(?::|&#58;))?//(?:\w+\.)?cdn.bg/live[^\"]+)\"", re.DOTALL) @@ -52,7 +50,7 @@ log.debug("Found iframe: {0}", iframe_url) iframe_url = iframe_url.replace("&#58;", ":") if iframe_url.startswith("//"): - return "{0}:{1}".format(p.scheme, iframe_url) + return update_scheme(p.scheme, iframe_url) else: return iframe_url
{"golden_diff": "diff --git a/src/streamlink/plugins/cdnbg.py b/src/streamlink/plugins/cdnbg.py\n--- a/src/streamlink/plugins/cdnbg.py\n+++ b/src/streamlink/plugins/cdnbg.py\n@@ -14,16 +14,14 @@\n class CDNBG(Plugin):\n url_re = re.compile(r\"\"\"\n https?://(?:www\\.)?(?:\n- tv\\.bnt\\.bg/\\w+(?:/\\w+)?|\n- nova\\.bg/live|\n+ armymedia\\.bg|\n bgonair\\.bg/tvonline|\n- mmtvmusic\\.com/live|\n- mu-vi\\.tv/LiveStreams/pages/Live\\.aspx|\n+ bloombergtv\\.bg/video|\n+ (?:tv\\.)?bnt\\.bg/\\w+(?:/\\w+)?|\n live\\.bstv\\.bg|\n- bloombergtv.bg/video|\n- armymedia.bg|\n- chernomore.bg|\n- i.cdn.bg/live/\n+ i\\.cdn\\.bg/live/|\n+ nova\\.bg/live|\n+ mu-vi\\.tv/LiveStreams/pages/Live\\.aspx\n )/?\n \"\"\", re.VERBOSE)\n iframe_re = re.compile(r\"iframe .*?src=\\\"((?:https?(?::|&#58;))?//(?:\\w+\\.)?cdn.bg/live[^\\\"]+)\\\"\", re.DOTALL)\n@@ -52,7 +50,7 @@\n log.debug(\"Found iframe: {0}\", iframe_url)\n iframe_url = iframe_url.replace(\"&#58;\", \":\")\n if iframe_url.startswith(\"//\"):\n- return \"{0}:{1}\".format(p.scheme, iframe_url)\n+ return update_scheme(p.scheme, iframe_url)\n else:\n return iframe_url\n", "issue": "cdnbg can't open new BNT links\n## Bug Report\r\n- [x] This is a bug report and I have read the contribution guidelines.\r\n\r\n\r\n### Description\r\nThere have been changes to the bnt.bg live channel links, which have made them unrecognizable by the cdnbg plugin.\r\n**Note:** Streamlink can still open these links, which are now hidden away in a small part of the website and are not protected by an SSL certificate:\r\n```\r\nhttp://tv.bnt.bg/bnt1\r\nhttp://tv.bnt.bg/bnt2\r\nhttp://tv.bnt.bg/bnt3\r\nhttp://tv.bnt.bg/bnt4\r\n```\r\n\r\n**Other plugin issues:**\r\n1. https://mmtvmusic.com/live/ has moved away to another service provider and hence can be deleted from cdnbg. Can't be opened with anything else atm.\r\n2. 
https://chernomore.bg/ can be removed - the owner of the media group closed down the newspaper and television and converted the website into an information agency.\r\n### Expected / Actual behavior\r\nWhen I input them through CLI, they should open.\r\n\r\n\r\n### Reproduction steps / Explicit stream URLs to test\r\n```\r\nstreamlink https://bnt.bg/live best\r\nstreamlink https://bnt.bg/live/bnt1 best\r\nstreamlink https://bnt.bg/live/bnt2 best\r\nstreamlink https://bnt.bg/live/bnt3 best\r\nstreamlink https://bnt.bg/live/bnt4 best\r\n```\r\n\r\n\r\n### Log output\r\n```\r\nC:\\Users\\XXXX> streamlink https://bnt.bg/live/bnt1 best --loglevel debug\r\n[cli][debug] OS: Windows 7\r\n[cli][debug] Python: 3.6.6\r\n[cli][debug] Streamlink: 1.6.0\r\n[cli][debug] Requests(2.24.0), Socks(1.7.1), Websocket(0.57.0)\r\nerror: No plugin can handle URL: https://bnt.bg/live/bnt1\r\n```\n", "before_files": [{"content": "import logging\nimport re\n\nfrom streamlink.compat import urlparse\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import useragents\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream import HLSStream\nfrom streamlink.utils import update_scheme\n\nlog = logging.getLogger(__name__)\n\n\nclass CDNBG(Plugin):\n url_re = re.compile(r\"\"\"\n https?://(?:www\\.)?(?:\n tv\\.bnt\\.bg/\\w+(?:/\\w+)?|\n nova\\.bg/live|\n bgonair\\.bg/tvonline|\n mmtvmusic\\.com/live|\n mu-vi\\.tv/LiveStreams/pages/Live\\.aspx|\n live\\.bstv\\.bg|\n bloombergtv.bg/video|\n armymedia.bg|\n chernomore.bg|\n i.cdn.bg/live/\n )/?\n \"\"\", re.VERBOSE)\n iframe_re = re.compile(r\"iframe .*?src=\\\"((?:https?(?::|&#58;))?//(?:\\w+\\.)?cdn.bg/live[^\\\"]+)\\\"\", re.DOTALL)\n sdata_re = re.compile(r\"sdata\\.src.*?=.*?(?P<q>[\\\"'])(?P<url>http.*?)(?P=q)\")\n hls_file_re = re.compile(r\"(src|file): (?P<q>[\\\"'])(?P<url>(https?:)?//.+?m3u8.*?)(?P=q)\")\n hls_src_re = re.compile(r\"video src=(?P<url>http[^ ]+m3u8[^ ]*)\")\n\n stream_schema = validate.Schema(\n validate.any(\n validate.all(validate.transform(sdata_re.search), validate.get(\"url\")),\n validate.all(validate.transform(hls_file_re.search), validate.get(\"url\")),\n validate.all(validate.transform(hls_src_re.search), validate.get(\"url\")),\n )\n )\n\n @classmethod\n def can_handle_url(cls, url):\n return cls.url_re.match(url) is not None\n\n def find_iframe(self, url):\n self.session.http.headers.update({\"User-Agent\": useragents.CHROME})\n res = self.session.http.get(self.url)\n p = urlparse(url)\n for iframe_url in self.iframe_re.findall(res.text):\n if \"googletagmanager\" not in iframe_url:\n log.debug(\"Found iframe: {0}\", iframe_url)\n iframe_url = iframe_url.replace(\"&#58;\", \":\")\n if iframe_url.startswith(\"//\"):\n return \"{0}:{1}\".format(p.scheme, iframe_url)\n else:\n return iframe_url\n\n def _get_streams(self):\n if \"i.cdn.bg/live/\" in self.url:\n iframe_url = self.url\n else:\n iframe_url = self.find_iframe(self.url)\n\n if iframe_url:\n res = self.session.http.get(iframe_url, headers={\"Referer\": self.url})\n stream_url = update_scheme(self.url, self.stream_schema.validate(res.text))\n log.warning(\"SSL Verification disabled.\")\n return HLSStream.parse_variant_playlist(self.session,\n stream_url,\n verify=False)\n\n\n__plugin__ = CDNBG\n", "path": "src/streamlink/plugins/cdnbg.py"}], "after_files": [{"content": "import logging\nimport re\n\nfrom streamlink.compat import urlparse\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import useragents\nfrom streamlink.plugin.api import 
validate\nfrom streamlink.stream import HLSStream\nfrom streamlink.utils import update_scheme\n\nlog = logging.getLogger(__name__)\n\n\nclass CDNBG(Plugin):\n url_re = re.compile(r\"\"\"\n https?://(?:www\\.)?(?:\n armymedia\\.bg|\n bgonair\\.bg/tvonline|\n bloombergtv\\.bg/video|\n (?:tv\\.)?bnt\\.bg/\\w+(?:/\\w+)?|\n live\\.bstv\\.bg|\n i\\.cdn\\.bg/live/|\n nova\\.bg/live|\n mu-vi\\.tv/LiveStreams/pages/Live\\.aspx\n )/?\n \"\"\", re.VERBOSE)\n iframe_re = re.compile(r\"iframe .*?src=\\\"((?:https?(?::|&#58;))?//(?:\\w+\\.)?cdn.bg/live[^\\\"]+)\\\"\", re.DOTALL)\n sdata_re = re.compile(r\"sdata\\.src.*?=.*?(?P<q>[\\\"'])(?P<url>http.*?)(?P=q)\")\n hls_file_re = re.compile(r\"(src|file): (?P<q>[\\\"'])(?P<url>(https?:)?//.+?m3u8.*?)(?P=q)\")\n hls_src_re = re.compile(r\"video src=(?P<url>http[^ ]+m3u8[^ ]*)\")\n\n stream_schema = validate.Schema(\n validate.any(\n validate.all(validate.transform(sdata_re.search), validate.get(\"url\")),\n validate.all(validate.transform(hls_file_re.search), validate.get(\"url\")),\n validate.all(validate.transform(hls_src_re.search), validate.get(\"url\")),\n )\n )\n\n @classmethod\n def can_handle_url(cls, url):\n return cls.url_re.match(url) is not None\n\n def find_iframe(self, url):\n self.session.http.headers.update({\"User-Agent\": useragents.CHROME})\n res = self.session.http.get(self.url)\n p = urlparse(url)\n for iframe_url in self.iframe_re.findall(res.text):\n if \"googletagmanager\" not in iframe_url:\n log.debug(\"Found iframe: {0}\", iframe_url)\n iframe_url = iframe_url.replace(\"&#58;\", \":\")\n if iframe_url.startswith(\"//\"):\n return update_scheme(p.scheme, iframe_url)\n else:\n return iframe_url\n\n def _get_streams(self):\n if \"i.cdn.bg/live/\" in self.url:\n iframe_url = self.url\n else:\n iframe_url = self.find_iframe(self.url)\n\n if iframe_url:\n res = self.session.http.get(iframe_url, headers={\"Referer\": self.url})\n stream_url = update_scheme(self.url, self.stream_schema.validate(res.text))\n log.warning(\"SSL Verification disabled.\")\n return HLSStream.parse_variant_playlist(self.session,\n stream_url,\n verify=False)\n\n\n__plugin__ = CDNBG\n", "path": "src/streamlink/plugins/cdnbg.py"}]}
1,517
393
gh_patches_debug_12532
rasdani/github-patches
git_diff
explosion__spaCy-866
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- 💫 Lemmatizer should apply rules on OOV words @juanmirocks points out in #327 that the lemmatizer fails on OOV words: ```python >>> nlp.vocab.morphology.lemmatizer(u'endosomes', 'noun', morphology={'number': 'plur'})set([u'endosomes']) >>> nlp.vocab.morphology.lemmatizer(u'chromosomes', 'noun', morphology={'number': 'plur'}) set([u'chromosome']) ``` Suggested patch to lemmatizer.py ```python oov_forms = [] for old, new in rules: if string.endswith(old): form = string[:len(string) - len(old)] + new if form in index or not form.isalpha(): forms.append(form) else: oov_forms.append(form) if not forms: forms.extend(oov_forms) ``` ## Your Environment <!-- Include details of your environment --> * Operating System: * Python Version Used: * spaCy Version Used: * Environment Information: --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `spacy/lemmatizer.py` Content: ``` 1 from __future__ import unicode_literals, print_function 2 import codecs 3 import pathlib 4 5 import ujson as json 6 7 from .symbols import POS, NOUN, VERB, ADJ, PUNCT 8 9 10 class Lemmatizer(object): 11 @classmethod 12 def load(cls, path, rules=None): 13 index = {} 14 exc = {} 15 for pos in ['adj', 'noun', 'verb']: 16 pos_index_path = path / 'wordnet' / 'index.{pos}'.format(pos=pos) 17 if pos_index_path.exists(): 18 with pos_index_path.open() as file_: 19 index[pos] = read_index(file_) 20 else: 21 index[pos] = set() 22 pos_exc_path = path / 'wordnet' / '{pos}.exc'.format(pos=pos) 23 if pos_exc_path.exists(): 24 with pos_exc_path.open() as file_: 25 exc[pos] = read_exc(file_) 26 else: 27 exc[pos] = {} 28 if rules is None and (path / 'vocab' / 'lemma_rules.json').exists(): 29 with (path / 'vocab' / 'lemma_rules.json').open('r', encoding='utf8') as file_: 30 rules = json.load(file_) 31 elif rules is None: 32 rules = {} 33 return cls(index, exc, rules) 34 35 def __init__(self, index, exceptions, rules): 36 self.index = index 37 self.exc = exceptions 38 self.rules = rules 39 40 def __call__(self, string, univ_pos, morphology=None): 41 if univ_pos == NOUN: 42 univ_pos = 'noun' 43 elif univ_pos == VERB: 44 univ_pos = 'verb' 45 elif univ_pos == ADJ: 46 univ_pos = 'adj' 47 elif univ_pos == PUNCT: 48 univ_pos = 'punct' 49 # See Issue #435 for example of where this logic is requied. 
50 if self.is_base_form(univ_pos, morphology): 51 return set([string.lower()]) 52 lemmas = lemmatize(string, self.index.get(univ_pos, {}), 53 self.exc.get(univ_pos, {}), 54 self.rules.get(univ_pos, [])) 55 return lemmas 56 57 def is_base_form(self, univ_pos, morphology=None): 58 '''Check whether we're dealing with an uninflected paradigm, so we can 59 avoid lemmatization entirely.''' 60 morphology = {} if morphology is None else morphology 61 others = [key for key in morphology if key not in (POS, 'number', 'pos', 'verbform')] 62 if univ_pos == 'noun' and morphology.get('number') == 'sing' and not others: 63 return True 64 elif univ_pos == 'verb' and morphology.get('verbform') == 'inf' and not others: 65 return True 66 else: 67 return False 68 69 def noun(self, string, morphology=None): 70 return self(string, 'noun', morphology) 71 72 def verb(self, string, morphology=None): 73 return self(string, 'verb', morphology) 74 75 def adj(self, string, morphology=None): 76 return self(string, 'adj', morphology) 77 78 def punct(self, string, morphology=None): 79 return self(string, 'punct', morphology) 80 81 82 def lemmatize(string, index, exceptions, rules): 83 string = string.lower() 84 forms = [] 85 # TODO: Is this correct? See discussion in Issue #435. 86 #if string in index: 87 # forms.append(string) 88 forms.extend(exceptions.get(string, [])) 89 for old, new in rules: 90 if string.endswith(old): 91 form = string[:len(string) - len(old)] + new 92 if form in index or not form.isalpha(): 93 forms.append(form) 94 if not forms: 95 forms.append(string) 96 return set(forms) 97 98 99 def read_index(fileobj): 100 index = set() 101 for line in fileobj: 102 if line.startswith(' '): 103 continue 104 pieces = line.split() 105 word = pieces[0] 106 if word.count('_') == 0: 107 index.add(word) 108 return index 109 110 111 def read_exc(fileobj): 112 exceptions = {} 113 for line in fileobj: 114 if line.startswith(' '): 115 continue 116 pieces = line.split() 117 exceptions[pieces[0]] = tuple(pieces[1:]) 118 return exceptions 119 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/spacy/lemmatizer.py b/spacy/lemmatizer.py --- a/spacy/lemmatizer.py +++ b/spacy/lemmatizer.py @@ -86,13 +86,16 @@ #if string in index: # forms.append(string) forms.extend(exceptions.get(string, [])) + oov_forms = [] for old, new in rules: if string.endswith(old): form = string[:len(string) - len(old)] + new if form in index or not form.isalpha(): forms.append(form) + else: + oov_forms.append(form) if not forms: - forms.append(string) + forms.extend(oov_forms) return set(forms)
{"golden_diff": "diff --git a/spacy/lemmatizer.py b/spacy/lemmatizer.py\n--- a/spacy/lemmatizer.py\n+++ b/spacy/lemmatizer.py\n@@ -86,13 +86,16 @@\n #if string in index:\n # forms.append(string)\n forms.extend(exceptions.get(string, []))\n+ oov_forms = []\n for old, new in rules:\n if string.endswith(old):\n form = string[:len(string) - len(old)] + new\n if form in index or not form.isalpha():\n forms.append(form)\n+ else:\n+ oov_forms.append(form)\n if not forms:\n- forms.append(string)\n+ forms.extend(oov_forms)\n return set(forms)\n", "issue": "\ud83d\udcab Lemmatizer should apply rules on OOV words\n@juanmirocks points out in #327 that the lemmatizer fails on OOV words:\r\n\r\n```python\r\n\r\n>>> nlp.vocab.morphology.lemmatizer(u'endosomes', 'noun', morphology={'number': 'plur'})set([u'endosomes'])\r\n>>> nlp.vocab.morphology.lemmatizer(u'chromosomes', 'noun', morphology={'number': 'plur'})\r\nset([u'chromosome'])\r\n```\r\n\r\nSuggested patch to lemmatizer.py\r\n\r\n```python\r\n\r\n oov_forms = []\r\n for old, new in rules:\r\n if string.endswith(old):\r\n form = string[:len(string) - len(old)] + new\r\n if form in index or not form.isalpha():\r\n forms.append(form)\r\n else:\r\n oov_forms.append(form)\r\n if not forms:\r\n forms.extend(oov_forms)\r\n```\r\n\r\n\r\n## Your Environment\r\n<!-- Include details of your environment -->\r\n* Operating System: \r\n* Python Version Used: \r\n* spaCy Version Used: \r\n* Environment Information: \r\n\n", "before_files": [{"content": "from __future__ import unicode_literals, print_function\nimport codecs\nimport pathlib\n\nimport ujson as json\n\nfrom .symbols import POS, NOUN, VERB, ADJ, PUNCT\n\n\nclass Lemmatizer(object):\n @classmethod\n def load(cls, path, rules=None):\n index = {}\n exc = {}\n for pos in ['adj', 'noun', 'verb']:\n pos_index_path = path / 'wordnet' / 'index.{pos}'.format(pos=pos)\n if pos_index_path.exists():\n with pos_index_path.open() as file_:\n index[pos] = read_index(file_)\n else:\n index[pos] = set()\n pos_exc_path = path / 'wordnet' / '{pos}.exc'.format(pos=pos)\n if pos_exc_path.exists():\n with pos_exc_path.open() as file_:\n exc[pos] = read_exc(file_)\n else:\n exc[pos] = {}\n if rules is None and (path / 'vocab' / 'lemma_rules.json').exists():\n with (path / 'vocab' / 'lemma_rules.json').open('r', encoding='utf8') as file_:\n rules = json.load(file_)\n elif rules is None:\n rules = {}\n return cls(index, exc, rules)\n\n def __init__(self, index, exceptions, rules):\n self.index = index\n self.exc = exceptions\n self.rules = rules\n\n def __call__(self, string, univ_pos, morphology=None):\n if univ_pos == NOUN:\n univ_pos = 'noun'\n elif univ_pos == VERB:\n univ_pos = 'verb'\n elif univ_pos == ADJ:\n univ_pos = 'adj'\n elif univ_pos == PUNCT:\n univ_pos = 'punct'\n # See Issue #435 for example of where this logic is requied.\n if self.is_base_form(univ_pos, morphology):\n return set([string.lower()])\n lemmas = lemmatize(string, self.index.get(univ_pos, {}),\n self.exc.get(univ_pos, {}),\n self.rules.get(univ_pos, []))\n return lemmas\n\n def is_base_form(self, univ_pos, morphology=None):\n '''Check whether we're dealing with an uninflected paradigm, so we can\n avoid lemmatization entirely.'''\n morphology = {} if morphology is None else morphology\n others = [key for key in morphology if key not in (POS, 'number', 'pos', 'verbform')]\n if univ_pos == 'noun' and morphology.get('number') == 'sing' and not others:\n return True\n elif univ_pos == 'verb' and morphology.get('verbform') == 'inf' and not others:\n 
return True\n else:\n return False\n\n def noun(self, string, morphology=None):\n return self(string, 'noun', morphology)\n\n def verb(self, string, morphology=None):\n return self(string, 'verb', morphology)\n\n def adj(self, string, morphology=None):\n return self(string, 'adj', morphology)\n\n def punct(self, string, morphology=None):\n return self(string, 'punct', morphology)\n\n\ndef lemmatize(string, index, exceptions, rules):\n string = string.lower()\n forms = []\n # TODO: Is this correct? See discussion in Issue #435.\n #if string in index:\n # forms.append(string)\n forms.extend(exceptions.get(string, []))\n for old, new in rules:\n if string.endswith(old):\n form = string[:len(string) - len(old)] + new\n if form in index or not form.isalpha():\n forms.append(form)\n if not forms:\n forms.append(string)\n return set(forms)\n\n\ndef read_index(fileobj):\n index = set()\n for line in fileobj:\n if line.startswith(' '):\n continue\n pieces = line.split()\n word = pieces[0]\n if word.count('_') == 0:\n index.add(word)\n return index\n\n\ndef read_exc(fileobj):\n exceptions = {}\n for line in fileobj:\n if line.startswith(' '):\n continue\n pieces = line.split()\n exceptions[pieces[0]] = tuple(pieces[1:])\n return exceptions\n", "path": "spacy/lemmatizer.py"}], "after_files": [{"content": "from __future__ import unicode_literals, print_function\nimport codecs\nimport pathlib\n\nimport ujson as json\n\nfrom .symbols import POS, NOUN, VERB, ADJ, PUNCT\n\n\nclass Lemmatizer(object):\n @classmethod\n def load(cls, path, rules=None):\n index = {}\n exc = {}\n for pos in ['adj', 'noun', 'verb']:\n pos_index_path = path / 'wordnet' / 'index.{pos}'.format(pos=pos)\n if pos_index_path.exists():\n with pos_index_path.open() as file_:\n index[pos] = read_index(file_)\n else:\n index[pos] = set()\n pos_exc_path = path / 'wordnet' / '{pos}.exc'.format(pos=pos)\n if pos_exc_path.exists():\n with pos_exc_path.open() as file_:\n exc[pos] = read_exc(file_)\n else:\n exc[pos] = {}\n if rules is None and (path / 'vocab' / 'lemma_rules.json').exists():\n with (path / 'vocab' / 'lemma_rules.json').open('r', encoding='utf8') as file_:\n rules = json.load(file_)\n elif rules is None:\n rules = {}\n return cls(index, exc, rules)\n\n def __init__(self, index, exceptions, rules):\n self.index = index\n self.exc = exceptions\n self.rules = rules\n\n def __call__(self, string, univ_pos, morphology=None):\n if univ_pos == NOUN:\n univ_pos = 'noun'\n elif univ_pos == VERB:\n univ_pos = 'verb'\n elif univ_pos == ADJ:\n univ_pos = 'adj'\n elif univ_pos == PUNCT:\n univ_pos = 'punct'\n # See Issue #435 for example of where this logic is requied.\n if self.is_base_form(univ_pos, morphology):\n return set([string.lower()])\n lemmas = lemmatize(string, self.index.get(univ_pos, {}),\n self.exc.get(univ_pos, {}),\n self.rules.get(univ_pos, []))\n return lemmas\n\n def is_base_form(self, univ_pos, morphology=None):\n '''Check whether we're dealing with an uninflected paradigm, so we can\n avoid lemmatization entirely.'''\n morphology = {} if morphology is None else morphology\n others = [key for key in morphology if key not in (POS, 'number', 'pos', 'verbform')]\n if univ_pos == 'noun' and morphology.get('number') == 'sing' and not others:\n return True\n elif univ_pos == 'verb' and morphology.get('verbform') == 'inf' and not others:\n return True\n else:\n return False\n\n def noun(self, string, morphology=None):\n return self(string, 'noun', morphology)\n\n def verb(self, string, morphology=None):\n return self(string, 
'verb', morphology)\n\n def adj(self, string, morphology=None):\n return self(string, 'adj', morphology)\n\n def punct(self, string, morphology=None):\n return self(string, 'punct', morphology)\n\n\ndef lemmatize(string, index, exceptions, rules):\n string = string.lower()\n forms = []\n # TODO: Is this correct? See discussion in Issue #435.\n #if string in index:\n # forms.append(string)\n forms.extend(exceptions.get(string, []))\n oov_forms = []\n for old, new in rules:\n if string.endswith(old):\n form = string[:len(string) - len(old)] + new\n if form in index or not form.isalpha():\n forms.append(form)\n else:\n oov_forms.append(form)\n if not forms:\n forms.extend(oov_forms)\n return set(forms)\n\n\ndef read_index(fileobj):\n index = set()\n for line in fileobj:\n if line.startswith(' '):\n continue\n pieces = line.split()\n word = pieces[0]\n if word.count('_') == 0:\n index.add(word)\n return index\n\n\ndef read_exc(fileobj):\n exceptions = {}\n for line in fileobj:\n if line.startswith(' '):\n continue\n pieces = line.split()\n exceptions[pieces[0]] = tuple(pieces[1:])\n return exceptions\n", "path": "spacy/lemmatizer.py"}]}
1,681
166
gh_patches_debug_63189
rasdani/github-patches
git_diff
OpenEnergyPlatform__oeplatform-605
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Add OEO Steering Committee Subpage The OEO Steering Committee needs its own web page, which should be a sub page of the OEP. Please create such a sub page @jh-RLI . I think it makes sense to link it somewhere under ontology. https://openenergy-platform.org/ontology/ The URL would then be https://openenergy-platform.org/ontology/oeo-steering-committee Content for the page is here: https://github.com/OpenEnergyPlatform/ontology/wiki/OEO-Steering-Committee An issue to create an English translation is open here: https://github.com/OpenEnergyPlatform/ontology/issues/313 Creating the page and making it look simple, but decent enough are priorities. The final text and location can easily be changed later on. Contact me if you have any questions. Feel free to give feedback make changes to this issue @Ludee --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `ontology/urls.py` Content: ``` 1 from django.conf.urls import url 2 from django.conf.urls.static import static 3 from django.views.generic import TemplateView 4 5 from modelview import views 6 from oeplatform import settings 7 8 urlpatterns = [ 9 url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), 10 ] 11 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/ontology/urls.py b/ontology/urls.py --- a/ontology/urls.py +++ b/ontology/urls.py @@ -7,4 +7,7 @@ urlpatterns = [ url(r"^$", TemplateView.as_view(template_name="ontology/about.html")), + url(r"^ontology/oeo-steering-committee$", + TemplateView.as_view(template_name="ontology/oeo-steering-committee.html"), + name="oeo-s-c"), ]
{"golden_diff": "diff --git a/ontology/urls.py b/ontology/urls.py\n--- a/ontology/urls.py\n+++ b/ontology/urls.py\n@@ -7,4 +7,7 @@\n \n urlpatterns = [\n url(r\"^$\", TemplateView.as_view(template_name=\"ontology/about.html\")),\n+ url(r\"^ontology/oeo-steering-committee$\",\n+ TemplateView.as_view(template_name=\"ontology/oeo-steering-committee.html\"),\n+ name=\"oeo-s-c\"),\n ]\n", "issue": "Add OEO Steering Committee Subpage\nThe OEO Steering Committee needs its own web page, which should be a sub page of the OEP. Please create such a sub page @jh-RLI . I think it makes sense to link it somewhere under ontology.\r\n\r\nhttps://openenergy-platform.org/ontology/\r\n\r\nThe URL would then be \r\n\r\nhttps://openenergy-platform.org/ontology/oeo-steering-committee\r\n\r\nContent for the page is here:\r\n\r\nhttps://github.com/OpenEnergyPlatform/ontology/wiki/OEO-Steering-Committee\r\n\r\nAn issue to create an English translation is open here: https://github.com/OpenEnergyPlatform/ontology/issues/313\r\n\r\nCreating the page and making it look simple, but decent enough are priorities. The final text and location can easily be changed later on. Contact me if you have any questions. \r\n\r\nFeel free to give feedback make changes to this issue @Ludee \n", "before_files": [{"content": "from django.conf.urls import url\nfrom django.conf.urls.static import static\nfrom django.views.generic import TemplateView\n\nfrom modelview import views\nfrom oeplatform import settings\n\nurlpatterns = [\n url(r\"^$\", TemplateView.as_view(template_name=\"ontology/about.html\")),\n]\n", "path": "ontology/urls.py"}], "after_files": [{"content": "from django.conf.urls import url\nfrom django.conf.urls.static import static\nfrom django.views.generic import TemplateView\n\nfrom modelview import views\nfrom oeplatform import settings\n\nurlpatterns = [\n url(r\"^$\", TemplateView.as_view(template_name=\"ontology/about.html\")),\n url(r\"^ontology/oeo-steering-committee$\",\n TemplateView.as_view(template_name=\"ontology/oeo-steering-committee.html\"),\n name=\"oeo-s-c\"),\n]\n", "path": "ontology/urls.py"}]}
516
106
gh_patches_debug_986
rasdani/github-patches
git_diff
marshmallow-code__webargs-482
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Fix simple typo: objec -> object There is a small typo in src/webargs/flaskparser.py. Should read `object` rather than `objec`. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `src/webargs/flaskparser.py` Content: ``` 1 """Flask request argument parsing module. 2 3 Example: :: 4 5 from flask import Flask 6 7 from webargs import fields 8 from webargs.flaskparser import use_args 9 10 app = Flask(__name__) 11 12 hello_args = { 13 'name': fields.Str(required=True) 14 } 15 16 @app.route('/') 17 @use_args(hello_args) 18 def index(args): 19 return 'Hello ' + args['name'] 20 """ 21 import flask 22 from werkzeug.exceptions import HTTPException 23 24 from webargs import core 25 from webargs.compat import MARSHMALLOW_VERSION_INFO 26 from webargs.multidictproxy import MultiDictProxy 27 28 29 def abort(http_status_code, exc=None, **kwargs): 30 """Raise a HTTPException for the given http_status_code. Attach any keyword 31 arguments to the exception for later processing. 32 33 From Flask-Restful. See NOTICE file for license information. 34 """ 35 try: 36 flask.abort(http_status_code) 37 except HTTPException as err: 38 err.data = kwargs 39 err.exc = exc 40 raise err 41 42 43 def is_json_request(req): 44 return core.is_json(req.mimetype) 45 46 47 class FlaskParser(core.Parser): 48 """Flask request argument parser.""" 49 50 __location_map__ = dict( 51 view_args="load_view_args", 52 path="load_view_args", 53 **core.Parser.__location_map__, 54 ) 55 56 def _raw_load_json(self, req): 57 """Return a json payload from the request for the core parser's load_json 58 59 Checks the input mimetype and may return 'missing' if the mimetype is 60 non-json, even if the request body is parseable as json.""" 61 if not is_json_request(req): 62 return core.missing 63 64 return core.parse_json(req.get_data(cache=True)) 65 66 def _handle_invalid_json_error(self, error, req, *args, **kwargs): 67 abort(400, exc=error, messages={"json": ["Invalid JSON body."]}) 68 69 def load_view_args(self, req, schema): 70 """Return the request's ``view_args`` or ``missing`` if there are none.""" 71 return req.view_args or core.missing 72 73 def load_querystring(self, req, schema): 74 """Return query params from the request as a MultiDictProxy.""" 75 return MultiDictProxy(req.args, schema) 76 77 def load_form(self, req, schema): 78 """Return form values from the request as a MultiDictProxy.""" 79 return MultiDictProxy(req.form, schema) 80 81 def load_headers(self, req, schema): 82 """Return headers from the request as a MultiDictProxy.""" 83 return MultiDictProxy(req.headers, schema) 84 85 def load_cookies(self, req, schema): 86 """Return cookies from the request.""" 87 return req.cookies 88 89 def load_files(self, req, schema): 90 """Return files from the request as a MultiDictProxy.""" 91 return MultiDictProxy(req.files, schema) 92 93 def handle_error(self, error, req, schema, *, error_status_code, error_headers): 94 """Handles errors during parsing. Aborts the current HTTP request and 95 responds with a 422 error. 
96 """ 97 status_code = error_status_code or self.DEFAULT_VALIDATION_STATUS 98 # on marshmallow 2, a many schema receiving a non-list value will 99 # produce this specific error back -- reformat it to match the 100 # marshmallow 3 message so that Flask can properly encode it 101 messages = error.messages 102 if ( 103 MARSHMALLOW_VERSION_INFO[0] < 3 104 and schema.many 105 and messages == {0: {}, "_schema": ["Invalid input type."]} 106 ): 107 messages.pop(0) 108 abort( 109 status_code, 110 exc=error, 111 messages=error.messages, 112 schema=schema, 113 headers=error_headers, 114 ) 115 116 def get_default_request(self): 117 """Override to use Flask's thread-local request objec by default""" 118 return flask.request 119 120 121 parser = FlaskParser() 122 use_args = parser.use_args 123 use_kwargs = parser.use_kwargs 124 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/src/webargs/flaskparser.py b/src/webargs/flaskparser.py --- a/src/webargs/flaskparser.py +++ b/src/webargs/flaskparser.py @@ -114,7 +114,7 @@ ) def get_default_request(self): - """Override to use Flask's thread-local request objec by default""" + """Override to use Flask's thread-local request object by default""" return flask.request
{"golden_diff": "diff --git a/src/webargs/flaskparser.py b/src/webargs/flaskparser.py\n--- a/src/webargs/flaskparser.py\n+++ b/src/webargs/flaskparser.py\n@@ -114,7 +114,7 @@\n )\n \n def get_default_request(self):\n- \"\"\"Override to use Flask's thread-local request objec by default\"\"\"\n+ \"\"\"Override to use Flask's thread-local request object by default\"\"\"\n return flask.request\n", "issue": "Fix simple typo: objec -> object\nThere is a small typo in src/webargs/flaskparser.py.\nShould read `object` rather than `objec`.\n\n\n", "before_files": [{"content": "\"\"\"Flask request argument parsing module.\n\nExample: ::\n\n from flask import Flask\n\n from webargs import fields\n from webargs.flaskparser import use_args\n\n app = Flask(__name__)\n\n hello_args = {\n 'name': fields.Str(required=True)\n }\n\n @app.route('/')\n @use_args(hello_args)\n def index(args):\n return 'Hello ' + args['name']\n\"\"\"\nimport flask\nfrom werkzeug.exceptions import HTTPException\n\nfrom webargs import core\nfrom webargs.compat import MARSHMALLOW_VERSION_INFO\nfrom webargs.multidictproxy import MultiDictProxy\n\n\ndef abort(http_status_code, exc=None, **kwargs):\n \"\"\"Raise a HTTPException for the given http_status_code. Attach any keyword\n arguments to the exception for later processing.\n\n From Flask-Restful. See NOTICE file for license information.\n \"\"\"\n try:\n flask.abort(http_status_code)\n except HTTPException as err:\n err.data = kwargs\n err.exc = exc\n raise err\n\n\ndef is_json_request(req):\n return core.is_json(req.mimetype)\n\n\nclass FlaskParser(core.Parser):\n \"\"\"Flask request argument parser.\"\"\"\n\n __location_map__ = dict(\n view_args=\"load_view_args\",\n path=\"load_view_args\",\n **core.Parser.__location_map__,\n )\n\n def _raw_load_json(self, req):\n \"\"\"Return a json payload from the request for the core parser's load_json\n\n Checks the input mimetype and may return 'missing' if the mimetype is\n non-json, even if the request body is parseable as json.\"\"\"\n if not is_json_request(req):\n return core.missing\n\n return core.parse_json(req.get_data(cache=True))\n\n def _handle_invalid_json_error(self, error, req, *args, **kwargs):\n abort(400, exc=error, messages={\"json\": [\"Invalid JSON body.\"]})\n\n def load_view_args(self, req, schema):\n \"\"\"Return the request's ``view_args`` or ``missing`` if there are none.\"\"\"\n return req.view_args or core.missing\n\n def load_querystring(self, req, schema):\n \"\"\"Return query params from the request as a MultiDictProxy.\"\"\"\n return MultiDictProxy(req.args, schema)\n\n def load_form(self, req, schema):\n \"\"\"Return form values from the request as a MultiDictProxy.\"\"\"\n return MultiDictProxy(req.form, schema)\n\n def load_headers(self, req, schema):\n \"\"\"Return headers from the request as a MultiDictProxy.\"\"\"\n return MultiDictProxy(req.headers, schema)\n\n def load_cookies(self, req, schema):\n \"\"\"Return cookies from the request.\"\"\"\n return req.cookies\n\n def load_files(self, req, schema):\n \"\"\"Return files from the request as a MultiDictProxy.\"\"\"\n return MultiDictProxy(req.files, schema)\n\n def handle_error(self, error, req, schema, *, error_status_code, error_headers):\n \"\"\"Handles errors during parsing. 
Aborts the current HTTP request and\n responds with a 422 error.\n \"\"\"\n status_code = error_status_code or self.DEFAULT_VALIDATION_STATUS\n # on marshmallow 2, a many schema receiving a non-list value will\n # produce this specific error back -- reformat it to match the\n # marshmallow 3 message so that Flask can properly encode it\n messages = error.messages\n if (\n MARSHMALLOW_VERSION_INFO[0] < 3\n and schema.many\n and messages == {0: {}, \"_schema\": [\"Invalid input type.\"]}\n ):\n messages.pop(0)\n abort(\n status_code,\n exc=error,\n messages=error.messages,\n schema=schema,\n headers=error_headers,\n )\n\n def get_default_request(self):\n \"\"\"Override to use Flask's thread-local request objec by default\"\"\"\n return flask.request\n\n\nparser = FlaskParser()\nuse_args = parser.use_args\nuse_kwargs = parser.use_kwargs\n", "path": "src/webargs/flaskparser.py"}], "after_files": [{"content": "\"\"\"Flask request argument parsing module.\n\nExample: ::\n\n from flask import Flask\n\n from webargs import fields\n from webargs.flaskparser import use_args\n\n app = Flask(__name__)\n\n hello_args = {\n 'name': fields.Str(required=True)\n }\n\n @app.route('/')\n @use_args(hello_args)\n def index(args):\n return 'Hello ' + args['name']\n\"\"\"\nimport flask\nfrom werkzeug.exceptions import HTTPException\n\nfrom webargs import core\nfrom webargs.compat import MARSHMALLOW_VERSION_INFO\nfrom webargs.multidictproxy import MultiDictProxy\n\n\ndef abort(http_status_code, exc=None, **kwargs):\n \"\"\"Raise a HTTPException for the given http_status_code. Attach any keyword\n arguments to the exception for later processing.\n\n From Flask-Restful. See NOTICE file for license information.\n \"\"\"\n try:\n flask.abort(http_status_code)\n except HTTPException as err:\n err.data = kwargs\n err.exc = exc\n raise err\n\n\ndef is_json_request(req):\n return core.is_json(req.mimetype)\n\n\nclass FlaskParser(core.Parser):\n \"\"\"Flask request argument parser.\"\"\"\n\n __location_map__ = dict(\n view_args=\"load_view_args\",\n path=\"load_view_args\",\n **core.Parser.__location_map__,\n )\n\n def _raw_load_json(self, req):\n \"\"\"Return a json payload from the request for the core parser's load_json\n\n Checks the input mimetype and may return 'missing' if the mimetype is\n non-json, even if the request body is parseable as json.\"\"\"\n if not is_json_request(req):\n return core.missing\n\n return core.parse_json(req.get_data(cache=True))\n\n def _handle_invalid_json_error(self, error, req, *args, **kwargs):\n abort(400, exc=error, messages={\"json\": [\"Invalid JSON body.\"]})\n\n def load_view_args(self, req, schema):\n \"\"\"Return the request's ``view_args`` or ``missing`` if there are none.\"\"\"\n return req.view_args or core.missing\n\n def load_querystring(self, req, schema):\n \"\"\"Return query params from the request as a MultiDictProxy.\"\"\"\n return MultiDictProxy(req.args, schema)\n\n def load_form(self, req, schema):\n \"\"\"Return form values from the request as a MultiDictProxy.\"\"\"\n return MultiDictProxy(req.form, schema)\n\n def load_headers(self, req, schema):\n \"\"\"Return headers from the request as a MultiDictProxy.\"\"\"\n return MultiDictProxy(req.headers, schema)\n\n def load_cookies(self, req, schema):\n \"\"\"Return cookies from the request.\"\"\"\n return req.cookies\n\n def load_files(self, req, schema):\n \"\"\"Return files from the request as a MultiDictProxy.\"\"\"\n return MultiDictProxy(req.files, schema)\n\n def handle_error(self, error, req, schema, *, 
error_status_code, error_headers):\n \"\"\"Handles errors during parsing. Aborts the current HTTP request and\n responds with a 422 error.\n \"\"\"\n status_code = error_status_code or self.DEFAULT_VALIDATION_STATUS\n # on marshmallow 2, a many schema receiving a non-list value will\n # produce this specific error back -- reformat it to match the\n # marshmallow 3 message so that Flask can properly encode it\n messages = error.messages\n if (\n MARSHMALLOW_VERSION_INFO[0] < 3\n and schema.many\n and messages == {0: {}, \"_schema\": [\"Invalid input type.\"]}\n ):\n messages.pop(0)\n abort(\n status_code,\n exc=error,\n messages=error.messages,\n schema=schema,\n headers=error_headers,\n )\n\n def get_default_request(self):\n \"\"\"Override to use Flask's thread-local request object by default\"\"\"\n return flask.request\n\n\nparser = FlaskParser()\nuse_args = parser.use_args\nuse_kwargs = parser.use_kwargs\n", "path": "src/webargs/flaskparser.py"}]}
1,427
100
gh_patches_debug_40085
rasdani/github-patches
git_diff
sql-machine-learning__elasticdl-510
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Implement additional proto definitions and service for reporting evaluation metrics This is part of https://github.com/wangkuiyi/elasticdl/issues/384. Implement `MasterServicer.ReportEvaluationMetrics()` and additional proto definitions such as `ReportEvaluationMetricsReply` and `ReportEvaluationMetricsRequest`. Implement additional proto definitions and service for reporting evaluation metrics This is part of https://github.com/wangkuiyi/elasticdl/issues/384. Implement `MasterServicer.ReportEvaluationMetrics()` and additional proto definitions such as `ReportEvaluationMetricsReply` and `ReportEvaluationMetricsRequest`. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `elasticdl/python/elasticdl/master/servicer.py` Content: ``` 1 import threading 2 import numpy as np 3 4 import tensorflow as tf 5 6 assert tf.executing_eagerly() 7 8 from google.protobuf import empty_pb2 9 10 from elasticdl.proto import elasticdl_pb2 11 from elasticdl.proto import elasticdl_pb2_grpc 12 from elasticdl.python.elasticdl.common.ndarray import ndarray_to_tensor, tensor_to_ndarray 13 14 15 class MasterServicer(elasticdl_pb2_grpc.MasterServicer): 16 """Master service implementation""" 17 18 def __init__( 19 self, 20 logger, 21 grads_to_wait, 22 minibatch_size, 23 optimizer, 24 task_q, 25 *, 26 init_var=[] 27 ): 28 # TODO: group params together into a single object. 29 self.logger = logger 30 self._opt = optimizer 31 self._task_q = task_q 32 self._lock = threading.Lock() 33 # A <string, tf.ResourceVariable> map. We use tf.ResourceVariable 34 # instead ndarray to avoid copying and conversion when calling 35 # optimizer's apply_gradients() function. 36 self._model = {} 37 self._version = 0 38 self._gradient_sum = {} 39 self._grad_to_wait = grads_to_wait 40 self._grad_n = 0 41 self._minibatch_size = minibatch_size 42 for var in init_var: 43 self.set_model_var(var.name, var.numpy()) 44 45 def set_model_var(self, name, value): 46 """Add or set model variable. 
Value should be a float32 ndarray""" 47 if value.dtype != np.float32: 48 raise ValueError("Value should be a float32 numpy array") 49 self._model[name] = tf.Variable( 50 value, name=MasterServicer.var_name_encode(name) 51 ) 52 53 @staticmethod 54 def var_name_encode(name): 55 return name.replace(":", "-") 56 57 def GetTask(self, request, _): 58 res = elasticdl_pb2.Task() 59 res.model_version = self._version 60 res.minibatch_size = self._minibatch_size 61 task_id, task = self._task_q.get(request.worker_id) 62 if task: 63 res.task_id = task_id 64 res.shard_file_name = task.file_name 65 res.start = task.start 66 res.end = task.end 67 return res 68 69 def GetModel(self, request, _): 70 if request.min_version > self._version: 71 err_msg = ( 72 "Requested version %d not available yet, current version: %d" 73 % (request.min_version, self._version) 74 ) 75 self.logger.warning(err_msg) 76 raise ValueError(err_msg) 77 78 res = elasticdl_pb2.Model() 79 with self._lock: 80 res.version = self._version 81 for k, v in self._model.items(): 82 res.param[k].CopyFrom(ndarray_to_tensor(v.numpy())) 83 return res 84 85 def _update_model(self): 86 assert self._lock.locked() 87 grad_var = [] 88 for k in self._gradient_sum: 89 self._gradient_sum[k] = self._gradient_sum[k] / self._grad_to_wait 90 grad_var.append((self._gradient_sum[k], self._model[k])) 91 self._opt.apply_gradients(grad_var) 92 self._version += 1 93 self._gradient_sum.clear() 94 self._grad_n = 0 95 96 def ReportGradient(self, request, _): 97 if request.model_version > self._version: 98 err_msg = "Model version %d out of range, current version: %d" % ( 99 request.model_version, 100 self._version, 101 ) 102 self.logger.warning(err_msg) 103 raise ValueError(err_msg) 104 105 res = elasticdl_pb2.ReportGradientReply() 106 if request.model_version < self._version: 107 self.logger.warning( 108 "Task result for outdated version %d dropped", 109 request.model_version, 110 ) 111 res.accepted = False 112 res.model_version = self._version 113 return res 114 115 # TODO: Update task queue with task_id 116 with self._lock: 117 tmp = {} 118 # Do sanity check before accumulating gradients. 119 for k, v in request.gradient.items(): 120 if k not in self._model: 121 raise ValueError( 122 "Gradient key: %s is not part of model", k 123 ) 124 arr = tensor_to_ndarray(v) 125 if arr.shape != self._model[k].numpy().shape: 126 raise ValueError( 127 "Gradient key: %s has incompatible dimension", k 128 ) 129 tmp[k] = arr 130 131 for k, v in tmp.items(): 132 if k in self._gradient_sum: 133 self._gradient_sum[k] = self._gradient_sum[k] + v 134 else: 135 self._gradient_sum[k] = v 136 137 self._grad_n += 1 138 if self._grad_n >= self._grad_to_wait: 139 self._update_model() 140 res.accepted = True 141 res.model_version = self._version 142 return res 143 144 def ReportTaskResult(self, request, _): 145 if request.err_message: 146 self.logger.warning( 147 "Worker reported error: " + request.err_message 148 ) 149 self._task_q.report(request.task_id, False) 150 else: 151 self._task_q.report(request.task_id, True) 152 return empty_pb2.Empty() 153 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
diff --git a/elasticdl/python/elasticdl/master/servicer.py b/elasticdl/python/elasticdl/master/servicer.py
--- a/elasticdl/python/elasticdl/master/servicer.py
+++ b/elasticdl/python/elasticdl/master/servicer.py
@@ -39,6 +39,7 @@
         self._grad_to_wait = grads_to_wait
         self._grad_n = 0
         self._minibatch_size = minibatch_size
+        self._evaluation_metrics = {}
         for var in init_var:
             self.set_model_var(var.name, var.numpy())
 
@@ -67,13 +68,7 @@
         return res
 
     def GetModel(self, request, _):
-        if request.min_version > self._version:
-            err_msg = (
-                "Requested version %d not available yet, current version: %d"
-                % (request.min_version, self._version)
-            )
-            self.logger.warning(err_msg)
-            raise ValueError(err_msg)
+        _ = self._validate_model_version(request.min_version)
 
         res = elasticdl_pb2.Model()
         with self._lock:
@@ -93,21 +88,28 @@
         self._gradient_sum.clear()
         self._grad_n = 0
 
-    def ReportGradient(self, request, _):
-        if request.model_version > self._version:
-            err_msg = "Model version %d out of range, current version: %d" % (
-                request.model_version,
+    def _validate_model_version(self, request_model_version):
+        if request_model_version > self._version:
+            err_msg = "Model version %d not available yet, current version: %d" % (
+                request_model_version,
                 self._version,
             )
             self.logger.warning(err_msg)
             raise ValueError(err_msg)
 
-        res = elasticdl_pb2.ReportGradientReply()
-        if request.model_version < self._version:
+        invalid_model_version = request_model_version < self._version
+        if invalid_model_version:
             self.logger.warning(
                 "Task result for outdated version %d dropped",
-                request.model_version,
+                request_model_version,
             )
+        return invalid_model_version
+
+    def ReportGradient(self, request, _):
+        invalid_model_version = self._validate_model_version(request.model_version)
+
+        res = elasticdl_pb2.ReportGradientResponse()
+        if invalid_model_version:
             res.accepted = False
             res.model_version = self._version
             return res
@@ -150,3 +152,22 @@
         else:
             self._task_q.report(request.task_id, True)
         return empty_pb2.Empty()
+
+    def ReportEvaluationMetrics(self, request, _):
+        invalid_model_version = self._validate_model_version(request.model_version)
+
+        res = elasticdl_pb2.ReportEvaluationMetricsResponse()
+        if invalid_model_version:
+            res.accepted = False
+            res.model_version = self._version
+            return res
+
+        with self._lock:
+            for k, v in request.evaluation_metrics.items():
+                arr = tensor_to_ndarray(v)
+                self._evaluation_metrics[k] = arr
+
+        self._update_model()
+        res.accepted = True
+        res.model_version = self._version
+        return res
{"golden_diff": "diff --git a/elasticdl/python/elasticdl/master/servicer.py b/elasticdl/python/elasticdl/master/servicer.py\n--- a/elasticdl/python/elasticdl/master/servicer.py\n+++ b/elasticdl/python/elasticdl/master/servicer.py\n@@ -39,6 +39,7 @@\n self._grad_to_wait = grads_to_wait\n self._grad_n = 0\n self._minibatch_size = minibatch_size\n+ self._evaluation_metrics = {}\n for var in init_var:\n self.set_model_var(var.name, var.numpy())\n \n@@ -67,13 +68,7 @@\n return res\n \n def GetModel(self, request, _):\n- if request.min_version > self._version:\n- err_msg = (\n- \"Requested version %d not available yet, current version: %d\"\n- % (request.min_version, self._version)\n- )\n- self.logger.warning(err_msg)\n- raise ValueError(err_msg)\n+ _ = self._validate_model_version(request.min_version)\n \n res = elasticdl_pb2.Model()\n with self._lock:\n@@ -93,21 +88,28 @@\n self._gradient_sum.clear()\n self._grad_n = 0\n \n- def ReportGradient(self, request, _):\n- if request.model_version > self._version:\n- err_msg = \"Model version %d out of range, current version: %d\" % (\n- request.model_version,\n+ def _validate_model_version(self, request_model_version):\n+ if request_model_version > self._version:\n+ err_msg = \"Model version %d not available yet, current version: %d\" % (\n+ request_model_version,\n self._version,\n )\n self.logger.warning(err_msg)\n raise ValueError(err_msg)\n \n- res = elasticdl_pb2.ReportGradientReply()\n- if request.model_version < self._version:\n+ invalid_model_version = request_model_version < self._version\n+ if invalid_model_version:\n self.logger.warning(\n \"Task result for outdated version %d dropped\",\n- request.model_version,\n+ request_model_version,\n )\n+ return invalid_model_version\n+\n+ def ReportGradient(self, request, _):\n+ invalid_model_version = self._validate_model_version(request.model_version)\n+\n+ res = elasticdl_pb2.ReportGradientResponse()\n+ if invalid_model_version:\n res.accepted = False\n res.model_version = self._version\n return res\n@@ -150,3 +152,22 @@\n else:\n self._task_q.report(request.task_id, True)\n return empty_pb2.Empty()\n+\n+ def ReportEvaluationMetrics(self, request, _):\n+ invalid_model_version = self._validate_model_version(request.model_version)\n+\n+ res = elasticdl_pb2.ReportEvaluationMetricsResponse()\n+ if invalid_model_version:\n+ res.accepted = False\n+ res.model_version = self._version\n+ return res\n+\n+ with self._lock:\n+ for k, v in request.evaluation_metrics.items():\n+ arr = tensor_to_ndarray(v)\n+ self._evaluation_metrics[k] = arr\n+\n+ self._update_model()\n+ res.accepted = True\n+ res.model_version = self._version\n+ return res\n", "issue": "Implement additional proto definitions and service for reporting evaluation metrics\nThis is part of https://github.com/wangkuiyi/elasticdl/issues/384. Implement `MasterServicer.ReportEvaluationMetrics()` and additional proto definitions such as `ReportEvaluationMetricsReply` and `ReportEvaluationMetricsRequest`.\nImplement additional proto definitions and service for reporting evaluation metrics\nThis is part of https://github.com/wangkuiyi/elasticdl/issues/384. 
Implement `MasterServicer.ReportEvaluationMetrics()` and additional proto definitions such as `ReportEvaluationMetricsReply` and `ReportEvaluationMetricsRequest`.\n", "before_files": [{"content": "import threading\nimport numpy as np\n\nimport tensorflow as tf\n\nassert tf.executing_eagerly()\n\nfrom google.protobuf import empty_pb2\n\nfrom elasticdl.proto import elasticdl_pb2\nfrom elasticdl.proto import elasticdl_pb2_grpc\nfrom elasticdl.python.elasticdl.common.ndarray import ndarray_to_tensor, tensor_to_ndarray\n\n\nclass MasterServicer(elasticdl_pb2_grpc.MasterServicer):\n \"\"\"Master service implementation\"\"\"\n\n def __init__(\n self,\n logger,\n grads_to_wait,\n minibatch_size,\n optimizer,\n task_q,\n *,\n init_var=[]\n ):\n # TODO: group params together into a single object.\n self.logger = logger\n self._opt = optimizer\n self._task_q = task_q\n self._lock = threading.Lock()\n # A <string, tf.ResourceVariable> map. We use tf.ResourceVariable\n # instead ndarray to avoid copying and conversion when calling\n # optimizer's apply_gradients() function.\n self._model = {}\n self._version = 0\n self._gradient_sum = {}\n self._grad_to_wait = grads_to_wait\n self._grad_n = 0\n self._minibatch_size = minibatch_size\n for var in init_var:\n self.set_model_var(var.name, var.numpy())\n\n def set_model_var(self, name, value):\n \"\"\"Add or set model variable. Value should be a float32 ndarray\"\"\"\n if value.dtype != np.float32:\n raise ValueError(\"Value should be a float32 numpy array\")\n self._model[name] = tf.Variable(\n value, name=MasterServicer.var_name_encode(name)\n )\n\n @staticmethod\n def var_name_encode(name):\n return name.replace(\":\", \"-\")\n\n def GetTask(self, request, _):\n res = elasticdl_pb2.Task()\n res.model_version = self._version\n res.minibatch_size = self._minibatch_size\n task_id, task = self._task_q.get(request.worker_id)\n if task:\n res.task_id = task_id\n res.shard_file_name = task.file_name\n res.start = task.start\n res.end = task.end\n return res\n\n def GetModel(self, request, _):\n if request.min_version > self._version:\n err_msg = (\n \"Requested version %d not available yet, current version: %d\"\n % (request.min_version, self._version)\n )\n self.logger.warning(err_msg)\n raise ValueError(err_msg)\n\n res = elasticdl_pb2.Model()\n with self._lock:\n res.version = self._version\n for k, v in self._model.items():\n res.param[k].CopyFrom(ndarray_to_tensor(v.numpy()))\n return res\n\n def _update_model(self):\n assert self._lock.locked()\n grad_var = []\n for k in self._gradient_sum:\n self._gradient_sum[k] = self._gradient_sum[k] / self._grad_to_wait\n grad_var.append((self._gradient_sum[k], self._model[k]))\n self._opt.apply_gradients(grad_var)\n self._version += 1\n self._gradient_sum.clear()\n self._grad_n = 0\n\n def ReportGradient(self, request, _):\n if request.model_version > self._version:\n err_msg = \"Model version %d out of range, current version: %d\" % (\n request.model_version,\n self._version,\n )\n self.logger.warning(err_msg)\n raise ValueError(err_msg)\n\n res = elasticdl_pb2.ReportGradientReply()\n if request.model_version < self._version:\n self.logger.warning(\n \"Task result for outdated version %d dropped\",\n request.model_version,\n )\n res.accepted = False\n res.model_version = self._version\n return res\n\n # TODO: Update task queue with task_id\n with self._lock:\n tmp = {}\n # Do sanity check before accumulating gradients.\n for k, v in request.gradient.items():\n if k not in self._model:\n raise ValueError(\n 
\"Gradient key: %s is not part of model\", k\n )\n arr = tensor_to_ndarray(v)\n if arr.shape != self._model[k].numpy().shape:\n raise ValueError(\n \"Gradient key: %s has incompatible dimension\", k\n )\n tmp[k] = arr\n\n for k, v in tmp.items():\n if k in self._gradient_sum:\n self._gradient_sum[k] = self._gradient_sum[k] + v\n else:\n self._gradient_sum[k] = v\n\n self._grad_n += 1\n if self._grad_n >= self._grad_to_wait:\n self._update_model()\n res.accepted = True\n res.model_version = self._version\n return res\n\n def ReportTaskResult(self, request, _):\n if request.err_message:\n self.logger.warning(\n \"Worker reported error: \" + request.err_message\n )\n self._task_q.report(request.task_id, False)\n else:\n self._task_q.report(request.task_id, True)\n return empty_pb2.Empty()\n", "path": "elasticdl/python/elasticdl/master/servicer.py"}], "after_files": [{"content": "import threading\nimport numpy as np\n\nimport tensorflow as tf\n\nassert tf.executing_eagerly()\n\nfrom google.protobuf import empty_pb2\n\nfrom elasticdl.proto import elasticdl_pb2\nfrom elasticdl.proto import elasticdl_pb2_grpc\nfrom elasticdl.python.elasticdl.common.ndarray import ndarray_to_tensor, tensor_to_ndarray\n\n\nclass MasterServicer(elasticdl_pb2_grpc.MasterServicer):\n \"\"\"Master service implementation\"\"\"\n\n def __init__(\n self,\n logger,\n grads_to_wait,\n minibatch_size,\n optimizer,\n task_q,\n *,\n init_var=[]\n ):\n # TODO: group params together into a single object.\n self.logger = logger\n self._opt = optimizer\n self._task_q = task_q\n self._lock = threading.Lock()\n # A <string, tf.ResourceVariable> map. We use tf.ResourceVariable\n # instead ndarray to avoid copying and conversion when calling\n # optimizer's apply_gradients() function.\n self._model = {}\n self._version = 0\n self._gradient_sum = {}\n self._grad_to_wait = grads_to_wait\n self._grad_n = 0\n self._minibatch_size = minibatch_size\n self._evaluation_metrics = {}\n for var in init_var:\n self.set_model_var(var.name, var.numpy())\n\n def set_model_var(self, name, value):\n \"\"\"Add or set model variable. 
Value should be a float32 ndarray\"\"\"\n if value.dtype != np.float32:\n raise ValueError(\"Value should be a float32 numpy array\")\n self._model[name] = tf.Variable(\n value, name=MasterServicer.var_name_encode(name)\n )\n\n @staticmethod\n def var_name_encode(name):\n return name.replace(\":\", \"-\")\n\n def GetTask(self, request, _):\n res = elasticdl_pb2.Task()\n res.model_version = self._version\n res.minibatch_size = self._minibatch_size\n task_id, task = self._task_q.get(request.worker_id)\n if task:\n res.task_id = task_id\n res.shard_file_name = task.file_name\n res.start = task.start\n res.end = task.end\n return res\n\n def GetModel(self, request, _):\n _ = self._validate_model_version(request.min_version)\n\n res = elasticdl_pb2.Model()\n with self._lock:\n res.version = self._version\n for k, v in self._model.items():\n res.param[k].CopyFrom(ndarray_to_tensor(v.numpy()))\n return res\n\n def _update_model(self):\n assert self._lock.locked()\n grad_var = []\n for k in self._gradient_sum:\n self._gradient_sum[k] = self._gradient_sum[k] / self._grad_to_wait\n grad_var.append((self._gradient_sum[k], self._model[k]))\n self._opt.apply_gradients(grad_var)\n self._version += 1\n self._gradient_sum.clear()\n self._grad_n = 0\n\n def _validate_model_version(self, request_model_version):\n if request_model_version > self._version:\n err_msg = \"Model version %d not available yet, current version: %d\" % (\n request_model_version,\n self._version,\n )\n self.logger.warning(err_msg)\n raise ValueError(err_msg)\n\n invalid_model_version = request_model_version < self._version\n if invalid_model_version:\n self.logger.warning(\n \"Task result for outdated version %d dropped\",\n request_model_version,\n )\n return invalid_model_version\n\n def ReportGradient(self, request, _):\n invalid_model_version = self._validate_model_version(request.model_version)\n\n res = elasticdl_pb2.ReportGradientResponse()\n if invalid_model_version:\n res.accepted = False\n res.model_version = self._version\n return res\n\n # TODO: Update task queue with task_id\n with self._lock:\n tmp = {}\n # Do sanity check before accumulating gradients.\n for k, v in request.gradient.items():\n if k not in self._model:\n raise ValueError(\n \"Gradient key: %s is not part of model\", k\n )\n arr = tensor_to_ndarray(v)\n if arr.shape != self._model[k].numpy().shape:\n raise ValueError(\n \"Gradient key: %s has incompatible dimension\", k\n )\n tmp[k] = arr\n\n for k, v in tmp.items():\n if k in self._gradient_sum:\n self._gradient_sum[k] = self._gradient_sum[k] + v\n else:\n self._gradient_sum[k] = v\n\n self._grad_n += 1\n if self._grad_n >= self._grad_to_wait:\n self._update_model()\n res.accepted = True\n res.model_version = self._version\n return res\n\n def ReportTaskResult(self, request, _):\n if request.err_message:\n self.logger.warning(\n \"Worker reported error: \" + request.err_message\n )\n self._task_q.report(request.task_id, False)\n else:\n self._task_q.report(request.task_id, True)\n return empty_pb2.Empty()\n\n def ReportEvaluationMetrics(self, request, _):\n invalid_model_version = self._validate_model_version(request.model_version)\n\n res = elasticdl_pb2.ReportEvaluationMetricsResponse()\n if invalid_model_version:\n res.accepted = False\n res.model_version = self._version\n return res\n\n with self._lock:\n for k, v in request.evaluation_metrics.items():\n arr = tensor_to_ndarray(v)\n self._evaluation_metrics[k] = arr\n\n self._update_model()\n res.accepted = True\n res.model_version = self._version\n 
return res\n", "path": "elasticdl/python/elasticdl/master/servicer.py"}]}
1,863
734
gh_patches_debug_1568
rasdani/github-patches
git_diff
cobbler__cobbler-1266
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- build_reporting fails if empty string in ignorelist The default configuration in the ubuntu 12.04 cobbler 2.6.5 package has the following in `/etc/settings`: ``` build_reporting_ignorelist = [""] ``` The code that reads this value is in `install_post_report.py`, and the condition that determines whether to send a build report email is: ``` for prefix in settings.build_reporting_ignorelist: if name.lower().startswith(prefix) == True: sendmail = False ``` With the default configuration, this check always succeeds, and **mail is not sent**. Fix the issue by modifying the condition to: ``` if prefix != '' and name.lower().startswith(prefix): ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `cobbler/modules/install_post_report.py` Content: ``` 1 # (c) 2008-2009 2 # Jeff Schroeder <[email protected]> 3 # Michael DeHaan <michael.dehaan AT gmail> 4 # 5 # License: GPLv2+ 6 7 # Post install trigger for cobbler to 8 # send out a pretty email report that 9 # contains target information. 10 11 import distutils.sysconfig 12 import smtplib 13 import sys 14 15 plib = distutils.sysconfig.get_python_lib() 16 mod_path = "%s/cobbler" % plib 17 sys.path.insert(0, mod_path) 18 19 from cobbler.cexceptions import CX 20 import cobbler.templar as templar 21 import utils 22 23 24 def register(): 25 # this pure python trigger acts as if it were a legacy shell-trigger, but is much faster. 26 # the return of this method indicates the trigger type 27 return "/var/lib/cobbler/triggers/install/post/*" 28 29 30 def run(api, args, logger): 31 # FIXME: make everything use the logger 32 33 settings = api.settings() 34 35 # go no further if this feature is turned off 36 if not str(settings.build_reporting_enabled).lower() in ["1", "yes", "y", "true"]: 37 return 0 38 39 objtype = args[0] # "target" or "profile" 40 name = args[1] # name of target or profile 41 boot_ip = args[2] # ip or "?" 
42 43 if objtype == "system": 44 target = api.find_system(name) 45 else: 46 target = api.find_profile(name) 47 48 # collapse the object down to a rendered datastructure 49 target = utils.blender(api, False, target) 50 51 if target == {}: 52 raise CX("failure looking up target") 53 54 to_addr = settings.build_reporting_email 55 if to_addr == "": 56 return 0 57 58 # add the ability to specify an MTA for servers that don't run their own 59 smtp_server = settings.build_reporting_smtp_server 60 if smtp_server == "": 61 smtp_server = "localhost" 62 63 # use a custom from address or fall back to a reasonable default 64 from_addr = settings.build_reporting_sender 65 if from_addr == "": 66 from_addr = "cobbler@%s" % settings.server 67 68 subject = settings.build_reporting_subject 69 if subject == "": 70 subject = '[Cobbler] install complete ' 71 72 to_addr = ",".join(to_addr) 73 metadata = { 74 "from_addr": from_addr, 75 "to_addr": to_addr, 76 "subject": subject, 77 "boot_ip": boot_ip 78 } 79 metadata.update(target) 80 81 input_template = open("/etc/cobbler/reporting/build_report_email.template") 82 input_data = input_template.read() 83 input_template.close() 84 85 message = templar.Templar(api._config).render(input_data, metadata, None) 86 87 sendmail = True 88 for prefix in settings.build_reporting_ignorelist: 89 if name.lower().startswith(prefix): 90 sendmail = False 91 92 if sendmail: 93 # Send the mail 94 # FIXME: on error, return non-zero 95 server_handle = smtplib.SMTP(smtp_server) 96 server_handle.sendmail(from_addr, to_addr.split(','), message) 97 server_handle.quit() 98 99 return 0 100 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/cobbler/modules/install_post_report.py b/cobbler/modules/install_post_report.py
--- a/cobbler/modules/install_post_report.py
+++ b/cobbler/modules/install_post_report.py
@@ -86,7 +86,7 @@
 
     sendmail = True
    for prefix in settings.build_reporting_ignorelist:
-        if name.lower().startswith(prefix):
+        if prefix != '' and name.lower().startswith(prefix):
             sendmail = False
 
     if sendmail:
{"golden_diff": "diff --git a/cobbler/modules/install_post_report.py b/cobbler/modules/install_post_report.py\n--- a/cobbler/modules/install_post_report.py\n+++ b/cobbler/modules/install_post_report.py\n@@ -86,7 +86,7 @@\n \n sendmail = True\n for prefix in settings.build_reporting_ignorelist:\n- if name.lower().startswith(prefix):\n+ if prefix != '' and name.lower().startswith(prefix):\n sendmail = False\n \n if sendmail:\n", "issue": "build_reporting fails if empty string in ignorelist\nThe default configuration in the ubuntu 12.04 cobbler 2.6.5 package has the following in `/etc/settings`:\n\n```\nbuild_reporting_ignorelist = [\"\"]\n```\n\nThe code that reads this value is in `install_post_report.py`, and the condition that determines whether to send a build report email is:\n\n```\nfor prefix in settings.build_reporting_ignorelist:\n if name.lower().startswith(prefix) == True:\n sendmail = False\n```\n\nWith the default configuration, this check always succeeds, and **mail is not sent**.\n\nFix the issue by modifying the condition to:\n\n```\n if prefix != '' and name.lower().startswith(prefix):\n```\n\n", "before_files": [{"content": "# (c) 2008-2009\n# Jeff Schroeder <[email protected]>\n# Michael DeHaan <michael.dehaan AT gmail>\n#\n# License: GPLv2+\n\n# Post install trigger for cobbler to\n# send out a pretty email report that\n# contains target information.\n\nimport distutils.sysconfig\nimport smtplib\nimport sys\n\nplib = distutils.sysconfig.get_python_lib()\nmod_path = \"%s/cobbler\" % plib\nsys.path.insert(0, mod_path)\n\nfrom cobbler.cexceptions import CX\nimport cobbler.templar as templar\nimport utils\n\n\ndef register():\n # this pure python trigger acts as if it were a legacy shell-trigger, but is much faster.\n # the return of this method indicates the trigger type\n return \"/var/lib/cobbler/triggers/install/post/*\"\n\n\ndef run(api, args, logger):\n # FIXME: make everything use the logger\n\n settings = api.settings()\n\n # go no further if this feature is turned off\n if not str(settings.build_reporting_enabled).lower() in [\"1\", \"yes\", \"y\", \"true\"]:\n return 0\n\n objtype = args[0] # \"target\" or \"profile\"\n name = args[1] # name of target or profile\n boot_ip = args[2] # ip or \"?\"\n\n if objtype == \"system\":\n target = api.find_system(name)\n else:\n target = api.find_profile(name)\n\n # collapse the object down to a rendered datastructure\n target = utils.blender(api, False, target)\n\n if target == {}:\n raise CX(\"failure looking up target\")\n\n to_addr = settings.build_reporting_email\n if to_addr == \"\":\n return 0\n\n # add the ability to specify an MTA for servers that don't run their own\n smtp_server = settings.build_reporting_smtp_server\n if smtp_server == \"\":\n smtp_server = \"localhost\"\n\n # use a custom from address or fall back to a reasonable default\n from_addr = settings.build_reporting_sender\n if from_addr == \"\":\n from_addr = \"cobbler@%s\" % settings.server\n\n subject = settings.build_reporting_subject\n if subject == \"\":\n subject = '[Cobbler] install complete '\n\n to_addr = \",\".join(to_addr)\n metadata = {\n \"from_addr\": from_addr,\n \"to_addr\": to_addr,\n \"subject\": subject,\n \"boot_ip\": boot_ip\n }\n metadata.update(target)\n\n input_template = open(\"/etc/cobbler/reporting/build_report_email.template\")\n input_data = input_template.read()\n input_template.close()\n\n message = templar.Templar(api._config).render(input_data, metadata, None)\n\n sendmail = True\n for prefix in 
settings.build_reporting_ignorelist:\n if name.lower().startswith(prefix):\n sendmail = False\n\n if sendmail:\n # Send the mail\n # FIXME: on error, return non-zero\n server_handle = smtplib.SMTP(smtp_server)\n server_handle.sendmail(from_addr, to_addr.split(','), message)\n server_handle.quit()\n\n return 0\n", "path": "cobbler/modules/install_post_report.py"}], "after_files": [{"content": "# (c) 2008-2009\n# Jeff Schroeder <[email protected]>\n# Michael DeHaan <michael.dehaan AT gmail>\n#\n# License: GPLv2+\n\n# Post install trigger for cobbler to\n# send out a pretty email report that\n# contains target information.\n\nimport distutils.sysconfig\nimport smtplib\nimport sys\n\nplib = distutils.sysconfig.get_python_lib()\nmod_path = \"%s/cobbler\" % plib\nsys.path.insert(0, mod_path)\n\nfrom cobbler.cexceptions import CX\nimport cobbler.templar as templar\nimport utils\n\n\ndef register():\n # this pure python trigger acts as if it were a legacy shell-trigger, but is much faster.\n # the return of this method indicates the trigger type\n return \"/var/lib/cobbler/triggers/install/post/*\"\n\n\ndef run(api, args, logger):\n # FIXME: make everything use the logger\n\n settings = api.settings()\n\n # go no further if this feature is turned off\n if not str(settings.build_reporting_enabled).lower() in [\"1\", \"yes\", \"y\", \"true\"]:\n return 0\n\n objtype = args[0] # \"target\" or \"profile\"\n name = args[1] # name of target or profile\n boot_ip = args[2] # ip or \"?\"\n\n if objtype == \"system\":\n target = api.find_system(name)\n else:\n target = api.find_profile(name)\n\n # collapse the object down to a rendered datastructure\n target = utils.blender(api, False, target)\n\n if target == {}:\n raise CX(\"failure looking up target\")\n\n to_addr = settings.build_reporting_email\n if to_addr == \"\":\n return 0\n\n # add the ability to specify an MTA for servers that don't run their own\n smtp_server = settings.build_reporting_smtp_server\n if smtp_server == \"\":\n smtp_server = \"localhost\"\n\n # use a custom from address or fall back to a reasonable default\n from_addr = settings.build_reporting_sender\n if from_addr == \"\":\n from_addr = \"cobbler@%s\" % settings.server\n\n subject = settings.build_reporting_subject\n if subject == \"\":\n subject = '[Cobbler] install complete '\n\n to_addr = \",\".join(to_addr)\n metadata = {\n \"from_addr\": from_addr,\n \"to_addr\": to_addr,\n \"subject\": subject,\n \"boot_ip\": boot_ip\n }\n metadata.update(target)\n\n input_template = open(\"/etc/cobbler/reporting/build_report_email.template\")\n input_data = input_template.read()\n input_template.close()\n\n message = templar.Templar(api._config).render(input_data, metadata, None)\n\n sendmail = True\n for prefix in settings.build_reporting_ignorelist:\n if prefix != '' and name.lower().startswith(prefix):\n sendmail = False\n\n if sendmail:\n # Send the mail\n # FIXME: on error, return non-zero\n server_handle = smtplib.SMTP(smtp_server)\n server_handle.sendmail(from_addr, to_addr.split(','), message)\n server_handle.quit()\n\n return 0\n", "path": "cobbler/modules/install_post_report.py"}]}
1,325
106
gh_patches_debug_5535
rasdani/github-patches
git_diff
kornia__kornia-421
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Fix simple typo: suports -> supports There is a small typo in kornia/filters/laplacian.py. Should read `supports` rather than `suports`. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `kornia/filters/laplacian.py` Content: ``` 1 from typing import Tuple 2 3 import torch 4 import torch.nn as nn 5 6 import kornia 7 from kornia.filters.kernels import get_laplacian_kernel2d 8 from kornia.filters.kernels import normalize_kernel2d 9 10 11 class Laplacian(nn.Module): 12 r"""Creates an operator that returns a tensor using a Laplacian filter. 13 14 The operator smooths the given tensor with a laplacian kernel by convolving 15 it to each channel. It suports batched operation. 16 17 Arguments: 18 kernel_size (int): the size of the kernel. 19 border_type (str): the padding mode to be applied before convolving. 20 The expected modes are: ``'constant'``, ``'reflect'``, 21 ``'replicate'`` or ``'circular'``. Default: ``'reflect'``. 22 normalized (bool): if True, L1 norm of the kernel is set to 1. 23 24 Returns: 25 Tensor: the tensor. 26 27 Shape: 28 - Input: :math:`(B, C, H, W)` 29 - Output: :math:`(B, C, H, W)` 30 31 Examples:: 32 33 >>> input = torch.rand(2, 4, 5, 5) 34 >>> laplace = kornia.filters.Laplacian(5) 35 >>> output = laplace(input) # 2x4x5x5 36 """ 37 38 def __init__(self, 39 kernel_size: int, border_type: str = 'reflect', 40 normalized: bool = True) -> None: 41 super(Laplacian, self).__init__() 42 self.kernel_size: int = kernel_size 43 self.border_type: str = border_type 44 self.normalized: bool = normalized 45 self.kernel: torch.Tensor = torch.unsqueeze( 46 get_laplacian_kernel2d(kernel_size), dim=0) 47 if self.normalized: 48 self.kernel = normalize_kernel2d(self.kernel) 49 50 def __repr__(self) -> str: 51 return self.__class__.__name__ +\ 52 '(kernel_size=' + str(self.kernel_size) + ', ' +\ 53 'normalized=' + str(self.normalized) + ', ' + \ 54 'border_type=' + self.border_type + ')' 55 56 def forward(self, input: torch.Tensor): # type: ignore 57 return kornia.filter2D(input, self.kernel, self.border_type) 58 59 60 ###################### 61 # functional interface 62 ###################### 63 64 65 def laplacian( 66 input: torch.Tensor, 67 kernel_size: int, 68 border_type: str = 'reflect', 69 normalized: bool = True) -> torch.Tensor: 70 r"""Function that returns a tensor using a Laplacian filter. 71 72 See :class:`~kornia.filters.Laplacian` for details. 73 """ 74 return Laplacian(kernel_size, border_type, normalized)(input) 75 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/kornia/filters/laplacian.py b/kornia/filters/laplacian.py
--- a/kornia/filters/laplacian.py
+++ b/kornia/filters/laplacian.py
@@ -12,7 +12,7 @@
     r"""Creates an operator that returns a tensor using a Laplacian filter.
 
     The operator smooths the given tensor with a laplacian kernel by convolving
-    it to each channel. It suports batched operation.
+    it to each channel. It supports batched operation.
 
     Arguments:
         kernel_size (int): the size of the kernel.
{"golden_diff": "diff --git a/kornia/filters/laplacian.py b/kornia/filters/laplacian.py\n--- a/kornia/filters/laplacian.py\n+++ b/kornia/filters/laplacian.py\n@@ -12,7 +12,7 @@\n r\"\"\"Creates an operator that returns a tensor using a Laplacian filter.\n \n The operator smooths the given tensor with a laplacian kernel by convolving\n- it to each channel. It suports batched operation.\n+ it to each channel. It supports batched operation.\n \n Arguments:\n kernel_size (int): the size of the kernel.\n", "issue": "Fix simple typo: suports -> supports\nThere is a small typo in kornia/filters/laplacian.py.\nShould read `supports` rather than `suports`.\n\n\n", "before_files": [{"content": "from typing import Tuple\n\nimport torch\nimport torch.nn as nn\n\nimport kornia\nfrom kornia.filters.kernels import get_laplacian_kernel2d\nfrom kornia.filters.kernels import normalize_kernel2d\n\n\nclass Laplacian(nn.Module):\n r\"\"\"Creates an operator that returns a tensor using a Laplacian filter.\n\n The operator smooths the given tensor with a laplacian kernel by convolving\n it to each channel. It suports batched operation.\n\n Arguments:\n kernel_size (int): the size of the kernel.\n border_type (str): the padding mode to be applied before convolving.\n The expected modes are: ``'constant'``, ``'reflect'``,\n ``'replicate'`` or ``'circular'``. Default: ``'reflect'``.\n normalized (bool): if True, L1 norm of the kernel is set to 1.\n\n Returns:\n Tensor: the tensor.\n\n Shape:\n - Input: :math:`(B, C, H, W)`\n - Output: :math:`(B, C, H, W)`\n\n Examples::\n\n >>> input = torch.rand(2, 4, 5, 5)\n >>> laplace = kornia.filters.Laplacian(5)\n >>> output = laplace(input) # 2x4x5x5\n \"\"\"\n\n def __init__(self,\n kernel_size: int, border_type: str = 'reflect',\n normalized: bool = True) -> None:\n super(Laplacian, self).__init__()\n self.kernel_size: int = kernel_size\n self.border_type: str = border_type\n self.normalized: bool = normalized\n self.kernel: torch.Tensor = torch.unsqueeze(\n get_laplacian_kernel2d(kernel_size), dim=0)\n if self.normalized:\n self.kernel = normalize_kernel2d(self.kernel)\n\n def __repr__(self) -> str:\n return self.__class__.__name__ +\\\n '(kernel_size=' + str(self.kernel_size) + ', ' +\\\n 'normalized=' + str(self.normalized) + ', ' + \\\n 'border_type=' + self.border_type + ')'\n\n def forward(self, input: torch.Tensor): # type: ignore\n return kornia.filter2D(input, self.kernel, self.border_type)\n\n\n######################\n# functional interface\n######################\n\n\ndef laplacian(\n input: torch.Tensor,\n kernel_size: int,\n border_type: str = 'reflect',\n normalized: bool = True) -> torch.Tensor:\n r\"\"\"Function that returns a tensor using a Laplacian filter.\n\n See :class:`~kornia.filters.Laplacian` for details.\n \"\"\"\n return Laplacian(kernel_size, border_type, normalized)(input)\n", "path": "kornia/filters/laplacian.py"}], "after_files": [{"content": "from typing import Tuple\n\nimport torch\nimport torch.nn as nn\n\nimport kornia\nfrom kornia.filters.kernels import get_laplacian_kernel2d\nfrom kornia.filters.kernels import normalize_kernel2d\n\n\nclass Laplacian(nn.Module):\n r\"\"\"Creates an operator that returns a tensor using a Laplacian filter.\n\n The operator smooths the given tensor with a laplacian kernel by convolving\n it to each channel. 
It supports batched operation.\n\n Arguments:\n kernel_size (int): the size of the kernel.\n border_type (str): the padding mode to be applied before convolving.\n The expected modes are: ``'constant'``, ``'reflect'``,\n ``'replicate'`` or ``'circular'``. Default: ``'reflect'``.\n normalized (bool): if True, L1 norm of the kernel is set to 1.\n\n Returns:\n Tensor: the tensor.\n\n Shape:\n - Input: :math:`(B, C, H, W)`\n - Output: :math:`(B, C, H, W)`\n\n Examples::\n\n >>> input = torch.rand(2, 4, 5, 5)\n >>> laplace = kornia.filters.Laplacian(5)\n >>> output = laplace(input) # 2x4x5x5\n \"\"\"\n\n def __init__(self,\n kernel_size: int, border_type: str = 'reflect',\n normalized: bool = True) -> None:\n super(Laplacian, self).__init__()\n self.kernel_size: int = kernel_size\n self.border_type: str = border_type\n self.normalized: bool = normalized\n self.kernel: torch.Tensor = torch.unsqueeze(\n get_laplacian_kernel2d(kernel_size), dim=0)\n if self.normalized:\n self.kernel = normalize_kernel2d(self.kernel)\n\n def __repr__(self) -> str:\n return self.__class__.__name__ +\\\n '(kernel_size=' + str(self.kernel_size) + ', ' +\\\n 'normalized=' + str(self.normalized) + ', ' + \\\n 'border_type=' + self.border_type + ')'\n\n def forward(self, input: torch.Tensor): # type: ignore\n return kornia.filter2D(input, self.kernel, self.border_type)\n\n\n######################\n# functional interface\n######################\n\n\ndef laplacian(\n input: torch.Tensor,\n kernel_size: int,\n border_type: str = 'reflect',\n normalized: bool = True) -> torch.Tensor:\n r\"\"\"Function that returns a tensor using a Laplacian filter.\n\n See :class:`~kornia.filters.Laplacian` for details.\n \"\"\"\n return Laplacian(kernel_size, border_type, normalized)(input)\n", "path": "kornia/filters/laplacian.py"}]}
1,059
143
gh_patches_debug_10134
rasdani/github-patches
git_diff
encode__starlette-1346
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Cannot define a custom HEAD handler in HTTPEndpoint ### Checklist <!-- Please make sure you check all these items before submitting your bug report. --> - [x] The bug is reproducible against the latest release and/or `master`. - [x] There are no similar issues or pull requests to fix it yet. ### Describe the bug I am unable to set a custom head method for a HTTPEndpoint ### How to reproduce Python: ```python from starlette.applications import Starlette from starlette.routing import Route from starlette.responses import Response from starlette.endpoints import HTTPEndpoint class TestHandler(HTTPEndpoint): async def head(self, request): print("Called head") return Response("head") async def get(self, request): print("Called get") return Response("get") routes = [ Route("/", TestHandler), ] app = Starlette(routes=routes) ``` Then run uvicorn, and call `curl -XHEAD localhost:8000/` and `curl -XGET localhost:8000/` You will see in uvicorn logs, both times the get handler gets called. ``` Called get INFO: 127.0.0.1:53438 - "HEAD / HTTP/1.1" 200 OK Called get INFO: 127.0.0.1:53440 - "GET / HTTP/1.1" 200 OK ``` ### Expected behavior When calling the HEAD method, it should invoke the head() function ### Actual behavior When calling the HEAD method, it invokes the get() function ### Environment - OS: Linux - Python version: Python 3.9.5 - Starlette version: 0.15.0 ### Additional context The issue is caused by the following code: https://github.com/encode/starlette/blob/master/starlette/endpoints.py#L26 It should see if there is a head method, and if so, don't call the get method when head is requested Eg. linked issues, or a description of what you were trying to achieve. --> --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `starlette/endpoints.py` Content: ``` 1 import asyncio 2 import json 3 import typing 4 5 from starlette import status 6 from starlette.concurrency import run_in_threadpool 7 from starlette.exceptions import HTTPException 8 from starlette.requests import Request 9 from starlette.responses import PlainTextResponse, Response 10 from starlette.types import Message, Receive, Scope, Send 11 from starlette.websockets import WebSocket 12 13 14 class HTTPEndpoint: 15 def __init__(self, scope: Scope, receive: Receive, send: Send) -> None: 16 assert scope["type"] == "http" 17 self.scope = scope 18 self.receive = receive 19 self.send = send 20 21 def __await__(self) -> typing.Generator: 22 return self.dispatch().__await__() 23 24 async def dispatch(self) -> None: 25 request = Request(self.scope, receive=self.receive) 26 handler_name = "get" if request.method == "HEAD" else request.method.lower() 27 handler = getattr(self, handler_name, self.method_not_allowed) 28 is_async = asyncio.iscoroutinefunction(handler) 29 if is_async: 30 response = await handler(request) 31 else: 32 response = await run_in_threadpool(handler, request) 33 await response(self.scope, self.receive, self.send) 34 35 async def method_not_allowed(self, request: Request) -> Response: 36 # If we're running inside a starlette application then raise an 37 # exception, so that the configurable exception handler can deal with 38 # returning the response. For plain ASGI apps, just return the response. 
39 if "app" in self.scope: 40 raise HTTPException(status_code=405) 41 return PlainTextResponse("Method Not Allowed", status_code=405) 42 43 44 class WebSocketEndpoint: 45 46 encoding: typing.Optional[str] = None # May be "text", "bytes", or "json". 47 48 def __init__(self, scope: Scope, receive: Receive, send: Send) -> None: 49 assert scope["type"] == "websocket" 50 self.scope = scope 51 self.receive = receive 52 self.send = send 53 54 def __await__(self) -> typing.Generator: 55 return self.dispatch().__await__() 56 57 async def dispatch(self) -> None: 58 websocket = WebSocket(self.scope, receive=self.receive, send=self.send) 59 await self.on_connect(websocket) 60 61 close_code = status.WS_1000_NORMAL_CLOSURE 62 63 try: 64 while True: 65 message = await websocket.receive() 66 if message["type"] == "websocket.receive": 67 data = await self.decode(websocket, message) 68 await self.on_receive(websocket, data) 69 elif message["type"] == "websocket.disconnect": 70 close_code = int(message.get("code", status.WS_1000_NORMAL_CLOSURE)) 71 break 72 except Exception as exc: 73 close_code = status.WS_1011_INTERNAL_ERROR 74 raise exc 75 finally: 76 await self.on_disconnect(websocket, close_code) 77 78 async def decode(self, websocket: WebSocket, message: Message) -> typing.Any: 79 80 if self.encoding == "text": 81 if "text" not in message: 82 await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA) 83 raise RuntimeError("Expected text websocket messages, but got bytes") 84 return message["text"] 85 86 elif self.encoding == "bytes": 87 if "bytes" not in message: 88 await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA) 89 raise RuntimeError("Expected bytes websocket messages, but got text") 90 return message["bytes"] 91 92 elif self.encoding == "json": 93 if message.get("text") is not None: 94 text = message["text"] 95 else: 96 text = message["bytes"].decode("utf-8") 97 98 try: 99 return json.loads(text) 100 except json.decoder.JSONDecodeError: 101 await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA) 102 raise RuntimeError("Malformed JSON data received.") 103 104 assert ( 105 self.encoding is None 106 ), f"Unsupported 'encoding' attribute {self.encoding}" 107 return message["text"] if message.get("text") else message["bytes"] 108 109 async def on_connect(self, websocket: WebSocket) -> None: 110 """Override to handle an incoming websocket connection""" 111 await websocket.accept() 112 113 async def on_receive(self, websocket: WebSocket, data: typing.Any) -> None: 114 """Override to handle an incoming websocket message""" 115 116 async def on_disconnect(self, websocket: WebSocket, close_code: int) -> None: 117 """Override to handle a disconnecting websocket""" 118 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/starlette/endpoints.py b/starlette/endpoints.py
--- a/starlette/endpoints.py
+++ b/starlette/endpoints.py
@@ -23,7 +23,12 @@
 
     async def dispatch(self) -> None:
         request = Request(self.scope, receive=self.receive)
-        handler_name = "get" if request.method == "HEAD" else request.method.lower()
+        handler_name = (
+            "get"
+            if request.method == "HEAD" and not hasattr(self, "head")
+            else request.method.lower()
+        )
+
         handler = getattr(self, handler_name, self.method_not_allowed)
         is_async = asyncio.iscoroutinefunction(handler)
         if is_async:
{"golden_diff": "diff --git a/starlette/endpoints.py b/starlette/endpoints.py\n--- a/starlette/endpoints.py\n+++ b/starlette/endpoints.py\n@@ -23,7 +23,12 @@\n \n async def dispatch(self) -> None:\n request = Request(self.scope, receive=self.receive)\n- handler_name = \"get\" if request.method == \"HEAD\" else request.method.lower()\n+ handler_name = (\n+ \"get\"\n+ if request.method == \"HEAD\" and not hasattr(self, \"head\")\n+ else request.method.lower()\n+ )\n+\n handler = getattr(self, handler_name, self.method_not_allowed)\n is_async = asyncio.iscoroutinefunction(handler)\n if is_async:\n", "issue": "Cannot define a custom HEAD handler in HTTPEndpoint\n### Checklist\r\n\r\n<!-- Please make sure you check all these items before submitting your bug report. -->\r\n\r\n- [x] The bug is reproducible against the latest release and/or `master`.\r\n- [x] There are no similar issues or pull requests to fix it yet.\r\n\r\n### Describe the bug\r\n\r\nI am unable to set a custom head method for a HTTPEndpoint\r\n\r\n### How to reproduce\r\n\r\nPython:\r\n```python\r\nfrom starlette.applications import Starlette\r\nfrom starlette.routing import Route\r\nfrom starlette.responses import Response\r\nfrom starlette.endpoints import HTTPEndpoint\r\n\r\nclass TestHandler(HTTPEndpoint):\r\n async def head(self, request):\r\n print(\"Called head\")\r\n return Response(\"head\")\r\n\r\n async def get(self, request):\r\n print(\"Called get\")\r\n return Response(\"get\")\r\n\r\nroutes = [\r\n Route(\"/\", TestHandler),\r\n]\r\n\r\napp = Starlette(routes=routes)\r\n```\r\nThen run uvicorn, and call `curl -XHEAD localhost:8000/` and `curl -XGET localhost:8000/`\r\n\r\nYou will see in uvicorn logs, both times the get handler gets called.\r\n```\r\nCalled get\r\nINFO: 127.0.0.1:53438 - \"HEAD / HTTP/1.1\" 200 OK\r\nCalled get\r\nINFO: 127.0.0.1:53440 - \"GET / HTTP/1.1\" 200 OK\r\n```\r\n\r\n### Expected behavior\r\n\r\nWhen calling the HEAD method, it should invoke the head() function\r\n\r\n### Actual behavior\r\n\r\nWhen calling the HEAD method, it invokes the get() function\r\n\r\n\r\n### Environment\r\n\r\n- OS: Linux\r\n- Python version: Python 3.9.5\r\n- Starlette version: 0.15.0\r\n\r\n### Additional context\r\n\r\nThe issue is caused by the following code:\r\nhttps://github.com/encode/starlette/blob/master/starlette/endpoints.py#L26\r\n\r\nIt should see if there is a head method, and if so, don't call the get method when head is requested\r\n\r\nEg. linked issues, or a description of what you were trying to achieve. 
-->\r\n\n", "before_files": [{"content": "import asyncio\nimport json\nimport typing\n\nfrom starlette import status\nfrom starlette.concurrency import run_in_threadpool\nfrom starlette.exceptions import HTTPException\nfrom starlette.requests import Request\nfrom starlette.responses import PlainTextResponse, Response\nfrom starlette.types import Message, Receive, Scope, Send\nfrom starlette.websockets import WebSocket\n\n\nclass HTTPEndpoint:\n def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:\n assert scope[\"type\"] == \"http\"\n self.scope = scope\n self.receive = receive\n self.send = send\n\n def __await__(self) -> typing.Generator:\n return self.dispatch().__await__()\n\n async def dispatch(self) -> None:\n request = Request(self.scope, receive=self.receive)\n handler_name = \"get\" if request.method == \"HEAD\" else request.method.lower()\n handler = getattr(self, handler_name, self.method_not_allowed)\n is_async = asyncio.iscoroutinefunction(handler)\n if is_async:\n response = await handler(request)\n else:\n response = await run_in_threadpool(handler, request)\n await response(self.scope, self.receive, self.send)\n\n async def method_not_allowed(self, request: Request) -> Response:\n # If we're running inside a starlette application then raise an\n # exception, so that the configurable exception handler can deal with\n # returning the response. For plain ASGI apps, just return the response.\n if \"app\" in self.scope:\n raise HTTPException(status_code=405)\n return PlainTextResponse(\"Method Not Allowed\", status_code=405)\n\n\nclass WebSocketEndpoint:\n\n encoding: typing.Optional[str] = None # May be \"text\", \"bytes\", or \"json\".\n\n def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:\n assert scope[\"type\"] == \"websocket\"\n self.scope = scope\n self.receive = receive\n self.send = send\n\n def __await__(self) -> typing.Generator:\n return self.dispatch().__await__()\n\n async def dispatch(self) -> None:\n websocket = WebSocket(self.scope, receive=self.receive, send=self.send)\n await self.on_connect(websocket)\n\n close_code = status.WS_1000_NORMAL_CLOSURE\n\n try:\n while True:\n message = await websocket.receive()\n if message[\"type\"] == \"websocket.receive\":\n data = await self.decode(websocket, message)\n await self.on_receive(websocket, data)\n elif message[\"type\"] == \"websocket.disconnect\":\n close_code = int(message.get(\"code\", status.WS_1000_NORMAL_CLOSURE))\n break\n except Exception as exc:\n close_code = status.WS_1011_INTERNAL_ERROR\n raise exc\n finally:\n await self.on_disconnect(websocket, close_code)\n\n async def decode(self, websocket: WebSocket, message: Message) -> typing.Any:\n\n if self.encoding == \"text\":\n if \"text\" not in message:\n await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)\n raise RuntimeError(\"Expected text websocket messages, but got bytes\")\n return message[\"text\"]\n\n elif self.encoding == \"bytes\":\n if \"bytes\" not in message:\n await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)\n raise RuntimeError(\"Expected bytes websocket messages, but got text\")\n return message[\"bytes\"]\n\n elif self.encoding == \"json\":\n if message.get(\"text\") is not None:\n text = message[\"text\"]\n else:\n text = message[\"bytes\"].decode(\"utf-8\")\n\n try:\n return json.loads(text)\n except json.decoder.JSONDecodeError:\n await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)\n raise RuntimeError(\"Malformed JSON data received.\")\n\n assert (\n self.encoding is 
None\n ), f\"Unsupported 'encoding' attribute {self.encoding}\"\n return message[\"text\"] if message.get(\"text\") else message[\"bytes\"]\n\n async def on_connect(self, websocket: WebSocket) -> None:\n \"\"\"Override to handle an incoming websocket connection\"\"\"\n await websocket.accept()\n\n async def on_receive(self, websocket: WebSocket, data: typing.Any) -> None:\n \"\"\"Override to handle an incoming websocket message\"\"\"\n\n async def on_disconnect(self, websocket: WebSocket, close_code: int) -> None:\n \"\"\"Override to handle a disconnecting websocket\"\"\"\n", "path": "starlette/endpoints.py"}], "after_files": [{"content": "import asyncio\nimport json\nimport typing\n\nfrom starlette import status\nfrom starlette.concurrency import run_in_threadpool\nfrom starlette.exceptions import HTTPException\nfrom starlette.requests import Request\nfrom starlette.responses import PlainTextResponse, Response\nfrom starlette.types import Message, Receive, Scope, Send\nfrom starlette.websockets import WebSocket\n\n\nclass HTTPEndpoint:\n def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:\n assert scope[\"type\"] == \"http\"\n self.scope = scope\n self.receive = receive\n self.send = send\n\n def __await__(self) -> typing.Generator:\n return self.dispatch().__await__()\n\n async def dispatch(self) -> None:\n request = Request(self.scope, receive=self.receive)\n handler_name = (\n \"get\"\n if request.method == \"HEAD\" and not hasattr(self, \"head\")\n else request.method.lower()\n )\n\n handler = getattr(self, handler_name, self.method_not_allowed)\n is_async = asyncio.iscoroutinefunction(handler)\n if is_async:\n response = await handler(request)\n else:\n response = await run_in_threadpool(handler, request)\n await response(self.scope, self.receive, self.send)\n\n async def method_not_allowed(self, request: Request) -> Response:\n # If we're running inside a starlette application then raise an\n # exception, so that the configurable exception handler can deal with\n # returning the response. 
For plain ASGI apps, just return the response.\n if \"app\" in self.scope:\n raise HTTPException(status_code=405)\n return PlainTextResponse(\"Method Not Allowed\", status_code=405)\n\n\nclass WebSocketEndpoint:\n\n encoding: typing.Optional[str] = None # May be \"text\", \"bytes\", or \"json\".\n\n def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:\n assert scope[\"type\"] == \"websocket\"\n self.scope = scope\n self.receive = receive\n self.send = send\n\n def __await__(self) -> typing.Generator:\n return self.dispatch().__await__()\n\n async def dispatch(self) -> None:\n websocket = WebSocket(self.scope, receive=self.receive, send=self.send)\n await self.on_connect(websocket)\n\n close_code = status.WS_1000_NORMAL_CLOSURE\n\n try:\n while True:\n message = await websocket.receive()\n if message[\"type\"] == \"websocket.receive\":\n data = await self.decode(websocket, message)\n await self.on_receive(websocket, data)\n elif message[\"type\"] == \"websocket.disconnect\":\n close_code = int(message.get(\"code\", status.WS_1000_NORMAL_CLOSURE))\n break\n except Exception as exc:\n close_code = status.WS_1011_INTERNAL_ERROR\n raise exc\n finally:\n await self.on_disconnect(websocket, close_code)\n\n async def decode(self, websocket: WebSocket, message: Message) -> typing.Any:\n\n if self.encoding == \"text\":\n if \"text\" not in message:\n await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)\n raise RuntimeError(\"Expected text websocket messages, but got bytes\")\n return message[\"text\"]\n\n elif self.encoding == \"bytes\":\n if \"bytes\" not in message:\n await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)\n raise RuntimeError(\"Expected bytes websocket messages, but got text\")\n return message[\"bytes\"]\n\n elif self.encoding == \"json\":\n if message.get(\"text\") is not None:\n text = message[\"text\"]\n else:\n text = message[\"bytes\"].decode(\"utf-8\")\n\n try:\n return json.loads(text)\n except json.decoder.JSONDecodeError:\n await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)\n raise RuntimeError(\"Malformed JSON data received.\")\n\n assert (\n self.encoding is None\n ), f\"Unsupported 'encoding' attribute {self.encoding}\"\n return message[\"text\"] if message.get(\"text\") else message[\"bytes\"]\n\n async def on_connect(self, websocket: WebSocket) -> None:\n \"\"\"Override to handle an incoming websocket connection\"\"\"\n await websocket.accept()\n\n async def on_receive(self, websocket: WebSocket, data: typing.Any) -> None:\n \"\"\"Override to handle an incoming websocket message\"\"\"\n\n async def on_disconnect(self, websocket: WebSocket, close_code: int) -> None:\n \"\"\"Override to handle a disconnecting websocket\"\"\"\n", "path": "starlette/endpoints.py"}]}
1,955
155
gh_patches_debug_49728
rasdani/github-patches
git_diff
googleapis__google-cloud-python-6027
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Please cut a release of Video Intelligence Need to unblock tests of samples --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `videointelligence/setup.py` Content: ``` 1 # Copyright 2018 Google LLC 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import io 16 import os 17 18 import setuptools 19 20 21 # Package metadata. 22 23 name = 'google-cloud-videointelligence' 24 description = 'Google Cloud Video Intelligence API client library' 25 version = '1.3.0' 26 # Should be one of: 27 # 'Development Status :: 3 - Alpha' 28 # 'Development Status :: 4 - Beta' 29 # 'Development Status :: 5 - Production/Stable' 30 release_status = 'Development Status :: 5 - Production/Stable' 31 dependencies = [ 32 'google-api-core[grpc]<2.0.0dev,>=0.1.0', 33 ] 34 extras = { 35 } 36 37 38 # Setup boilerplate below this line. 39 40 package_root = os.path.abspath(os.path.dirname(__file__)) 41 42 readme_filename = os.path.join(package_root, 'README.rst') 43 with io.open(readme_filename, encoding='utf-8') as readme_file: 44 readme = readme_file.read() 45 46 # Only include packages under the 'google' namespace. Do not include tests, 47 # benchmarks, etc. 48 packages = [ 49 package for package in setuptools.find_packages() 50 if package.startswith('google')] 51 52 # Determine which namespaces are needed. 53 namespaces = ['google'] 54 if 'google.cloud' in packages: 55 namespaces.append('google.cloud') 56 57 58 setuptools.setup( 59 name=name, 60 version=version, 61 description=description, 62 long_description=readme, 63 author='Google LLC', 64 author_email='[email protected]', 65 license='Apache 2.0', 66 url='https://github.com/GoogleCloudPlatform/google-cloud-python', 67 classifiers=[ 68 release_status, 69 'Intended Audience :: Developers', 70 'License :: OSI Approved :: Apache Software License', 71 'Programming Language :: Python', 72 'Programming Language :: Python :: 2', 73 'Programming Language :: Python :: 2.7', 74 'Programming Language :: Python :: 3', 75 'Programming Language :: Python :: 3.4', 76 'Programming Language :: Python :: 3.5', 77 'Programming Language :: Python :: 3.6', 78 'Operating System :: OS Independent', 79 'Topic :: Internet', 80 ], 81 platforms='Posix; MacOS X; Windows', 82 packages=packages, 83 namespace_packages=namespaces, 84 install_requires=dependencies, 85 extras_require=extras, 86 include_package_data=True, 87 zip_safe=False, 88 ) 89 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/videointelligence/setup.py b/videointelligence/setup.py
--- a/videointelligence/setup.py
+++ b/videointelligence/setup.py
@@ -22,7 +22,7 @@
 
 name = 'google-cloud-videointelligence'
 description = 'Google Cloud Video Intelligence API client library'
-version = '1.3.0'
+version = '1.4.0'
 # Should be one of:
 # 'Development Status :: 3 - Alpha'
 # 'Development Status :: 4 - Beta'
{"golden_diff": "diff --git a/videointelligence/setup.py b/videointelligence/setup.py\n--- a/videointelligence/setup.py\n+++ b/videointelligence/setup.py\n@@ -22,7 +22,7 @@\n \n name = 'google-cloud-videointelligence'\n description = 'Google Cloud Video Intelligence API client library'\n-version = '1.3.0'\n+version = '1.4.0'\n # Should be one of:\n # 'Development Status :: 3 - Alpha'\n # 'Development Status :: 4 - Beta'\n", "issue": "Please cut a release of Video Intelligence\nNeed to unblock tests of samples\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package metadata.\n\nname = 'google-cloud-videointelligence'\ndescription = 'Google Cloud Video Intelligence API client library'\nversion = '1.3.0'\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = 'Development Status :: 5 - Production/Stable'\ndependencies = [\n 'google-api-core[grpc]<2.0.0dev,>=0.1.0',\n]\nextras = {\n}\n\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, 'README.rst')\nwith io.open(readme_filename, encoding='utf-8') as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. 
Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages()\n if package.startswith('google')]\n\n# Determine which namespaces are needed.\nnamespaces = ['google']\nif 'google.cloud' in packages:\n namespaces.append('google.cloud')\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author='Google LLC',\n author_email='[email protected]',\n license='Apache 2.0',\n url='https://github.com/GoogleCloudPlatform/google-cloud-python',\n classifiers=[\n release_status,\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Operating System :: OS Independent',\n 'Topic :: Internet',\n ],\n platforms='Posix; MacOS X; Windows',\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "videointelligence/setup.py"}], "after_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package metadata.\n\nname = 'google-cloud-videointelligence'\ndescription = 'Google Cloud Video Intelligence API client library'\nversion = '1.4.0'\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = 'Development Status :: 5 - Production/Stable'\ndependencies = [\n 'google-api-core[grpc]<2.0.0dev,>=0.1.0',\n]\nextras = {\n}\n\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, 'README.rst')\nwith io.open(readme_filename, encoding='utf-8') as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. 
Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages()\n if package.startswith('google')]\n\n# Determine which namespaces are needed.\nnamespaces = ['google']\nif 'google.cloud' in packages:\n namespaces.append('google.cloud')\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author='Google LLC',\n author_email='[email protected]',\n license='Apache 2.0',\n url='https://github.com/GoogleCloudPlatform/google-cloud-python',\n classifiers=[\n release_status,\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Operating System :: OS Independent',\n 'Topic :: Internet',\n ],\n platforms='Posix; MacOS X; Windows',\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "videointelligence/setup.py"}]}
1065
115
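The golden diff for this record is nothing more than a minor-version bump of the `version` string in `setup.py`. For illustration only, a small sketch of automating that kind of bump; it assumes the target file declares its version on a single `version = 'X.Y.Z'` line, exactly as the file in the record does, and the path in the usage comment is hypothetical:

```python
import re
from pathlib import Path


def bump_minor(setup_py: Path) -> str:
    """Rewrite version = 'X.Y.Z' in setup.py as 'X.(Y+1).0' and return it."""
    text = setup_py.read_text()
    match = re.search(r"version = '(\d+)\.(\d+)\.(\d+)'", text)
    if match is None:
        raise ValueError(f"no version line found in {setup_py}")
    major, minor, _patch = map(int, match.groups())
    new_version = f"{major}.{minor + 1}.0"  # e.g. 1.3.0 -> 1.4.0
    setup_py.write_text(text.replace(match.group(0), f"version = '{new_version}'"))
    return new_version


# Hypothetical usage:
# bump_minor(Path("videointelligence/setup.py"))
```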
gh_patches_debug_54621
rasdani/github-patches
git_diff
ibis-project__ibis-4790
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- docs: infinite build when using `mkdocs serve` It appears that when using `mkdocs serve` the docs are repeatedly rebuilt to no end. I suspect there's a file that we're generating (maybe the operation matrix?) that is being considered new and triggering a rebuild. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `gen_matrix.py` Content: ``` 1 from pathlib import Path 2 3 import pandas as pd 4 import tomli 5 6 import ibis 7 import ibis.expr.operations as ops 8 9 10 def get_backends(): 11 pyproject = tomli.loads(Path("pyproject.toml").read_text()) 12 backends = pyproject["tool"]["poetry"]["plugins"]["ibis.backends"] 13 del backends["spark"] 14 return [(backend, getattr(ibis, backend)) for backend in sorted(backends.keys())] 15 16 17 def get_leaf_classes(op): 18 for child_class in op.__subclasses__(): 19 if not child_class.__subclasses__(): 20 yield child_class 21 else: 22 yield from get_leaf_classes(child_class) 23 24 25 EXCLUDED_OPS = { 26 # Never translates into anything 27 ops.UnresolvedExistsSubquery, 28 ops.UnresolvedNotExistsSubquery, 29 ops.ScalarParameter, 30 } 31 32 INCLUDED_OPS = { 33 # Parent class of MultiQuantile so it's ignored by `get_backends()` 34 ops.Quantile, 35 } 36 37 38 ICONS = { 39 True: ":material-check-decagram:{ .verified }", 40 False: ":material-cancel:{ .cancel }", 41 } 42 43 44 def main(): 45 possible_ops = ( 46 frozenset(get_leaf_classes(ops.Value)) | INCLUDED_OPS 47 ) - EXCLUDED_OPS 48 49 support = {"operation": [f"`{op.__name__}`" for op in possible_ops]} 50 support.update( 51 (name, list(map(backend.has_operation, possible_ops))) 52 for name, backend in get_backends() 53 ) 54 55 df = pd.DataFrame(support).set_index("operation").sort_index() 56 57 counts = df.sum().sort_values(ascending=False) 58 num_ops = len(possible_ops) 59 coverage = ( 60 counts.map(lambda n: f"_{n} ({round(100 * n / num_ops)}%)_") 61 .to_frame(name="**API Coverage**") 62 .T 63 ) 64 65 ops_table = df.loc[:, counts.index].replace(ICONS) 66 table = pd.concat([coverage, ops_table]) 67 dst = Path(__file__).parent.joinpath( 68 "docs", 69 "backends", 70 "support_matrix.csv", 71 ) 72 table.to_csv(dst, index_label="Backends") 73 74 75 main() 76 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/gen_matrix.py b/gen_matrix.py
--- a/gen_matrix.py
+++ b/gen_matrix.py
@@ -69,7 +69,15 @@
         "backends",
         "support_matrix.csv",
     )
-    table.to_csv(dst, index_label="Backends")
+
+    if dst.exists():
+        old = pd.read_csv(dst, index_col="Backends")
+        should_write = not old.equals(table)
+    else:
+        should_write = True
+
+    if should_write:
+        table.to_csv(dst, index_label="Backends")
 
 
 main()
{"golden_diff": "diff --git a/gen_matrix.py b/gen_matrix.py\n--- a/gen_matrix.py\n+++ b/gen_matrix.py\n@@ -69,7 +69,15 @@\n \"backends\",\n \"support_matrix.csv\",\n )\n- table.to_csv(dst, index_label=\"Backends\")\n+\n+ if dst.exists():\n+ old = pd.read_csv(dst, index_col=\"Backends\")\n+ should_write = not old.equals(table)\n+ else:\n+ should_write = True\n+\n+ if should_write:\n+ table.to_csv(dst, index_label=\"Backends\")\n \n \n main()\n", "issue": "docs: infinite build when using `mkdocs serve`\nIt appears that when using `mkdocs serve` the docs are repeatedly rebuilt to no end.\r\n\r\nI suspect there's a file that we're generating (maybe the operation matrix?) that is being considered new and triggering a rebuild.\n", "before_files": [{"content": "from pathlib import Path\n\nimport pandas as pd\nimport tomli\n\nimport ibis\nimport ibis.expr.operations as ops\n\n\ndef get_backends():\n pyproject = tomli.loads(Path(\"pyproject.toml\").read_text())\n backends = pyproject[\"tool\"][\"poetry\"][\"plugins\"][\"ibis.backends\"]\n del backends[\"spark\"]\n return [(backend, getattr(ibis, backend)) for backend in sorted(backends.keys())]\n\n\ndef get_leaf_classes(op):\n for child_class in op.__subclasses__():\n if not child_class.__subclasses__():\n yield child_class\n else:\n yield from get_leaf_classes(child_class)\n\n\nEXCLUDED_OPS = {\n # Never translates into anything\n ops.UnresolvedExistsSubquery,\n ops.UnresolvedNotExistsSubquery,\n ops.ScalarParameter,\n}\n\nINCLUDED_OPS = {\n # Parent class of MultiQuantile so it's ignored by `get_backends()`\n ops.Quantile,\n}\n\n\nICONS = {\n True: \":material-check-decagram:{ .verified }\",\n False: \":material-cancel:{ .cancel }\",\n}\n\n\ndef main():\n possible_ops = (\n frozenset(get_leaf_classes(ops.Value)) | INCLUDED_OPS\n ) - EXCLUDED_OPS\n\n support = {\"operation\": [f\"`{op.__name__}`\" for op in possible_ops]}\n support.update(\n (name, list(map(backend.has_operation, possible_ops)))\n for name, backend in get_backends()\n )\n\n df = pd.DataFrame(support).set_index(\"operation\").sort_index()\n\n counts = df.sum().sort_values(ascending=False)\n num_ops = len(possible_ops)\n coverage = (\n counts.map(lambda n: f\"_{n} ({round(100 * n / num_ops)}%)_\")\n .to_frame(name=\"**API Coverage**\")\n .T\n )\n\n ops_table = df.loc[:, counts.index].replace(ICONS)\n table = pd.concat([coverage, ops_table])\n dst = Path(__file__).parent.joinpath(\n \"docs\",\n \"backends\",\n \"support_matrix.csv\",\n )\n table.to_csv(dst, index_label=\"Backends\")\n\n\nmain()\n", "path": "gen_matrix.py"}], "after_files": [{"content": "from pathlib import Path\n\nimport pandas as pd\nimport tomli\n\nimport ibis\nimport ibis.expr.operations as ops\n\n\ndef get_backends():\n pyproject = tomli.loads(Path(\"pyproject.toml\").read_text())\n backends = pyproject[\"tool\"][\"poetry\"][\"plugins\"][\"ibis.backends\"]\n del backends[\"spark\"]\n return [(backend, getattr(ibis, backend)) for backend in sorted(backends.keys())]\n\n\ndef get_leaf_classes(op):\n for child_class in op.__subclasses__():\n if not child_class.__subclasses__():\n yield child_class\n else:\n yield from get_leaf_classes(child_class)\n\n\nEXCLUDED_OPS = {\n # Never translates into anything\n ops.UnresolvedExistsSubquery,\n ops.UnresolvedNotExistsSubquery,\n ops.ScalarParameter,\n}\n\nINCLUDED_OPS = {\n # Parent class of MultiQuantile so it's ignored by `get_backends()`\n ops.Quantile,\n}\n\n\nICONS = {\n True: \":material-check-decagram:{ .verified }\",\n False: \":material-cancel:{ .cancel 
}\",\n}\n\n\ndef main():\n possible_ops = (\n frozenset(get_leaf_classes(ops.Value)) | INCLUDED_OPS\n ) - EXCLUDED_OPS\n\n support = {\"operation\": [f\"`{op.__name__}`\" for op in possible_ops]}\n support.update(\n (name, list(map(backend.has_operation, possible_ops)))\n for name, backend in get_backends()\n )\n\n df = pd.DataFrame(support).set_index(\"operation\").sort_index()\n\n counts = df.sum().sort_values(ascending=False)\n num_ops = len(possible_ops)\n coverage = (\n counts.map(lambda n: f\"_{n} ({round(100 * n / num_ops)}%)_\")\n .to_frame(name=\"**API Coverage**\")\n .T\n )\n\n ops_table = df.loc[:, counts.index].replace(ICONS)\n table = pd.concat([coverage, ops_table])\n dst = Path(__file__).parent.joinpath(\n \"docs\",\n \"backends\",\n \"support_matrix.csv\",\n )\n\n if dst.exists():\n old = pd.read_csv(dst, index_col=\"Backends\")\n should_write = not old.equals(table)\n else:\n should_write = True\n\n if should_write:\n table.to_csv(dst, index_label=\"Backends\")\n\n\nmain()\n", "path": "gen_matrix.py"}]}
945
129
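The fix above is an instance of a general pattern: when a build step regenerates a file on every run, compare against what is already on disk and skip the write when nothing changed, so watchers like `mkdocs serve` see a stable mtime and stop looping. A minimal sketch of the same idea for plain text output (illustrative, not from the ibis repository):

```python
from pathlib import Path


def write_if_changed(dst: Path, content: str) -> bool:
    """Write `content` to `dst` only if it differs from the current file.

    Returns True when the file was (re)written. An unchanged file keeps
    its mtime, so watch-based rebuild tools do not trigger.
    """
    if dst.exists() and dst.read_text() == content:
        return False
    dst.write_text(content)
    return True
```

Note that the golden diff compares parsed DataFrames with `DataFrame.equals` rather than raw bytes, which additionally tolerates formatting-only differences in the CSV.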
gh_patches_debug_36560
rasdani/github-patches
git_diff
mampfes__hacs_waste_collection_schedule-2099
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- [Bug]: day_switch_time does not seem to be working correctly ### I Have A Problem With: The integration in general ### What's Your Problem I have day switch time set to `20:00` but the day switches at `01:19` <img width="228" alt="Screenshot 2024-05-08 at 07 24 31" src="https://github.com/mampfes/hacs_waste_collection_schedule/assets/49797976/c84d1086-1fd8-462a-a206-77ed846838a0"> config: ``` waste_collection_schedule: sources: - name: maldon_gov_uk args: uprn: "uprn" customize: - type: Refuse Collection - type: Recycling day_switch_time: "20:00" fetch_time: 01:00 ``` ### Source (if relevant) Maldon District Council / maldon.gov.uk ### Logs _No response_ ### Relevant Configuration ```YAML waste_collection_schedule: sources: - name: maldon_gov_uk args: uprn: "uprn" customize: - type: Refuse Collection - type: Recycling day_switch_time: "20:00" fetch_time: 01:00 ``` ### Checklist Source Error - [ ] Use the example parameters for your source (often available in the documentation) (don't forget to restart Home Assistant after changing the configuration) - [X] Checked that the website of your service provider is still working - [ ] Tested my attributes on the service provider website (if possible) - [X] I have tested with the latest version of the integration (master) (for HACS in the 3 dot menu of the integration click on "Redownload" and choose master as version) ### Checklist Sensor Error - [X] Checked in the Home Assistant Calendar tab if the event names match the types names (if types argument is used) ### Required - [X] I have searched past (closed AND opened) issues to see if this bug has already been reported, and it hasn't been. - [X] I understand that people give their precious time for free, and thus I've done my very best to make this problem as easy as possible to investigate. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. 
--- BEGIN FILES --- Path: `custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py` Content: ``` 1 import re 2 from datetime import datetime 3 4 import requests 5 from bs4 import BeautifulSoup 6 from waste_collection_schedule import Collection 7 8 TITLE = "Maldon District Council" 9 10 DESCRIPTION = ("Source for www.maldon.gov.uk services for Maldon, UK") 11 12 URL = "https://www.maldon.gov.uk/" 13 14 TEST_CASES = { 15 "test 1": {"uprn": "200000917928"}, 16 "test 2": {"uprn": "100091258454"}, 17 } 18 19 API_URL = "https://maldon.suez.co.uk/maldon/ServiceSummary?uprn=" 20 21 ICON_MAP = { 22 "Refuse Collection": "mdi:trash-can", 23 "Recycling": "mdi:recycle", 24 "Green": "mdi:leaf", 25 "Food": "mdi:food-apple", 26 } 27 28 class Source: 29 def __init__(self, uprn: str): 30 self._uprn = uprn 31 32 def _extract_future_date(self, text): 33 # parse both dates and return the future one 34 dates = re.findall(r'\d{2}/\d{2}/\d{4}', text) 35 dates = [datetime.strptime(date, '%d/%m/%Y').date() for date in dates] 36 return max(dates) 37 38 def fetch(self): 39 entries = [] 40 41 session = requests.Session() 42 43 r = session.get(f"{API_URL}{self._uprn}") 44 soup = BeautifulSoup(r.text, features="html.parser") 45 collections = soup.find_all("div", {"class": "panel-default"}) 46 47 if not collections: 48 raise Exception("No collections found for given UPRN") 49 50 for collection in collections: 51 # check is a collection row 52 title = collection.find("h2", {"class": "panel-title"}).text.strip() 53 54 if title == "Other Services" or "You are not currently subscribed" in collection.text: 55 continue 56 57 entries.append( 58 Collection( 59 date=self._extract_future_date(collection.text), 60 t=title, 61 icon=ICON_MAP.get(title), 62 ) 63 ) 64 65 return entries 66 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py b/custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py
--- a/custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py
+++ b/custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py
@@ -3,17 +3,17 @@
 
 import requests
 from bs4 import BeautifulSoup
-from waste_collection_schedule import Collection
+from waste_collection_schedule import Collection  # type: ignore[attr-defined]
 
 TITLE = "Maldon District Council"
 
-DESCRIPTION = ("Source for www.maldon.gov.uk services for Maldon, UK")
+DESCRIPTION = "Source for www.maldon.gov.uk services for Maldon, UK"
 
 URL = "https://www.maldon.gov.uk/"
 
 TEST_CASES = {
     "test 1": {"uprn": "200000917928"},
-    "test 2": {"uprn": "100091258454"},
+    "test 2": {"uprn": 100091258454},
 }
 
 API_URL = "https://maldon.suez.co.uk/maldon/ServiceSummary?uprn="
@@ -25,15 +25,15 @@
     "Food": "mdi:food-apple",
 }
 
+
 class Source:
    def __init__(self, uprn: str):
        self._uprn = uprn
 
-    def _extract_future_date(self, text):
+    def _extract_dates(self, text):
         # parse both dates and return the future one
-        dates = re.findall(r'\d{2}/\d{2}/\d{4}', text)
-        dates = [datetime.strptime(date, '%d/%m/%Y').date() for date in dates]
-        return max(dates)
+        dates = re.findall(r"\d{2}/\d{2}/\d{4}", text)
+        return [datetime.strptime(date, "%d/%m/%Y").date() for date in dates]
 
     def fetch(self):
         entries = []
@@ -51,15 +51,19 @@
             # check is a collection row
             title = collection.find("h2", {"class": "panel-title"}).text.strip()
 
-            if title == "Other Services" or "You are not currently subscribed" in collection.text:
+            if (
+                title == "Other Services"
+                or "You are not currently subscribed" in collection.text
+            ):
                 continue
 
-            entries.append(
-                Collection(
-                    date=self._extract_future_date(collection.text),
-                    t=title,
-                    icon=ICON_MAP.get(title),
+            for date in self._extract_dates(collection.text):
+                entries.append(
+                    Collection(
+                        date=date,
+                        t=title,
+                        icon=ICON_MAP.get(title),
+                    )
                 )
-            )
 
         return entries
{"golden_diff": "diff --git a/custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py b/custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py\n--- a/custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py\n+++ b/custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py\n@@ -3,17 +3,17 @@\n \n import requests\n from bs4 import BeautifulSoup\n-from waste_collection_schedule import Collection\n+from waste_collection_schedule import Collection # type: ignore[attr-defined]\n \n TITLE = \"Maldon District Council\"\n \n-DESCRIPTION = (\"Source for www.maldon.gov.uk services for Maldon, UK\")\n+DESCRIPTION = \"Source for www.maldon.gov.uk services for Maldon, UK\"\n \n URL = \"https://www.maldon.gov.uk/\"\n \n TEST_CASES = {\n \"test 1\": {\"uprn\": \"200000917928\"},\n- \"test 2\": {\"uprn\": \"100091258454\"},\n+ \"test 2\": {\"uprn\": 100091258454},\n }\n \n API_URL = \"https://maldon.suez.co.uk/maldon/ServiceSummary?uprn=\"\n@@ -25,15 +25,15 @@\n \"Food\": \"mdi:food-apple\",\n }\n \n+\n class Source:\n def __init__(self, uprn: str):\n self._uprn = uprn\n \n- def _extract_future_date(self, text):\n+ def _extract_dates(self, text):\n # parse both dates and return the future one\n- dates = re.findall(r'\\d{2}/\\d{2}/\\d{4}', text)\n- dates = [datetime.strptime(date, '%d/%m/%Y').date() for date in dates]\n- return max(dates)\n+ dates = re.findall(r\"\\d{2}/\\d{2}/\\d{4}\", text)\n+ return [datetime.strptime(date, \"%d/%m/%Y\").date() for date in dates]\n \n def fetch(self):\n entries = []\n@@ -51,15 +51,19 @@\n # check is a collection row\n title = collection.find(\"h2\", {\"class\": \"panel-title\"}).text.strip()\n \n- if title == \"Other Services\" or \"You are not currently subscribed\" in collection.text:\n+ if (\n+ title == \"Other Services\"\n+ or \"You are not currently subscribed\" in collection.text\n+ ):\n continue\n \n- entries.append(\n- Collection(\n- date=self._extract_future_date(collection.text),\n- t=title,\n- icon=ICON_MAP.get(title),\n+ for date in self._extract_dates(collection.text):\n+ entries.append(\n+ Collection(\n+ date=date,\n+ t=title,\n+ icon=ICON_MAP.get(title),\n+ )\n )\n- )\n \n return entries\n", "issue": "[Bug]: day_switch_time does not seem to be working correctly\n### I Have A Problem With:\n\nThe integration in general\n\n### What's Your Problem\n\nI have day switch time set to `20:00` but the day switches at `01:19`\r\n\r\n<img width=\"228\" alt=\"Screenshot 2024-05-08 at 07 24 31\" src=\"https://github.com/mampfes/hacs_waste_collection_schedule/assets/49797976/c84d1086-1fd8-462a-a206-77ed846838a0\">\r\n\r\nconfig:\r\n\r\n```\r\nwaste_collection_schedule:\r\n sources:\r\n - name: maldon_gov_uk\r\n args:\r\n uprn: \"uprn\"\r\n customize:\r\n - type: Refuse Collection\r\n - type: Recycling\r\n day_switch_time: \"20:00\"\r\n fetch_time: 01:00\r\n```\r\n\n\n### Source (if relevant)\n\nMaldon District Council / maldon.gov.uk\n\n### Logs\n\n_No response_\n\n### Relevant Configuration\n\n```YAML\nwaste_collection_schedule:\r\n sources:\r\n - name: maldon_gov_uk\r\n args:\r\n uprn: \"uprn\"\r\n customize:\r\n - type: Refuse Collection\r\n - type: Recycling\r\n day_switch_time: \"20:00\"\r\n fetch_time: 01:00\n```\n\n\n### Checklist Source Error\n\n- [ ] Use the example parameters for your source (often available in the documentation) (don't forget to restart Home Assistant after changing the configuration)\n- [X] 
Checked that the website of your service provider is still working\n- [ ] Tested my attributes on the service provider website (if possible)\n- [X] I have tested with the latest version of the integration (master) (for HACS in the 3 dot menu of the integration click on \"Redownload\" and choose master as version)\n\n### Checklist Sensor Error\n\n- [X] Checked in the Home Assistant Calendar tab if the event names match the types names (if types argument is used)\n\n### Required\n\n- [X] I have searched past (closed AND opened) issues to see if this bug has already been reported, and it hasn't been.\n- [X] I understand that people give their precious time for free, and thus I've done my very best to make this problem as easy as possible to investigate.\n", "before_files": [{"content": "import re\nfrom datetime import datetime\n\nimport requests\nfrom bs4 import BeautifulSoup\nfrom waste_collection_schedule import Collection\n\nTITLE = \"Maldon District Council\"\n\nDESCRIPTION = (\"Source for www.maldon.gov.uk services for Maldon, UK\")\n\nURL = \"https://www.maldon.gov.uk/\"\n\nTEST_CASES = {\n \"test 1\": {\"uprn\": \"200000917928\"},\n \"test 2\": {\"uprn\": \"100091258454\"},\n}\n\nAPI_URL = \"https://maldon.suez.co.uk/maldon/ServiceSummary?uprn=\"\n\nICON_MAP = {\n \"Refuse Collection\": \"mdi:trash-can\",\n \"Recycling\": \"mdi:recycle\",\n \"Green\": \"mdi:leaf\",\n \"Food\": \"mdi:food-apple\",\n}\n\nclass Source:\n def __init__(self, uprn: str):\n self._uprn = uprn\n\n def _extract_future_date(self, text):\n # parse both dates and return the future one\n dates = re.findall(r'\\d{2}/\\d{2}/\\d{4}', text)\n dates = [datetime.strptime(date, '%d/%m/%Y').date() for date in dates]\n return max(dates)\n\n def fetch(self):\n entries = []\n\n session = requests.Session()\n\n r = session.get(f\"{API_URL}{self._uprn}\")\n soup = BeautifulSoup(r.text, features=\"html.parser\")\n collections = soup.find_all(\"div\", {\"class\": \"panel-default\"})\n\n if not collections:\n raise Exception(\"No collections found for given UPRN\")\n\n for collection in collections:\n # check is a collection row\n title = collection.find(\"h2\", {\"class\": \"panel-title\"}).text.strip()\n\n if title == \"Other Services\" or \"You are not currently subscribed\" in collection.text:\n continue\n\n entries.append(\n Collection(\n date=self._extract_future_date(collection.text),\n t=title,\n icon=ICON_MAP.get(title),\n )\n )\n\n return entries\n", "path": "custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py"}], "after_files": [{"content": "import re\nfrom datetime import datetime\n\nimport requests\nfrom bs4 import BeautifulSoup\nfrom waste_collection_schedule import Collection # type: ignore[attr-defined]\n\nTITLE = \"Maldon District Council\"\n\nDESCRIPTION = \"Source for www.maldon.gov.uk services for Maldon, UK\"\n\nURL = \"https://www.maldon.gov.uk/\"\n\nTEST_CASES = {\n \"test 1\": {\"uprn\": \"200000917928\"},\n \"test 2\": {\"uprn\": 100091258454},\n}\n\nAPI_URL = \"https://maldon.suez.co.uk/maldon/ServiceSummary?uprn=\"\n\nICON_MAP = {\n \"Refuse Collection\": \"mdi:trash-can\",\n \"Recycling\": \"mdi:recycle\",\n \"Green\": \"mdi:leaf\",\n \"Food\": \"mdi:food-apple\",\n}\n\n\nclass Source:\n def __init__(self, uprn: str):\n self._uprn = uprn\n\n def _extract_dates(self, text):\n # parse both dates and return the future one\n dates = re.findall(r\"\\d{2}/\\d{2}/\\d{4}\", text)\n return [datetime.strptime(date, \"%d/%m/%Y\").date() for date in dates]\n\n def 
fetch(self):\n entries = []\n\n session = requests.Session()\n\n r = session.get(f\"{API_URL}{self._uprn}\")\n soup = BeautifulSoup(r.text, features=\"html.parser\")\n collections = soup.find_all(\"div\", {\"class\": \"panel-default\"})\n\n if not collections:\n raise Exception(\"No collections found for given UPRN\")\n\n for collection in collections:\n # check is a collection row\n title = collection.find(\"h2\", {\"class\": \"panel-title\"}).text.strip()\n\n if (\n title == \"Other Services\"\n or \"You are not currently subscribed\" in collection.text\n ):\n continue\n\n for date in self._extract_dates(collection.text):\n entries.append(\n Collection(\n date=date,\n t=title,\n icon=ICON_MAP.get(title),\n )\n )\n\n return entries\n", "path": "custom_components/waste_collection_schedule/waste_collection_schedule/source/maldon_gov_uk.py"}]}
1408
678
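The golden diff here replaces `max(dates)` — which kept only the later of each panel's two printed dates — with a list of every parsed date, so each upcoming collection becomes its own entry. The corrected extraction is easy to exercise standalone; the sample panel text below is made up for illustration:

```python
import re
from datetime import date, datetime


def extract_dates(text: str) -> list[date]:
    """Return every dd/mm/yyyy date found in `text`, in order of appearance."""
    return [
        datetime.strptime(raw, "%d/%m/%Y").date()
        for raw in re.findall(r"\d{2}/\d{2}/\d{4}", text)
    ]


# Hypothetical panel text:
panel = "Refuse Collection: last collected 01/05/2024, next collection 15/05/2024"
print(extract_dates(panel))  # [datetime.date(2024, 5, 1), datetime.date(2024, 5, 15)]
```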
gh_patches_debug_4220
rasdani/github-patches
git_diff
freedomofpress__securedrop-6586
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Clean up outdated references to Python 3.5 *This is a good first issue for new contributors to take on, if you have any questions, please ask on the task or in our [Gitter room](https://gitter.im/freedomofpress/securedrop)!* ## Description SecureDrop now runs on focal, which uses Python 3.8. But there are still references to Python 3.5 that need to be cleaned up. Some should be dropped outright, others should be switched to 3.8. Some examples: ``` $ rg python3\\.5 install_files/securedrop-grsec-focal/opt/securedrop/paxctld.conf 98:/usr/bin/python3.5 E molecule/testinfra/vars/app-qubes-staging.yml 13:securedrop_venv_site_packages: "{{ securedrop_venv }}/lib/python3.5/site-packages" molecule/testinfra/vars/prodVM.yml 12:securedrop_venv_site_packages: "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages" install_files/ansible-base/roles/build-securedrop-app-code-deb-pkg/files/usr.sbin.apache2 71: /etc/python3.5/sitecustomize.py r, 109: /usr/local/lib/python3.5/dist-packages/ r, 117: /opt/venvs/securedrop-app-code/lib/python3.5/ r, 118: /opt/venvs/securedrop-app-code/lib/python3.5/** rm, securedrop/scripts/rqrequeue 9:sys.path.insert(0, "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages") # noqa: E402 securedrop/scripts/shredder 14: 0, "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages" securedrop/scripts/source_deleter 14: 0, "/opt/venvs/securedrop-app-code/lib/python3.5/site-packages" $ rg 3\\.5 --type=py molecule/builder-focal/tests/test_build_dependencies.py 6:SECUREDROP_PYTHON_VERSION = os.environ.get("SECUREDROP_PYTHON_VERSION", "3.5") setup.py 14: python_requires=">=3.5", ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 import setuptools 2 3 long_description = "The SecureDrop whistleblower platform." 4 5 setuptools.setup( 6 name="securedrop-app-code", 7 version="2.5.0~rc1", 8 author="Freedom of the Press Foundation", 9 author_email="[email protected]", 10 description="SecureDrop Server", 11 long_description=long_description, 12 long_description_content_type="text/markdown", 13 license="AGPLv3+", 14 python_requires=">=3.5", 15 url="https://github.com/freedomofpress/securedrop", 16 classifiers=( 17 "Development Status :: 5 - Stable", 18 "Programming Language :: Python :: 3", 19 "Topic :: Software Development :: Libraries :: Python Modules", 20 "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", 21 "Intended Audience :: Developers", 22 "Operating System :: OS Independent", 23 ), 24 ) 25 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@
     long_description=long_description,
     long_description_content_type="text/markdown",
     license="AGPLv3+",
-    python_requires=">=3.5",
+    python_requires=">=3.8",
     url="https://github.com/freedomofpress/securedrop",
     classifiers=(
         "Development Status :: 5 - Stable",
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -11,7 +11,7 @@\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n license=\"AGPLv3+\",\n- python_requires=\">=3.5\",\n+ python_requires=\">=3.8\",\n url=\"https://github.com/freedomofpress/securedrop\",\n classifiers=(\n \"Development Status :: 5 - Stable\",\n", "issue": "Clean up outdated references to Python 3.5\n*This is a good first issue for new contributors to take on, if you have any questions, please ask on the task or in our [Gitter room](https://gitter.im/freedomofpress/securedrop)!*\r\n\r\n## Description\r\n\r\nSecureDrop now runs on focal, which uses Python 3.8. But there are still references to Python 3.5 that need to be cleaned up. Some should be dropped outright, others should be switched to 3.8.\r\n\r\n\r\nSome examples:\r\n```\r\n$ rg python3\\\\.5\r\ninstall_files/securedrop-grsec-focal/opt/securedrop/paxctld.conf\r\n98:/usr/bin/python3.5\t\tE\r\n\r\nmolecule/testinfra/vars/app-qubes-staging.yml\r\n13:securedrop_venv_site_packages: \"{{ securedrop_venv }}/lib/python3.5/site-packages\"\r\n\r\nmolecule/testinfra/vars/prodVM.yml\r\n12:securedrop_venv_site_packages: \"/opt/venvs/securedrop-app-code/lib/python3.5/site-packages\"\r\n\r\ninstall_files/ansible-base/roles/build-securedrop-app-code-deb-pkg/files/usr.sbin.apache2\r\n71: /etc/python3.5/sitecustomize.py r,\r\n109: /usr/local/lib/python3.5/dist-packages/ r,\r\n117: /opt/venvs/securedrop-app-code/lib/python3.5/ r,\r\n118: /opt/venvs/securedrop-app-code/lib/python3.5/** rm,\r\n\r\nsecuredrop/scripts/rqrequeue\r\n9:sys.path.insert(0, \"/opt/venvs/securedrop-app-code/lib/python3.5/site-packages\") # noqa: E402\r\n\r\nsecuredrop/scripts/shredder\r\n14: 0, \"/opt/venvs/securedrop-app-code/lib/python3.5/site-packages\"\r\n\r\nsecuredrop/scripts/source_deleter\r\n14: 0, \"/opt/venvs/securedrop-app-code/lib/python3.5/site-packages\"\r\n$ rg 3\\\\.5 --type=py\r\nmolecule/builder-focal/tests/test_build_dependencies.py\r\n6:SECUREDROP_PYTHON_VERSION = os.environ.get(\"SECUREDROP_PYTHON_VERSION\", \"3.5\")\r\n\r\nsetup.py\r\n14: python_requires=\">=3.5\",\r\n```\n", "before_files": [{"content": "import setuptools\n\nlong_description = \"The SecureDrop whistleblower platform.\"\n\nsetuptools.setup(\n name=\"securedrop-app-code\",\n version=\"2.5.0~rc1\",\n author=\"Freedom of the Press Foundation\",\n author_email=\"[email protected]\",\n description=\"SecureDrop Server\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n license=\"AGPLv3+\",\n python_requires=\">=3.5\",\n url=\"https://github.com/freedomofpress/securedrop\",\n classifiers=(\n \"Development Status :: 5 - Stable\",\n \"Programming Language :: Python :: 3\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n \"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)\",\n \"Intended Audience :: Developers\",\n \"Operating System :: OS Independent\",\n ),\n)\n", "path": "setup.py"}], "after_files": [{"content": "import setuptools\n\nlong_description = \"The SecureDrop whistleblower platform.\"\n\nsetuptools.setup(\n name=\"securedrop-app-code\",\n version=\"2.5.0~rc1\",\n author=\"Freedom of the Press Foundation\",\n author_email=\"[email protected]\",\n description=\"SecureDrop Server\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n license=\"AGPLv3+\",\n python_requires=\">=3.8\",\n 
url=\"https://github.com/freedomofpress/securedrop\",\n classifiers=(\n \"Development Status :: 5 - Stable\",\n \"Programming Language :: Python :: 3\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n \"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)\",\n \"Intended Audience :: Developers\",\n \"Operating System :: OS Independent\",\n ),\n)\n", "path": "setup.py"}]}
1001
107
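The issue text already lists the `rg` searches that locate the stale references; the golden diff then fixes the one in `setup.py`. As a dependency-free stand-in for those searches, a sketch of the same scan in Python (the regex mirrors the patterns quoted in the issue; the root path is hypothetical):

```python
import re
from pathlib import Path

STALE = re.compile(r"python3\.5|>=3\.5")


def find_stale_refs(root: Path) -> list[tuple[Path, int, str]]:
    """Return (file, line number, line) for each stale Python 3.5 reference."""
    hits = []
    for path in root.rglob("*"):
        if not path.is_file():
            continue
        try:
            lines = path.read_text(errors="ignore").splitlines()
        except OSError:
            continue
        for lineno, line in enumerate(lines, start=1):
            if STALE.search(line):
                hits.append((path, lineno, line.strip()))
    return hits


# Hypothetical usage from a repository checkout:
# for path, lineno, line in find_stale_refs(Path(".")):
#     print(f"{path}:{lineno}: {line}")
```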
gh_patches_debug_45504
rasdani/github-patches
git_diff
Project-MONAI__MONAI-2062
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Add support to move data to `device` after inverting **Is your feature request related to a problem? Please describe.** Need to enhance the `TransformInverter` handler to move data to expected `device`. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `monai/handlers/transform_inverter.py` Content: ``` 1 # Copyright 2020 - 2021 MONAI Consortium 2 # Licensed under the Apache License, Version 2.0 (the "License"); 3 # you may not use this file except in compliance with the License. 4 # You may obtain a copy of the License at 5 # http://www.apache.org/licenses/LICENSE-2.0 6 # Unless required by applicable law or agreed to in writing, software 7 # distributed under the License is distributed on an "AS IS" BASIS, 8 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 9 # See the License for the specific language governing permissions and 10 # limitations under the License. 11 12 import warnings 13 from copy import deepcopy 14 from typing import TYPE_CHECKING, Callable, Optional, Sequence, Union 15 16 from torch.utils.data import DataLoader as TorchDataLoader 17 18 from monai.data import BatchInverseTransform 19 from monai.data.utils import no_collation 20 from monai.engines.utils import CommonKeys, IterationEvents 21 from monai.transforms import InvertibleTransform, ToTensor, allow_missing_keys_mode, convert_inverse_interp_mode 22 from monai.utils import InverseKeys, ensure_tuple, ensure_tuple_rep, exact_version, optional_import 23 24 Events, _ = optional_import("ignite.engine", "0.4.4", exact_version, "Events") 25 if TYPE_CHECKING: 26 from ignite.engine import Engine 27 else: 28 Engine, _ = optional_import("ignite.engine", "0.4.4", exact_version, "Engine") 29 30 31 class TransformInverter: 32 """ 33 Ignite handler to automatically invert `transforms`. 34 It takes `engine.state.output` as the input data and uses the transforms information from `engine.state.batch`. 35 The outputs are stored in `engine.state.output` with key: "{output_key}_{postfix}". 36 """ 37 38 def __init__( 39 self, 40 transform: InvertibleTransform, 41 loader: TorchDataLoader, 42 output_keys: Union[str, Sequence[str]] = CommonKeys.PRED, 43 batch_keys: Union[str, Sequence[str]] = CommonKeys.IMAGE, 44 meta_key_postfix: str = "meta_dict", 45 collate_fn: Optional[Callable] = no_collation, 46 postfix: str = "inverted", 47 nearest_interp: Union[bool, Sequence[bool]] = True, 48 post_func: Union[Callable, Sequence[Callable]] = lambda x: x, 49 num_workers: Optional[int] = 0, 50 ) -> None: 51 """ 52 Args: 53 transform: a callable data transform on input data. 54 loader: data loader used to run transforms and generate the batch of data. 55 output_keys: the key of expected data in `ignite.engine.output`, invert transforms on it. 56 it also can be a list of keys, will invert transform for each of them. Default to "pred". 57 batch_keys: the key of input data in `ignite.engine.batch`. will get the applied transforms 58 for this input data, then invert them for the expected data with `output_keys`. 59 It can also be a list of keys, each matches to the `output_keys` data. default to "image". 60 meta_key_postfix: use `{batch_key}_{postfix}` to to fetch the meta data according to the key data, 61 default is `meta_dict`, the meta data is a dictionary object. 
62 For example, to handle key `image`, read/write affine matrices from the 63 metadata `image_meta_dict` dictionary's `affine` field. 64 collate_fn: how to collate data after inverse transformations. 65 default won't do any collation, so the output will be a list of size batch size. 66 postfix: will save the inverted result into `ignite.engine.output` with key `{output_key}_{postfix}`. 67 nearest_interp: whether to use `nearest` interpolation mode when inverting the spatial transforms, 68 default to `True`. If `False`, use the same interpolation mode as the original transform. 69 it also can be a list of bool, each matches to the `output_keys` data. 70 post_func: post processing for the inverted data, should be a callable function. 71 it also can be a list of callable, each matches to the `output_keys` data. 72 num_workers: number of workers when run data loader for inverse transforms, 73 default to 0 as only run one iteration and multi-processing may be even slower. 74 Set to `None`, to use the `num_workers` of the input transform data loader. 75 76 """ 77 self.transform = transform 78 self.inverter = BatchInverseTransform( 79 transform=transform, 80 loader=loader, 81 collate_fn=collate_fn, 82 num_workers=num_workers, 83 ) 84 self.output_keys = ensure_tuple(output_keys) 85 self.batch_keys = ensure_tuple_rep(batch_keys, len(self.output_keys)) 86 self.meta_key_postfix = meta_key_postfix 87 self.postfix = postfix 88 self.nearest_interp = ensure_tuple_rep(nearest_interp, len(self.output_keys)) 89 self.post_func = ensure_tuple_rep(post_func, len(self.output_keys)) 90 self._totensor = ToTensor() 91 92 def attach(self, engine: Engine) -> None: 93 """ 94 Args: 95 engine: Ignite Engine, it can be a trainer, validator or evaluator. 96 """ 97 engine.add_event_handler(IterationEvents.MODEL_COMPLETED, self) 98 99 def __call__(self, engine: Engine) -> None: 100 """ 101 Args: 102 engine: Ignite Engine, it can be a trainer, validator or evaluator. 103 """ 104 for output_key, batch_key, nearest_interp, post_funct in zip( 105 self.output_keys, self.batch_keys, self.nearest_interp, self.post_func 106 ): 107 transform_key = batch_key + InverseKeys.KEY_SUFFIX 108 if transform_key not in engine.state.batch: 109 warnings.warn(f"all the transforms on `{batch_key}` are not InvertibleTransform.") 110 continue 111 112 transform_info = engine.state.batch[transform_key] 113 if nearest_interp: 114 transform_info = convert_inverse_interp_mode( 115 trans_info=deepcopy(transform_info), 116 mode="nearest", 117 align_corners=None, 118 ) 119 120 segs_dict = { 121 batch_key: engine.state.output[output_key].detach().cpu(), 122 transform_key: transform_info, 123 } 124 meta_dict_key = f"{batch_key}_{self.meta_key_postfix}" 125 if meta_dict_key in engine.state.batch: 126 segs_dict[meta_dict_key] = engine.state.batch[meta_dict_key] 127 128 with allow_missing_keys_mode(self.transform): # type: ignore 129 inverted_key = f"{output_key}_{self.postfix}" 130 engine.state.output[inverted_key] = [ 131 post_funct(self._totensor(i[batch_key])) for i in self.inverter(segs_dict) 132 ] 133 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/monai/handlers/transform_inverter.py b/monai/handlers/transform_inverter.py
--- a/monai/handlers/transform_inverter.py
+++ b/monai/handlers/transform_inverter.py
@@ -13,6 +13,7 @@
 from copy import deepcopy
 from typing import TYPE_CHECKING, Callable, Optional, Sequence, Union
 
+import torch
 from torch.utils.data import DataLoader as TorchDataLoader
 
 from monai.data import BatchInverseTransform
@@ -32,7 +33,7 @@
     """
     Ignite handler to automatically invert `transforms`.
     It takes `engine.state.output` as the input data and uses the transforms information from `engine.state.batch`.
-    The outputs are stored in `engine.state.output` with key: "{output_key}_{postfix}".
+    The inverted results are stored in `engine.state.output` with key: "{output_key}_{postfix}".
     """
 
     def __init__(
@@ -45,6 +46,8 @@
         collate_fn: Optional[Callable] = no_collation,
         postfix: str = "inverted",
         nearest_interp: Union[bool, Sequence[bool]] = True,
+        to_tensor: Union[bool, Sequence[bool]] = True,
+        device: Union[Union[str, torch.device], Sequence[Union[str, torch.device]]] = "cpu",
         post_func: Union[Callable, Sequence[Callable]] = lambda x: x,
         num_workers: Optional[int] = 0,
     ) -> None:
@@ -67,6 +70,11 @@
             nearest_interp: whether to use `nearest` interpolation mode when inverting the spatial transforms,
                 default to `True`. If `False`, use the same interpolation mode as the original transform.
                 it also can be a list of bool, each matches to the `output_keys` data.
+            to_tensor: whether to convert the inverted data into PyTorch Tensor first, default to `True`.
+                it also can be a list of bool, each matches to the `output_keys` data.
+            device: if converted to Tensor, move the inverted results to target device before `post_func`,
+                default to "cpu", it also can be a list of string or `torch.device`,
+                each matches to the `output_keys` data.
             post_func: post processing for the inverted data, should be a callable function.
                 it also can be a list of callable, each matches to the `output_keys` data.
             num_workers: number of workers when run data loader for inverse transforms,
@@ -86,6 +94,8 @@
         self.meta_key_postfix = meta_key_postfix
         self.postfix = postfix
         self.nearest_interp = ensure_tuple_rep(nearest_interp, len(self.output_keys))
+        self.to_tensor = ensure_tuple_rep(to_tensor, len(self.output_keys))
+        self.device = ensure_tuple_rep(device, len(self.output_keys))
         self.post_func = ensure_tuple_rep(post_func, len(self.output_keys))
         self._totensor = ToTensor()
 
@@ -101,8 +111,8 @@
         Args:
             engine: Ignite Engine, it can be a trainer, validator or evaluator.
         """
-        for output_key, batch_key, nearest_interp, post_funct in zip(
-            self.output_keys, self.batch_keys, self.nearest_interp, self.post_func
+        for output_key, batch_key, nearest_interp, to_tensor, device, post_func in zip(
+            self.output_keys, self.batch_keys, self.nearest_interp, self.to_tensor, self.device, self.post_func
         ):
             transform_key = batch_key + InverseKeys.KEY_SUFFIX
             if transform_key not in engine.state.batch:
@@ -118,7 +128,7 @@
             )
 
             segs_dict = {
-                batch_key: engine.state.output[output_key].detach().cpu(),
+                batch_key: engine.state.output[output_key],
                 transform_key: transform_info,
             }
             meta_dict_key = f"{batch_key}_{self.meta_key_postfix}"
@@ -128,5 +138,6 @@
             with allow_missing_keys_mode(self.transform):  # type: ignore
                 inverted_key = f"{output_key}_{self.postfix}"
                 engine.state.output[inverted_key] = [
-                    post_funct(self._totensor(i[batch_key])) for i in self.inverter(segs_dict)
+                    post_func(self._totensor(i[batch_key]).to(device) if to_tensor else i[batch_key])
+                    for i in self.inverter(segs_dict)
                 ]
{"golden_diff": "diff --git a/monai/handlers/transform_inverter.py b/monai/handlers/transform_inverter.py\n--- a/monai/handlers/transform_inverter.py\n+++ b/monai/handlers/transform_inverter.py\n@@ -13,6 +13,7 @@\n from copy import deepcopy\n from typing import TYPE_CHECKING, Callable, Optional, Sequence, Union\n \n+import torch\n from torch.utils.data import DataLoader as TorchDataLoader\n \n from monai.data import BatchInverseTransform\n@@ -32,7 +33,7 @@\n \"\"\"\n Ignite handler to automatically invert `transforms`.\n It takes `engine.state.output` as the input data and uses the transforms information from `engine.state.batch`.\n- The outputs are stored in `engine.state.output` with key: \"{output_key}_{postfix}\".\n+ The inverted results are stored in `engine.state.output` with key: \"{output_key}_{postfix}\".\n \"\"\"\n \n def __init__(\n@@ -45,6 +46,8 @@\n collate_fn: Optional[Callable] = no_collation,\n postfix: str = \"inverted\",\n nearest_interp: Union[bool, Sequence[bool]] = True,\n+ to_tensor: Union[bool, Sequence[bool]] = True,\n+ device: Union[Union[str, torch.device], Sequence[Union[str, torch.device]]] = \"cpu\",\n post_func: Union[Callable, Sequence[Callable]] = lambda x: x,\n num_workers: Optional[int] = 0,\n ) -> None:\n@@ -67,6 +70,11 @@\n nearest_interp: whether to use `nearest` interpolation mode when inverting the spatial transforms,\n default to `True`. If `False`, use the same interpolation mode as the original transform.\n it also can be a list of bool, each matches to the `output_keys` data.\n+ to_tensor: whether to convert the inverted data into PyTorch Tensor first, default to `True`.\n+ it also can be a list of bool, each matches to the `output_keys` data.\n+ device: if converted to Tensor, move the inverted results to target device before `post_func`,\n+ default to \"cpu\", it also can be a list of string or `torch.device`,\n+ each matches to the `output_keys` data.\n post_func: post processing for the inverted data, should be a callable function.\n it also can be a list of callable, each matches to the `output_keys` data.\n num_workers: number of workers when run data loader for inverse transforms,\n@@ -86,6 +94,8 @@\n self.meta_key_postfix = meta_key_postfix\n self.postfix = postfix\n self.nearest_interp = ensure_tuple_rep(nearest_interp, len(self.output_keys))\n+ self.to_tensor = ensure_tuple_rep(to_tensor, len(self.output_keys))\n+ self.device = ensure_tuple_rep(device, len(self.output_keys))\n self.post_func = ensure_tuple_rep(post_func, len(self.output_keys))\n self._totensor = ToTensor()\n \n@@ -101,8 +111,8 @@\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n- for output_key, batch_key, nearest_interp, post_funct in zip(\n- self.output_keys, self.batch_keys, self.nearest_interp, self.post_func\n+ for output_key, batch_key, nearest_interp, to_tensor, device, post_func in zip(\n+ self.output_keys, self.batch_keys, self.nearest_interp, self.to_tensor, self.device, self.post_func\n ):\n transform_key = batch_key + InverseKeys.KEY_SUFFIX\n if transform_key not in engine.state.batch:\n@@ -118,7 +128,7 @@\n )\n \n segs_dict = {\n- batch_key: engine.state.output[output_key].detach().cpu(),\n+ batch_key: engine.state.output[output_key],\n transform_key: transform_info,\n }\n meta_dict_key = f\"{batch_key}_{self.meta_key_postfix}\"\n@@ -128,5 +138,6 @@\n with allow_missing_keys_mode(self.transform): # type: ignore\n inverted_key = f\"{output_key}_{self.postfix}\"\n engine.state.output[inverted_key] = [\n- 
post_funct(self._totensor(i[batch_key])) for i in self.inverter(segs_dict)\n+ post_func(self._totensor(i[batch_key]).to(device) if to_tensor else i[batch_key])\n+ for i in self.inverter(segs_dict)\n ]\n", "issue": "Add support to move data to `device` after inverting\n**Is your feature request related to a problem? Please describe.**\r\nNeed to enhance the `TransformInverter` handler to move data to expected `device`.\r\n\n", "before_files": [{"content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport warnings\nfrom copy import deepcopy\nfrom typing import TYPE_CHECKING, Callable, Optional, Sequence, Union\n\nfrom torch.utils.data import DataLoader as TorchDataLoader\n\nfrom monai.data import BatchInverseTransform\nfrom monai.data.utils import no_collation\nfrom monai.engines.utils import CommonKeys, IterationEvents\nfrom monai.transforms import InvertibleTransform, ToTensor, allow_missing_keys_mode, convert_inverse_interp_mode\nfrom monai.utils import InverseKeys, ensure_tuple, ensure_tuple_rep, exact_version, optional_import\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")\n\n\nclass TransformInverter:\n \"\"\"\n Ignite handler to automatically invert `transforms`.\n It takes `engine.state.output` as the input data and uses the transforms information from `engine.state.batch`.\n The outputs are stored in `engine.state.output` with key: \"{output_key}_{postfix}\".\n \"\"\"\n\n def __init__(\n self,\n transform: InvertibleTransform,\n loader: TorchDataLoader,\n output_keys: Union[str, Sequence[str]] = CommonKeys.PRED,\n batch_keys: Union[str, Sequence[str]] = CommonKeys.IMAGE,\n meta_key_postfix: str = \"meta_dict\",\n collate_fn: Optional[Callable] = no_collation,\n postfix: str = \"inverted\",\n nearest_interp: Union[bool, Sequence[bool]] = True,\n post_func: Union[Callable, Sequence[Callable]] = lambda x: x,\n num_workers: Optional[int] = 0,\n ) -> None:\n \"\"\"\n Args:\n transform: a callable data transform on input data.\n loader: data loader used to run transforms and generate the batch of data.\n output_keys: the key of expected data in `ignite.engine.output`, invert transforms on it.\n it also can be a list of keys, will invert transform for each of them. Default to \"pred\".\n batch_keys: the key of input data in `ignite.engine.batch`. will get the applied transforms\n for this input data, then invert them for the expected data with `output_keys`.\n It can also be a list of keys, each matches to the `output_keys` data. 
default to \"image\".\n meta_key_postfix: use `{batch_key}_{postfix}` to to fetch the meta data according to the key data,\n default is `meta_dict`, the meta data is a dictionary object.\n For example, to handle key `image`, read/write affine matrices from the\n metadata `image_meta_dict` dictionary's `affine` field.\n collate_fn: how to collate data after inverse transformations.\n default won't do any collation, so the output will be a list of size batch size.\n postfix: will save the inverted result into `ignite.engine.output` with key `{output_key}_{postfix}`.\n nearest_interp: whether to use `nearest` interpolation mode when inverting the spatial transforms,\n default to `True`. If `False`, use the same interpolation mode as the original transform.\n it also can be a list of bool, each matches to the `output_keys` data.\n post_func: post processing for the inverted data, should be a callable function.\n it also can be a list of callable, each matches to the `output_keys` data.\n num_workers: number of workers when run data loader for inverse transforms,\n default to 0 as only run one iteration and multi-processing may be even slower.\n Set to `None`, to use the `num_workers` of the input transform data loader.\n\n \"\"\"\n self.transform = transform\n self.inverter = BatchInverseTransform(\n transform=transform,\n loader=loader,\n collate_fn=collate_fn,\n num_workers=num_workers,\n )\n self.output_keys = ensure_tuple(output_keys)\n self.batch_keys = ensure_tuple_rep(batch_keys, len(self.output_keys))\n self.meta_key_postfix = meta_key_postfix\n self.postfix = postfix\n self.nearest_interp = ensure_tuple_rep(nearest_interp, len(self.output_keys))\n self.post_func = ensure_tuple_rep(post_func, len(self.output_keys))\n self._totensor = ToTensor()\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n engine.add_event_handler(IterationEvents.MODEL_COMPLETED, self)\n\n def __call__(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n for output_key, batch_key, nearest_interp, post_funct in zip(\n self.output_keys, self.batch_keys, self.nearest_interp, self.post_func\n ):\n transform_key = batch_key + InverseKeys.KEY_SUFFIX\n if transform_key not in engine.state.batch:\n warnings.warn(f\"all the transforms on `{batch_key}` are not InvertibleTransform.\")\n continue\n\n transform_info = engine.state.batch[transform_key]\n if nearest_interp:\n transform_info = convert_inverse_interp_mode(\n trans_info=deepcopy(transform_info),\n mode=\"nearest\",\n align_corners=None,\n )\n\n segs_dict = {\n batch_key: engine.state.output[output_key].detach().cpu(),\n transform_key: transform_info,\n }\n meta_dict_key = f\"{batch_key}_{self.meta_key_postfix}\"\n if meta_dict_key in engine.state.batch:\n segs_dict[meta_dict_key] = engine.state.batch[meta_dict_key]\n\n with allow_missing_keys_mode(self.transform): # type: ignore\n inverted_key = f\"{output_key}_{self.postfix}\"\n engine.state.output[inverted_key] = [\n post_funct(self._totensor(i[batch_key])) for i in self.inverter(segs_dict)\n ]\n", "path": "monai/handlers/transform_inverter.py"}], "after_files": [{"content": "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless 
required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport warnings\nfrom copy import deepcopy\nfrom typing import TYPE_CHECKING, Callable, Optional, Sequence, Union\n\nimport torch\nfrom torch.utils.data import DataLoader as TorchDataLoader\n\nfrom monai.data import BatchInverseTransform\nfrom monai.data.utils import no_collation\nfrom monai.engines.utils import CommonKeys, IterationEvents\nfrom monai.transforms import InvertibleTransform, ToTensor, allow_missing_keys_mode, convert_inverse_interp_mode\nfrom monai.utils import InverseKeys, ensure_tuple, ensure_tuple_rep, exact_version, optional_import\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")\n\n\nclass TransformInverter:\n \"\"\"\n Ignite handler to automatically invert `transforms`.\n It takes `engine.state.output` as the input data and uses the transforms information from `engine.state.batch`.\n The inverted results are stored in `engine.state.output` with key: \"{output_key}_{postfix}\".\n \"\"\"\n\n def __init__(\n self,\n transform: InvertibleTransform,\n loader: TorchDataLoader,\n output_keys: Union[str, Sequence[str]] = CommonKeys.PRED,\n batch_keys: Union[str, Sequence[str]] = CommonKeys.IMAGE,\n meta_key_postfix: str = \"meta_dict\",\n collate_fn: Optional[Callable] = no_collation,\n postfix: str = \"inverted\",\n nearest_interp: Union[bool, Sequence[bool]] = True,\n to_tensor: Union[bool, Sequence[bool]] = True,\n device: Union[Union[str, torch.device], Sequence[Union[str, torch.device]]] = \"cpu\",\n post_func: Union[Callable, Sequence[Callable]] = lambda x: x,\n num_workers: Optional[int] = 0,\n ) -> None:\n \"\"\"\n Args:\n transform: a callable data transform on input data.\n loader: data loader used to run transforms and generate the batch of data.\n output_keys: the key of expected data in `ignite.engine.output`, invert transforms on it.\n it also can be a list of keys, will invert transform for each of them. Default to \"pred\".\n batch_keys: the key of input data in `ignite.engine.batch`. will get the applied transforms\n for this input data, then invert them for the expected data with `output_keys`.\n It can also be a list of keys, each matches to the `output_keys` data. default to \"image\".\n meta_key_postfix: use `{batch_key}_{postfix}` to to fetch the meta data according to the key data,\n default is `meta_dict`, the meta data is a dictionary object.\n For example, to handle key `image`, read/write affine matrices from the\n metadata `image_meta_dict` dictionary's `affine` field.\n collate_fn: how to collate data after inverse transformations.\n default won't do any collation, so the output will be a list of size batch size.\n postfix: will save the inverted result into `ignite.engine.output` with key `{output_key}_{postfix}`.\n nearest_interp: whether to use `nearest` interpolation mode when inverting the spatial transforms,\n default to `True`. 
If `False`, use the same interpolation mode as the original transform.\n it also can be a list of bool, each matches to the `output_keys` data.\n to_tensor: whether to convert the inverted data into PyTorch Tensor first, default to `True`.\n it also can be a list of bool, each matches to the `output_keys` data.\n device: if converted to Tensor, move the inverted results to target device before `post_func`,\n default to \"cpu\", it also can be a list of string or `torch.device`,\n each matches to the `output_keys` data.\n post_func: post processing for the inverted data, should be a callable function.\n it also can be a list of callable, each matches to the `output_keys` data.\n num_workers: number of workers when run data loader for inverse transforms,\n default to 0 as only run one iteration and multi-processing may be even slower.\n Set to `None`, to use the `num_workers` of the input transform data loader.\n\n \"\"\"\n self.transform = transform\n self.inverter = BatchInverseTransform(\n transform=transform,\n loader=loader,\n collate_fn=collate_fn,\n num_workers=num_workers,\n )\n self.output_keys = ensure_tuple(output_keys)\n self.batch_keys = ensure_tuple_rep(batch_keys, len(self.output_keys))\n self.meta_key_postfix = meta_key_postfix\n self.postfix = postfix\n self.nearest_interp = ensure_tuple_rep(nearest_interp, len(self.output_keys))\n self.to_tensor = ensure_tuple_rep(to_tensor, len(self.output_keys))\n self.device = ensure_tuple_rep(device, len(self.output_keys))\n self.post_func = ensure_tuple_rep(post_func, len(self.output_keys))\n self._totensor = ToTensor()\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n engine.add_event_handler(IterationEvents.MODEL_COMPLETED, self)\n\n def __call__(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n for output_key, batch_key, nearest_interp, to_tensor, device, post_func in zip(\n self.output_keys, self.batch_keys, self.nearest_interp, self.to_tensor, self.device, self.post_func\n ):\n transform_key = batch_key + InverseKeys.KEY_SUFFIX\n if transform_key not in engine.state.batch:\n warnings.warn(f\"all the transforms on `{batch_key}` are not InvertibleTransform.\")\n continue\n\n transform_info = engine.state.batch[transform_key]\n if nearest_interp:\n transform_info = convert_inverse_interp_mode(\n trans_info=deepcopy(transform_info),\n mode=\"nearest\",\n align_corners=None,\n )\n\n segs_dict = {\n batch_key: engine.state.output[output_key],\n transform_key: transform_info,\n }\n meta_dict_key = f\"{batch_key}_{self.meta_key_postfix}\"\n if meta_dict_key in engine.state.batch:\n segs_dict[meta_dict_key] = engine.state.batch[meta_dict_key]\n\n with allow_missing_keys_mode(self.transform): # type: ignore\n inverted_key = f\"{output_key}_{self.postfix}\"\n engine.state.output[inverted_key] = [\n post_func(self._totensor(i[batch_key]).to(device) if to_tensor else i[batch_key])\n for i in self.inverter(segs_dict)\n ]\n", "path": "monai/handlers/transform_inverter.py"}]}
num_tokens: 2,030
num_tokens_diff: 1,004
problem_id: gh_patches_debug_2361
source: rasdani/github-patches
task_type: git_diff
in_source_id: tough-dev-school__education-backend-1502
prompt:
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- В админке во всех названиях курса выводить название потока Сейчас совершенно непонятно, к какому потоку принадлежит курс — приходится догадываться по старшинству. Надо, чтобы вот тут (см. ниже) выводилось название ProductGroup, к которому привязан курс. <img width="1511" alt="Screenshot 2022-06-20 at 10 55 18" src="https://user-images.githubusercontent.com/1592663/174552950-bf6ee7e8-6ba7-43f7-af90-5ba2fededfd7.png"> --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `src/products/models/course.py` Content: ``` 1 from django.apps import apps 2 from django.core.exceptions import ValidationError 3 from django.db.models import OuterRef 4 from django.db.models import QuerySet 5 from django.db.models import Subquery 6 from django.utils.translation import gettext_lazy as _ 7 8 from app.files import RandomFileName 9 from app.models import models 10 from mailing.tasks import send_mail 11 from products.models.base import Shippable 12 from users.models import User 13 14 15 class CourseQuerySet(QuerySet): 16 def for_lms(self) -> QuerySet["Course"]: 17 return self.filter( 18 display_in_lms=True, 19 ).with_course_homepage() 20 21 def with_course_homepage(self) -> QuerySet["Course"]: 22 materials = ( 23 apps.get_model("notion.Material") 24 .objects.filter( 25 course=OuterRef("pk"), 26 is_home_page=True, 27 ) 28 .order_by( 29 "-created", 30 ) 31 .values( 32 "page_id", 33 ) 34 ) 35 36 return self.annotate( 37 home_page_slug=Subquery(materials[:1]), 38 ) 39 40 41 CourseManager = models.Manager.from_queryset(CourseQuerySet) 42 43 44 class Course(Shippable): 45 objects = CourseManager() 46 47 name_genitive = models.CharField(_("Genitive name"), max_length=255, help_text="«мастер-класса о TDD». 
К примеру для записей.") 48 zoomus_webinar_id = models.CharField( 49 _("Zoom.us webinar ID"), max_length=255, null=True, blank=True, help_text=_("If set, every user who purcashes this course gets invited") 50 ) 51 52 welcome_letter_template_id = models.CharField( 53 _("Welcome letter template id"), max_length=255, blank=True, null=True, help_text=_("Will be sent upon purchase if set") 54 ) 55 gift_welcome_letter_template_id = models.CharField( 56 _("Special welcome letter template id for gifts"), max_length=255, blank=True, null=True, help_text=_("If not set, common welcome letter will be used") 57 ) 58 display_in_lms = models.BooleanField(_("Display in LMS"), default=True, help_text=_("If disabled will not be shown in LMS")) 59 60 diploma_template_context = models.JSONField(default=dict, blank=True) 61 62 disable_triggers = models.BooleanField(_("Disable all triggers"), default=False) 63 64 confirmation_template_id = models.CharField( 65 _("Confirmation template id"), 66 max_length=255, 67 null=True, 68 blank=True, 69 help_text=_("If set user sill receive this message upon creating zero-priced order"), 70 ) 71 confirmation_success_url = models.URLField(_("Confirmation success URL"), null=True, blank=True) 72 73 cover = models.ImageField( 74 verbose_name=_("Cover image"), 75 upload_to=RandomFileName("courses/covers"), 76 blank=True, 77 help_text=_("The cover image of course"), 78 ) 79 80 class Meta: 81 ordering = ["-id"] 82 verbose_name = _("Course") 83 verbose_name_plural = _("Courses") 84 db_table = "courses_course" 85 86 def clean(self): 87 """Check for correct setting of confirmation_template_id and confirmation_success_url""" 88 if not self.confirmation_template_id and not self.confirmation_success_url: 89 return 90 91 if not all([self.confirmation_template_id, self.confirmation_success_url]): 92 raise ValidationError(_("Both confirmation_template_id and confirmation_success_url must be set")) 93 94 if self.price != 0: 95 raise ValidationError(_("Courses with confirmation should have zero price")) 96 97 def get_purchased_users(self) -> QuerySet[User]: 98 return User.objects.filter( 99 pk__in=apps.get_model("studying.Study").objects.filter(course=self).values_list("student", flat=True), 100 ) 101 102 def send_email_to_all_purchased_users(self, template_id: str): 103 for user in self.get_purchased_users().iterator(): 104 send_mail.delay( 105 to=user.email, 106 template_id=template_id, 107 ) 108 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
golden_diff:
diff --git a/src/products/models/course.py b/src/products/models/course.py
--- a/src/products/models/course.py
+++ b/src/products/models/course.py
@@ -105,3 +105,11 @@
                 to=user.email,
                 template_id=template_id,
             )
+
+    def __str__(self) -> str:
+        name = getattr(self, "name", None)
+        group = getattr(self, "group", None)
+        if name is not None and group is not None:
+            return f"{name} - {group.name}"
+
+        return super().__str__()
verification_info:
{"golden_diff": "diff --git a/src/products/models/course.py b/src/products/models/course.py\n--- a/src/products/models/course.py\n+++ b/src/products/models/course.py\n@@ -105,3 +105,11 @@\n to=user.email,\n template_id=template_id,\n )\n+\n+ def __str__(self) -> str:\n+ name = getattr(self, \"name\", None)\n+ group = getattr(self, \"group\", None)\n+ if name is not None and group is not None:\n+ return f\"{name} - {group.name}\"\n+\n+ return super().__str__()\n", "issue": "\u0412 \u0430\u0434\u043c\u0438\u043d\u043a\u0435 \u0432\u043e \u0432\u0441\u0435\u0445 \u043d\u0430\u0437\u0432\u0430\u043d\u0438\u044f\u0445 \u043a\u0443\u0440\u0441\u0430 \u0432\u044b\u0432\u043e\u0434\u0438\u0442\u044c \u043d\u0430\u0437\u0432\u0430\u043d\u0438\u0435 \u043f\u043e\u0442\u043e\u043a\u0430\n\u0421\u0435\u0439\u0447\u0430\u0441 \u0441\u043e\u0432\u0435\u0440\u0448\u0435\u043d\u043d\u043e \u043d\u0435\u043f\u043e\u043d\u044f\u0442\u043d\u043e, \u043a \u043a\u0430\u043a\u043e\u043c\u0443 \u043f\u043e\u0442\u043e\u043a\u0443 \u043f\u0440\u0438\u043d\u0430\u0434\u043b\u0435\u0436\u0438\u0442 \u043a\u0443\u0440\u0441 \u2014\u00a0\u043f\u0440\u0438\u0445\u043e\u0434\u0438\u0442\u0441\u044f \u0434\u043e\u0433\u0430\u0434\u044b\u0432\u0430\u0442\u044c\u0441\u044f \u043f\u043e \u0441\u0442\u0430\u0440\u0448\u0438\u043d\u0441\u0442\u0432\u0443. \u041d\u0430\u0434\u043e, \u0447\u0442\u043e\u0431\u044b \u0432\u043e\u0442 \u0442\u0443\u0442 (\u0441\u043c. \u043d\u0438\u0436\u0435) \u0432\u044b\u0432\u043e\u0434\u0438\u043b\u043e\u0441\u044c \u043d\u0430\u0437\u0432\u0430\u043d\u0438\u0435 ProductGroup, \u043a \u043a\u043e\u0442\u043e\u0440\u043e\u043c\u0443 \u043f\u0440\u0438\u0432\u044f\u0437\u0430\u043d \u043a\u0443\u0440\u0441.\r\n\r\n<img width=\"1511\" alt=\"Screenshot 2022-06-20 at 10 55 18\" src=\"https://user-images.githubusercontent.com/1592663/174552950-bf6ee7e8-6ba7-43f7-af90-5ba2fededfd7.png\">\r\n\r\n\n", "before_files": [{"content": "from django.apps import apps\nfrom django.core.exceptions import ValidationError\nfrom django.db.models import OuterRef\nfrom django.db.models import QuerySet\nfrom django.db.models import Subquery\nfrom django.utils.translation import gettext_lazy as _\n\nfrom app.files import RandomFileName\nfrom app.models import models\nfrom mailing.tasks import send_mail\nfrom products.models.base import Shippable\nfrom users.models import User\n\n\nclass CourseQuerySet(QuerySet):\n def for_lms(self) -> QuerySet[\"Course\"]:\n return self.filter(\n display_in_lms=True,\n ).with_course_homepage()\n\n def with_course_homepage(self) -> QuerySet[\"Course\"]:\n materials = (\n apps.get_model(\"notion.Material\")\n .objects.filter(\n course=OuterRef(\"pk\"),\n is_home_page=True,\n )\n .order_by(\n \"-created\",\n )\n .values(\n \"page_id\",\n )\n )\n\n return self.annotate(\n home_page_slug=Subquery(materials[:1]),\n )\n\n\nCourseManager = models.Manager.from_queryset(CourseQuerySet)\n\n\nclass Course(Shippable):\n objects = CourseManager()\n\n name_genitive = models.CharField(_(\"Genitive name\"), max_length=255, help_text=\"\u00ab\u043c\u0430\u0441\u0442\u0435\u0440-\u043a\u043b\u0430\u0441\u0441\u0430 \u043e TDD\u00bb. 
\u041a \u043f\u0440\u0438\u043c\u0435\u0440\u0443 \u0434\u043b\u044f \u0437\u0430\u043f\u0438\u0441\u0435\u0439.\")\n zoomus_webinar_id = models.CharField(\n _(\"Zoom.us webinar ID\"), max_length=255, null=True, blank=True, help_text=_(\"If set, every user who purcashes this course gets invited\")\n )\n\n welcome_letter_template_id = models.CharField(\n _(\"Welcome letter template id\"), max_length=255, blank=True, null=True, help_text=_(\"Will be sent upon purchase if set\")\n )\n gift_welcome_letter_template_id = models.CharField(\n _(\"Special welcome letter template id for gifts\"), max_length=255, blank=True, null=True, help_text=_(\"If not set, common welcome letter will be used\")\n )\n display_in_lms = models.BooleanField(_(\"Display in LMS\"), default=True, help_text=_(\"If disabled will not be shown in LMS\"))\n\n diploma_template_context = models.JSONField(default=dict, blank=True)\n\n disable_triggers = models.BooleanField(_(\"Disable all triggers\"), default=False)\n\n confirmation_template_id = models.CharField(\n _(\"Confirmation template id\"),\n max_length=255,\n null=True,\n blank=True,\n help_text=_(\"If set user sill receive this message upon creating zero-priced order\"),\n )\n confirmation_success_url = models.URLField(_(\"Confirmation success URL\"), null=True, blank=True)\n\n cover = models.ImageField(\n verbose_name=_(\"Cover image\"),\n upload_to=RandomFileName(\"courses/covers\"),\n blank=True,\n help_text=_(\"The cover image of course\"),\n )\n\n class Meta:\n ordering = [\"-id\"]\n verbose_name = _(\"Course\")\n verbose_name_plural = _(\"Courses\")\n db_table = \"courses_course\"\n\n def clean(self):\n \"\"\"Check for correct setting of confirmation_template_id and confirmation_success_url\"\"\"\n if not self.confirmation_template_id and not self.confirmation_success_url:\n return\n\n if not all([self.confirmation_template_id, self.confirmation_success_url]):\n raise ValidationError(_(\"Both confirmation_template_id and confirmation_success_url must be set\"))\n\n if self.price != 0:\n raise ValidationError(_(\"Courses with confirmation should have zero price\"))\n\n def get_purchased_users(self) -> QuerySet[User]:\n return User.objects.filter(\n pk__in=apps.get_model(\"studying.Study\").objects.filter(course=self).values_list(\"student\", flat=True),\n )\n\n def send_email_to_all_purchased_users(self, template_id: str):\n for user in self.get_purchased_users().iterator():\n send_mail.delay(\n to=user.email,\n template_id=template_id,\n )\n", "path": "src/products/models/course.py"}], "after_files": [{"content": "from django.apps import apps\nfrom django.core.exceptions import ValidationError\nfrom django.db.models import OuterRef\nfrom django.db.models import QuerySet\nfrom django.db.models import Subquery\nfrom django.utils.translation import gettext_lazy as _\n\nfrom app.files import RandomFileName\nfrom app.models import models\nfrom mailing.tasks import send_mail\nfrom products.models.base import Shippable\nfrom users.models import User\n\n\nclass CourseQuerySet(QuerySet):\n def for_lms(self) -> QuerySet[\"Course\"]:\n return self.filter(\n display_in_lms=True,\n ).with_course_homepage()\n\n def with_course_homepage(self) -> QuerySet[\"Course\"]:\n materials = (\n apps.get_model(\"notion.Material\")\n .objects.filter(\n course=OuterRef(\"pk\"),\n is_home_page=True,\n )\n .order_by(\n \"-created\",\n )\n .values(\n \"page_id\",\n )\n )\n\n return self.annotate(\n home_page_slug=Subquery(materials[:1]),\n )\n\n\nCourseManager = 
models.Manager.from_queryset(CourseQuerySet)\n\n\nclass Course(Shippable):\n objects = CourseManager()\n\n name_genitive = models.CharField(_(\"Genitive name\"), max_length=255, help_text=\"\u00ab\u043c\u0430\u0441\u0442\u0435\u0440-\u043a\u043b\u0430\u0441\u0441\u0430 \u043e TDD\u00bb. \u041a \u043f\u0440\u0438\u043c\u0435\u0440\u0443 \u0434\u043b\u044f \u0437\u0430\u043f\u0438\u0441\u0435\u0439.\")\n zoomus_webinar_id = models.CharField(\n _(\"Zoom.us webinar ID\"), max_length=255, null=True, blank=True, help_text=_(\"If set, every user who purcashes this course gets invited\")\n )\n\n welcome_letter_template_id = models.CharField(\n _(\"Welcome letter template id\"), max_length=255, blank=True, null=True, help_text=_(\"Will be sent upon purchase if set\")\n )\n gift_welcome_letter_template_id = models.CharField(\n _(\"Special welcome letter template id for gifts\"), max_length=255, blank=True, null=True, help_text=_(\"If not set, common welcome letter will be used\")\n )\n display_in_lms = models.BooleanField(_(\"Display in LMS\"), default=True, help_text=_(\"If disabled will not be shown in LMS\"))\n\n diploma_template_context = models.JSONField(default=dict, blank=True)\n\n disable_triggers = models.BooleanField(_(\"Disable all triggers\"), default=False)\n\n confirmation_template_id = models.CharField(\n _(\"Confirmation template id\"),\n max_length=255,\n null=True,\n blank=True,\n help_text=_(\"If set user sill receive this message upon creating zero-priced order\"),\n )\n confirmation_success_url = models.URLField(_(\"Confirmation success URL\"), null=True, blank=True)\n\n cover = models.ImageField(\n verbose_name=_(\"Cover image\"),\n upload_to=RandomFileName(\"courses/covers\"),\n blank=True,\n help_text=_(\"The cover image of course\"),\n )\n\n class Meta:\n ordering = [\"-id\"]\n verbose_name = _(\"Course\")\n verbose_name_plural = _(\"Courses\")\n db_table = \"courses_course\"\n\n def clean(self):\n \"\"\"Check for correct setting of confirmation_template_id and confirmation_success_url\"\"\"\n if not self.confirmation_template_id and not self.confirmation_success_url:\n return\n\n if not all([self.confirmation_template_id, self.confirmation_success_url]):\n raise ValidationError(_(\"Both confirmation_template_id and confirmation_success_url must be set\"))\n\n if self.price != 0:\n raise ValidationError(_(\"Courses with confirmation should have zero price\"))\n\n def get_purchased_users(self) -> QuerySet[User]:\n return User.objects.filter(\n pk__in=apps.get_model(\"studying.Study\").objects.filter(course=self).values_list(\"student\", flat=True),\n )\n\n def send_email_to_all_purchased_users(self, template_id: str):\n for user in self.get_purchased_users().iterator():\n send_mail.delay(\n to=user.email,\n template_id=template_id,\n )\n\n def __str__(self) -> str:\n name = getattr(self, \"name\", None)\n group = getattr(self, \"group\", None)\n if name is not None and group is not None:\n return f\"{name} - {group.name}\"\n\n return super().__str__()\n", "path": "src/products/models/course.py"}]}
num_tokens: 1,473
num_tokens_diff: 130
problem_id: gh_patches_debug_28795
source: rasdani/github-patches
task_type: git_diff
in_source_id: readthedocs__readthedocs.org-548
prompt:
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- mercurial project imported from bitbucket stuck in 'Triggered' state The docs for pylibftdi are set to be built (via a POST trigger) from https://bitbucket.org/codedstructure/pylibftdi, but builds (https://readthedocs.org/builds/pylibftdi/) are stuck at 'Triggered'. Based on comments in #435 I set the project up to build against a github mirror, and that worked successfully, so it seems (from #435) that this is likely an hg issue. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `readthedocs/vcs_support/backends/hg.py` Content: ``` 1 import csv 2 from StringIO import StringIO 3 4 from projects.exceptions import ProjectImportError 5 from vcs_support.base import BaseVCS, VCSVersion 6 7 8 class Backend(BaseVCS): 9 supports_tags = True 10 supports_branches = True 11 fallback_branch = 'default' 12 13 def update(self): 14 super(Backend, self).update() 15 retcode = self.run('hg', 'status')[0] 16 if retcode == 0: 17 return self.pull() 18 else: 19 return self.clone() 20 21 def pull(self): 22 pull_output = self.run('hg', 'pull') 23 if pull_output[0] != 0: 24 raise ProjectImportError( 25 ("Failed to get code from '%s' (hg pull): %s" 26 % (self.repo_url, pull_output[0])) 27 ) 28 update_output = self.run('hg', 'update', '-C')[0] 29 if update_output[0] != 0: 30 raise ProjectImportError( 31 ("Failed to get code from '%s' (hg update): %s" 32 % (self.repo_url, pull_output[0])) 33 ) 34 return update_output 35 36 def clone(self): 37 output = self.run('hg', 'clone', self.repo_url, '.') 38 if output[0] != 0: 39 raise ProjectImportError( 40 ("Failed to get code from '%s' (hg clone): %s" 41 % (self.repo_url, output[0])) 42 ) 43 return output 44 45 @property 46 def branches(self): 47 retcode, stdout = self.run('hg', 'branches', '-q')[:2] 48 # error (or no tags found) 49 if retcode != 0: 50 return [] 51 return self.parse_branches(stdout) 52 53 def parse_branches(self, data): 54 """ 55 stable 56 default 57 """ 58 59 names = [name.lstrip() for name in data.splitlines()] 60 return [VCSVersion(self, name, name) for name in names if name] 61 62 @property 63 def tags(self): 64 retcode, stdout = self.run('hg', 'tags')[:2] 65 # error (or no tags found) 66 if retcode != 0: 67 return [] 68 return self.parse_tags(stdout) 69 70 def parse_tags(self, data): 71 """ 72 Parses output of show-ref --tags, eg: 73 74 tip 278:c4b2d21db51a 75 0.2.2 152:6b0364d98837 76 0.2.1 117:a14b7b6ffa03 77 0.1 50:30c2c6b3a055 78 """ 79 # parse the lines into a list of tuples (commit-hash, tag ref name) 80 raw_tags = csv.reader(StringIO(data), delimiter=' ') 81 vcs_tags = [] 82 for row in raw_tags: 83 row = filter(lambda f: f != '', row) 84 if row == []: 85 continue 86 name, commit = row 87 if name == 'tip': 88 continue 89 revision, commit_hash = commit.split(':') 90 vcs_tags.append(VCSVersion(self, commit_hash, name)) 91 return vcs_tags 92 93 def checkout(self, identifier=None): 94 super(Backend, self).checkout() 95 if not identifier: 96 identifier = 'tip' 97 retcode = self.run('hg', 'status')[0] 98 if retcode == 0: 99 self.run('hg', 'pull') 100 return self.run('hg', 'update', '-C', identifier) 101 else: 102 self.clone() 103 return self.run('hg', 'update', '-C', identifier) 104 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
golden_diff:
diff --git a/readthedocs/vcs_support/backends/hg.py b/readthedocs/vcs_support/backends/hg.py
--- a/readthedocs/vcs_support/backends/hg.py
+++ b/readthedocs/vcs_support/backends/hg.py
@@ -1,6 +1,3 @@
-import csv
-from StringIO import StringIO
-
 from projects.exceptions import ProjectImportError
 from vcs_support.base import BaseVCS, VCSVersion
 
@@ -69,19 +66,24 @@
 
     def parse_tags(self, data):
         """
-        Parses output of show-ref --tags, eg:
+        Parses output of `hg tags`, eg:
+
+        tip 278:c4b2d21db51a
+        0.2.2 152:6b0364d98837
+        0.2.1 117:a14b7b6ffa03
+        0.1 50:30c2c6b3a055
+        maintenance release 1 10:f83c32fe8126
 
-        tip 278:c4b2d21db51a
-        0.2.2 152:6b0364d98837
-        0.2.1 117:a14b7b6ffa03
-        0.1 50:30c2c6b3a055
+        Into VCSVersion objects with the tag name as verbose_name and the
+        commit hash as identifier.
         """
-        # parse the lines into a list of tuples (commit-hash, tag ref name)
-        raw_tags = csv.reader(StringIO(data), delimiter=' ')
         vcs_tags = []
-        for row in raw_tags:
-            row = filter(lambda f: f != '', row)
-            if row == []:
+        tag_lines = [line.strip() for line in data.splitlines()]
+        # starting from the rhs of each line, split a single value (changeset)
+        # off at whitespace; the tag name is the string to the left of that
+        tag_pairs = [line.rsplit(None, 1) for line in tag_lines]
+        for row in tag_pairs:
+            if len(row) != 2:
                 continue
             name, commit = row
             if name == 'tip':
verification_info:
{"golden_diff": "diff --git a/readthedocs/vcs_support/backends/hg.py b/readthedocs/vcs_support/backends/hg.py\n--- a/readthedocs/vcs_support/backends/hg.py\n+++ b/readthedocs/vcs_support/backends/hg.py\n@@ -1,6 +1,3 @@\n-import csv\n-from StringIO import StringIO\n-\n from projects.exceptions import ProjectImportError\n from vcs_support.base import BaseVCS, VCSVersion\n \n@@ -69,19 +66,24 @@\n \n def parse_tags(self, data):\n \"\"\"\n- Parses output of show-ref --tags, eg:\n+ Parses output of `hg tags`, eg:\n+\n+ tip 278:c4b2d21db51a\n+ 0.2.2 152:6b0364d98837\n+ 0.2.1 117:a14b7b6ffa03\n+ 0.1 50:30c2c6b3a055\n+ maintenance release 1 10:f83c32fe8126\n \n- tip 278:c4b2d21db51a\n- 0.2.2 152:6b0364d98837\n- 0.2.1 117:a14b7b6ffa03\n- 0.1 50:30c2c6b3a055\n+ Into VCSVersion objects with the tag name as verbose_name and the\n+ commit hash as identifier.\n \"\"\"\n- # parse the lines into a list of tuples (commit-hash, tag ref name)\n- raw_tags = csv.reader(StringIO(data), delimiter=' ')\n vcs_tags = []\n- for row in raw_tags:\n- row = filter(lambda f: f != '', row)\n- if row == []:\n+ tag_lines = [line.strip() for line in data.splitlines()]\n+ # starting from the rhs of each line, split a single value (changeset)\n+ # off at whitespace; the tag name is the string to the left of that\n+ tag_pairs = [line.rsplit(None, 1) for line in tag_lines]\n+ for row in tag_pairs:\n+ if len(row) != 2:\n continue\n name, commit = row\n if name == 'tip':\n", "issue": "mercurial project imported from bitbucket stuck in 'Triggered' state\nThe docs for pylibftdi are set to be built (via a POST trigger) from https://bitbucket.org/codedstructure/pylibftdi, but builds (https://readthedocs.org/builds/pylibftdi/) are stuck at 'Triggered'.\n\nBased on comments in #435 I set the project up to build against a github mirror, and that worked successfully, so it seems (from #435) that this is likely an hg issue.\n\n", "before_files": [{"content": "import csv\nfrom StringIO import StringIO\n\nfrom projects.exceptions import ProjectImportError\nfrom vcs_support.base import BaseVCS, VCSVersion\n\n\nclass Backend(BaseVCS):\n supports_tags = True\n supports_branches = True\n fallback_branch = 'default'\n\n def update(self):\n super(Backend, self).update()\n retcode = self.run('hg', 'status')[0]\n if retcode == 0:\n return self.pull()\n else:\n return self.clone()\n\n def pull(self):\n pull_output = self.run('hg', 'pull')\n if pull_output[0] != 0:\n raise ProjectImportError(\n (\"Failed to get code from '%s' (hg pull): %s\"\n % (self.repo_url, pull_output[0]))\n )\n update_output = self.run('hg', 'update', '-C')[0]\n if update_output[0] != 0:\n raise ProjectImportError(\n (\"Failed to get code from '%s' (hg update): %s\"\n % (self.repo_url, pull_output[0]))\n )\n return update_output\n\n def clone(self):\n output = self.run('hg', 'clone', self.repo_url, '.')\n if output[0] != 0:\n raise ProjectImportError(\n (\"Failed to get code from '%s' (hg clone): %s\"\n % (self.repo_url, output[0]))\n )\n return output\n\n @property\n def branches(self):\n retcode, stdout = self.run('hg', 'branches', '-q')[:2]\n # error (or no tags found)\n if retcode != 0:\n return []\n return self.parse_branches(stdout)\n\n def parse_branches(self, data):\n \"\"\"\n stable\n default\n \"\"\"\n\n names = [name.lstrip() for name in data.splitlines()]\n return [VCSVersion(self, name, name) for name in names if name]\n\n @property\n def tags(self):\n retcode, stdout = self.run('hg', 'tags')[:2]\n # error (or no tags found)\n if retcode != 0:\n return []\n return 
self.parse_tags(stdout)\n\n def parse_tags(self, data):\n \"\"\"\n Parses output of show-ref --tags, eg:\n\n tip 278:c4b2d21db51a\n 0.2.2 152:6b0364d98837\n 0.2.1 117:a14b7b6ffa03\n 0.1 50:30c2c6b3a055\n \"\"\"\n # parse the lines into a list of tuples (commit-hash, tag ref name)\n raw_tags = csv.reader(StringIO(data), delimiter=' ')\n vcs_tags = []\n for row in raw_tags:\n row = filter(lambda f: f != '', row)\n if row == []:\n continue\n name, commit = row\n if name == 'tip':\n continue\n revision, commit_hash = commit.split(':')\n vcs_tags.append(VCSVersion(self, commit_hash, name))\n return vcs_tags\n\n def checkout(self, identifier=None):\n super(Backend, self).checkout()\n if not identifier:\n identifier = 'tip'\n retcode = self.run('hg', 'status')[0]\n if retcode == 0:\n self.run('hg', 'pull')\n return self.run('hg', 'update', '-C', identifier)\n else:\n self.clone()\n return self.run('hg', 'update', '-C', identifier)\n", "path": "readthedocs/vcs_support/backends/hg.py"}], "after_files": [{"content": "from projects.exceptions import ProjectImportError\nfrom vcs_support.base import BaseVCS, VCSVersion\n\n\nclass Backend(BaseVCS):\n supports_tags = True\n supports_branches = True\n fallback_branch = 'default'\n\n def update(self):\n super(Backend, self).update()\n retcode = self.run('hg', 'status')[0]\n if retcode == 0:\n return self.pull()\n else:\n return self.clone()\n\n def pull(self):\n pull_output = self.run('hg', 'pull')\n if pull_output[0] != 0:\n raise ProjectImportError(\n (\"Failed to get code from '%s' (hg pull): %s\"\n % (self.repo_url, pull_output[0]))\n )\n update_output = self.run('hg', 'update', '-C')[0]\n if update_output[0] != 0:\n raise ProjectImportError(\n (\"Failed to get code from '%s' (hg update): %s\"\n % (self.repo_url, pull_output[0]))\n )\n return update_output\n\n def clone(self):\n output = self.run('hg', 'clone', self.repo_url, '.')\n if output[0] != 0:\n raise ProjectImportError(\n (\"Failed to get code from '%s' (hg clone): %s\"\n % (self.repo_url, output[0]))\n )\n return output\n\n @property\n def branches(self):\n retcode, stdout = self.run('hg', 'branches', '-q')[:2]\n # error (or no tags found)\n if retcode != 0:\n return []\n return self.parse_branches(stdout)\n\n def parse_branches(self, data):\n \"\"\"\n stable\n default\n \"\"\"\n\n names = [name.lstrip() for name in data.splitlines()]\n return [VCSVersion(self, name, name) for name in names if name]\n\n @property\n def tags(self):\n retcode, stdout = self.run('hg', 'tags')[:2]\n # error (or no tags found)\n if retcode != 0:\n return []\n return self.parse_tags(stdout)\n\n def parse_tags(self, data):\n \"\"\"\n Parses output of `hg tags`, eg:\n\n tip 278:c4b2d21db51a\n 0.2.2 152:6b0364d98837\n 0.2.1 117:a14b7b6ffa03\n 0.1 50:30c2c6b3a055\n maintenance release 1 10:f83c32fe8126\n\n Into VCSVersion objects with the tag name as verbose_name and the\n commit hash as identifier.\n \"\"\"\n vcs_tags = []\n tag_lines = [line.strip() for line in data.splitlines()]\n # starting from the rhs of each line, split a single value (changeset)\n # off at whitespace; the tag name is the string to the left of that\n tag_pairs = [line.rsplit(None, 1) for line in tag_lines]\n for row in tag_pairs:\n if len(row) != 2:\n continue\n name, commit = row\n if name == 'tip':\n continue\n revision, commit_hash = commit.split(':')\n vcs_tags.append(VCSVersion(self, commit_hash, name))\n return vcs_tags\n\n def checkout(self, identifier=None):\n super(Backend, self).checkout()\n if not identifier:\n identifier = 'tip'\n 
retcode = self.run('hg', 'status')[0]\n if retcode == 0:\n self.run('hg', 'pull')\n return self.run('hg', 'update', '-C', identifier)\n else:\n self.clone()\n return self.run('hg', 'update', '-C', identifier)\n", "path": "readthedocs/vcs_support/backends/hg.py"}]}
num_tokens: 1,390
num_tokens_diff: 559
problem_id: gh_patches_debug_3875
source: rasdani/github-patches
task_type: git_diff
in_source_id: kartoza__prj.app-813
prompt:
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Error 500: Editing Answers. # Problem When I select the edit option for the answers on http://changelog.qgis.org/id/inasafe-realtime2/ Then I get error 500. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `django_project/lesson/views/answer.py` Content: ``` 1 # coding=utf-8 2 """Answer views.""" 3 4 from django.core.urlresolvers import reverse 5 from django.views.generic import ( 6 CreateView, 7 DeleteView, 8 UpdateView, 9 ) 10 from django.shortcuts import get_object_or_404 11 from django.utils.translation import ugettext_lazy as _ 12 13 from braces.views import LoginRequiredMixin 14 15 from lesson.forms.answer import AnswerForm 16 from lesson.models.answer import Answer 17 from lesson.models.worksheet_question import WorksheetQuestion 18 19 20 class AnswerMixin(object): 21 """Mixin class to provide standard settings for Answer.""" 22 23 model = Answer 24 form_class = AnswerForm 25 26 27 class AnswerCreateView( 28 LoginRequiredMixin, AnswerMixin, CreateView): 29 """Create view for Answer.""" 30 31 context_object_name = 'answer' 32 template_name = 'create.html' 33 creation_label = _('Add answer') 34 35 def get_success_url(self): 36 """Define the redirect URL 37 38 After successful creation of the object, the User will be redirected 39 to the unapproved Version list page for the object's parent Worksheet 40 41 :returns: URL 42 :rtype: HttpResponse 43 """ 44 return reverse('worksheet-detail', kwargs={ 45 'pk': self.object.question.worksheet.pk, 46 'section_slug': self.object.question.worksheet.section.slug, 47 'project_slug': self.object.question.worksheet.section.project.slug 48 }) 49 50 def get_form_kwargs(self): 51 """Get keyword arguments from form. 52 53 :returns keyword argument from the form 54 :rtype dict 55 """ 56 kwargs = super(AnswerCreateView, self).get_form_kwargs() 57 pk = self.kwargs['question_pk'] 58 kwargs['question'] = get_object_or_404(WorksheetQuestion, pk=pk) 59 return kwargs 60 61 62 # noinspection PyAttributeOutsideInit 63 class AnswerDeleteView( 64 LoginRequiredMixin, 65 AnswerMixin, 66 DeleteView): 67 """Delete view for Answer.""" 68 69 context_object_name = 'answer' 70 template_name = 'answer/delete.html' 71 72 def get_success_url(self): 73 """Define the redirect URL. 74 75 After successful deletion of the object, the User will be redirected 76 to the Certifying Organisation list page 77 for the object's parent Worksheet. 78 79 :returns: URL 80 :rtype: HttpResponse 81 """ 82 return reverse('worksheet-detail', kwargs={ 83 'pk': self.object.question.worksheet.pk, 84 'section_slug': self.object.question.worksheet.section.slug, 85 'project_slug': self.object.question.worksheet.section.project.slug 86 }) 87 88 89 # noinspection PyAttributeOutsideInit 90 class AnswerUpdateView( 91 LoginRequiredMixin, 92 AnswerMixin, 93 UpdateView): 94 """Update view for Answer.""" 95 96 context_object_name = 'answer' 97 template_name = 'update.html' 98 update_label = _('Update answer') 99 100 def get_form_kwargs(self): 101 """Get keyword arguments from form. 102 103 :returns keyword argument from the form 104 :rtype: dict 105 """ 106 kwargs = super(AnswerUpdateView, self).get_form_kwargs() 107 answer = get_object_or_404(Answer, self.pk_url_kwarg) 108 kwargs['question'] = answer.question 109 return kwargs 110 111 def get_success_url(self): 112 """Define the redirect URL. 
113 114 After successful update of the object, the User will be redirected to 115 the specification list page for the object's parent Worksheet. 116 117 :returns: URL 118 :rtype: HttpResponse 119 """ 120 return reverse('worksheet-detail', kwargs={ 121 'pk': self.object.question.worksheet.pk, 122 'section_slug': self.object.question.worksheet.section.slug, 123 'project_slug': self.object.question.worksheet.section.project.slug 124 }) 125 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
golden_diff:
diff --git a/django_project/lesson/views/answer.py b/django_project/lesson/views/answer.py
--- a/django_project/lesson/views/answer.py
+++ b/django_project/lesson/views/answer.py
@@ -104,7 +104,7 @@
         :rtype: dict
         """
         kwargs = super(AnswerUpdateView, self).get_form_kwargs()
-        answer = get_object_or_404(Answer, self.pk_url_kwarg)
+        answer = get_object_or_404(Answer, pk=kwargs['instance'].pk)
         kwargs['question'] = answer.question
         return kwargs
verification_info:
{"golden_diff": "diff --git a/django_project/lesson/views/answer.py b/django_project/lesson/views/answer.py\n--- a/django_project/lesson/views/answer.py\n+++ b/django_project/lesson/views/answer.py\n@@ -104,7 +104,7 @@\n :rtype: dict\n \"\"\"\n kwargs = super(AnswerUpdateView, self).get_form_kwargs()\n- answer = get_object_or_404(Answer, self.pk_url_kwarg)\n+ answer = get_object_or_404(Answer, pk=kwargs['instance'].pk)\n kwargs['question'] = answer.question\n return kwargs\n", "issue": "Error 500: Editing Answers.\n# Problem\r\n\r\nWhen I select the edit option for the answers on http://changelog.qgis.org/id/inasafe-realtime2/\r\nThen I get error 500.\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"Answer views.\"\"\"\n\nfrom django.core.urlresolvers import reverse\nfrom django.views.generic import (\n CreateView,\n DeleteView,\n UpdateView,\n)\nfrom django.shortcuts import get_object_or_404\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom braces.views import LoginRequiredMixin\n\nfrom lesson.forms.answer import AnswerForm\nfrom lesson.models.answer import Answer\nfrom lesson.models.worksheet_question import WorksheetQuestion\n\n\nclass AnswerMixin(object):\n \"\"\"Mixin class to provide standard settings for Answer.\"\"\"\n\n model = Answer\n form_class = AnswerForm\n\n\nclass AnswerCreateView(\n LoginRequiredMixin, AnswerMixin, CreateView):\n \"\"\"Create view for Answer.\"\"\"\n\n context_object_name = 'answer'\n template_name = 'create.html'\n creation_label = _('Add answer')\n\n def get_success_url(self):\n \"\"\"Define the redirect URL\n\n After successful creation of the object, the User will be redirected\n to the unapproved Version list page for the object's parent Worksheet\n\n :returns: URL\n :rtype: HttpResponse\n \"\"\"\n return reverse('worksheet-detail', kwargs={\n 'pk': self.object.question.worksheet.pk,\n 'section_slug': self.object.question.worksheet.section.slug,\n 'project_slug': self.object.question.worksheet.section.project.slug\n })\n\n def get_form_kwargs(self):\n \"\"\"Get keyword arguments from form.\n\n :returns keyword argument from the form\n :rtype dict\n \"\"\"\n kwargs = super(AnswerCreateView, self).get_form_kwargs()\n pk = self.kwargs['question_pk']\n kwargs['question'] = get_object_or_404(WorksheetQuestion, pk=pk)\n return kwargs\n\n\n# noinspection PyAttributeOutsideInit\nclass AnswerDeleteView(\n LoginRequiredMixin,\n AnswerMixin,\n DeleteView):\n \"\"\"Delete view for Answer.\"\"\"\n\n context_object_name = 'answer'\n template_name = 'answer/delete.html'\n\n def get_success_url(self):\n \"\"\"Define the redirect URL.\n\n After successful deletion of the object, the User will be redirected\n to the Certifying Organisation list page\n for the object's parent Worksheet.\n\n :returns: URL\n :rtype: HttpResponse\n \"\"\"\n return reverse('worksheet-detail', kwargs={\n 'pk': self.object.question.worksheet.pk,\n 'section_slug': self.object.question.worksheet.section.slug,\n 'project_slug': self.object.question.worksheet.section.project.slug\n })\n\n\n# noinspection PyAttributeOutsideInit\nclass AnswerUpdateView(\n LoginRequiredMixin,\n AnswerMixin,\n UpdateView):\n \"\"\"Update view for Answer.\"\"\"\n\n context_object_name = 'answer'\n template_name = 'update.html'\n update_label = _('Update answer')\n\n def get_form_kwargs(self):\n \"\"\"Get keyword arguments from form.\n\n :returns keyword argument from the form\n :rtype: dict\n \"\"\"\n kwargs = super(AnswerUpdateView, self).get_form_kwargs()\n answer = get_object_or_404(Answer, 
self.pk_url_kwarg)\n kwargs['question'] = answer.question\n return kwargs\n\n def get_success_url(self):\n \"\"\"Define the redirect URL.\n\n After successful update of the object, the User will be redirected to\n the specification list page for the object's parent Worksheet.\n\n :returns: URL\n :rtype: HttpResponse\n \"\"\"\n return reverse('worksheet-detail', kwargs={\n 'pk': self.object.question.worksheet.pk,\n 'section_slug': self.object.question.worksheet.section.slug,\n 'project_slug': self.object.question.worksheet.section.project.slug\n })\n", "path": "django_project/lesson/views/answer.py"}], "after_files": [{"content": "# coding=utf-8\n\"\"\"Answer views.\"\"\"\n\nfrom django.core.urlresolvers import reverse\nfrom django.views.generic import (\n CreateView,\n DeleteView,\n UpdateView,\n)\nfrom django.shortcuts import get_object_or_404\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom braces.views import LoginRequiredMixin\n\nfrom lesson.forms.answer import AnswerForm\nfrom lesson.models.answer import Answer\nfrom lesson.models.worksheet_question import WorksheetQuestion\n\n\nclass AnswerMixin(object):\n \"\"\"Mixin class to provide standard settings for Answer.\"\"\"\n\n model = Answer\n form_class = AnswerForm\n\n\nclass AnswerCreateView(\n LoginRequiredMixin, AnswerMixin, CreateView):\n \"\"\"Create view for Answer.\"\"\"\n\n context_object_name = 'answer'\n template_name = 'create.html'\n creation_label = _('Add answer')\n\n def get_success_url(self):\n \"\"\"Define the redirect URL\n\n After successful creation of the object, the User will be redirected\n to the unapproved Version list page for the object's parent Worksheet\n\n :returns: URL\n :rtype: HttpResponse\n \"\"\"\n return reverse('worksheet-detail', kwargs={\n 'pk': self.object.question.worksheet.pk,\n 'section_slug': self.object.question.worksheet.section.slug,\n 'project_slug': self.object.question.worksheet.section.project.slug\n })\n\n def get_form_kwargs(self):\n \"\"\"Get keyword arguments from form.\n\n :returns keyword argument from the form\n :rtype dict\n \"\"\"\n kwargs = super(AnswerCreateView, self).get_form_kwargs()\n pk = self.kwargs['question_pk']\n kwargs['question'] = get_object_or_404(WorksheetQuestion, pk=pk)\n return kwargs\n\n\n# noinspection PyAttributeOutsideInit\nclass AnswerDeleteView(\n LoginRequiredMixin,\n AnswerMixin,\n DeleteView):\n \"\"\"Delete view for Answer.\"\"\"\n\n context_object_name = 'answer'\n template_name = 'answer/delete.html'\n\n def get_success_url(self):\n \"\"\"Define the redirect URL.\n\n After successful deletion of the object, the User will be redirected\n to the Certifying Organisation list page\n for the object's parent Worksheet.\n\n :returns: URL\n :rtype: HttpResponse\n \"\"\"\n return reverse('worksheet-detail', kwargs={\n 'pk': self.object.question.worksheet.pk,\n 'section_slug': self.object.question.worksheet.section.slug,\n 'project_slug': self.object.question.worksheet.section.project.slug\n })\n\n\n# noinspection PyAttributeOutsideInit\nclass AnswerUpdateView(\n LoginRequiredMixin,\n AnswerMixin,\n UpdateView):\n \"\"\"Update view for Answer.\"\"\"\n\n context_object_name = 'answer'\n template_name = 'update.html'\n update_label = _('Update answer')\n\n def get_form_kwargs(self):\n \"\"\"Get keyword arguments from form.\n\n :returns keyword argument from the form\n :rtype: dict\n \"\"\"\n kwargs = super(AnswerUpdateView, self).get_form_kwargs()\n answer = get_object_or_404(Answer, pk=kwargs['instance'].pk)\n kwargs['question'] = 
answer.question\n return kwargs\n\n def get_success_url(self):\n \"\"\"Define the redirect URL.\n\n After successful update of the object, the User will be redirected to\n the specification list page for the object's parent Worksheet.\n\n :returns: URL\n :rtype: HttpResponse\n \"\"\"\n return reverse('worksheet-detail', kwargs={\n 'pk': self.object.question.worksheet.pk,\n 'section_slug': self.object.question.worksheet.section.slug,\n 'project_slug': self.object.question.worksheet.section.project.slug\n })\n", "path": "django_project/lesson/views/answer.py"}]}
num_tokens: 1,351
num_tokens_diff: 140
problem_id: gh_patches_debug_22768
source: rasdani/github-patches
task_type: git_diff
in_source_id: sql-machine-learning__elasticdl-1384
prompt:
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- [PS-1]Add new RPC services definition in elasticdl.proto according to PS design [PS design](https://github.com/sql-machine-learning/elasticdl/blob/develop/docs/designs/ps_design.md#rpc-definition) adds some new RPC services. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `elasticdl/python/ps/servicer.py` Content: ``` 1 from google.protobuf import empty_pb2 2 3 from elasticdl.proto import elasticdl_pb2_grpc 4 5 6 class PserverServicer(elasticdl_pb2_grpc.PserverServicer): 7 """PS service implementation""" 8 9 def __init__( 10 self, 11 parameters, 12 grads_to_wait, 13 optimizer, 14 lr_staleness_modulation=False, 15 use_async=False, 16 ): 17 self._parameters = parameters 18 self._grads_to_wait = grads_to_wait 19 self._optimizer = optimizer 20 self._lr_staleness_modulation = lr_staleness_modulation 21 self._use_async = use_async 22 self._version = 0 23 24 def pull_variable(self, request, _): 25 # TODO: implement this RPC service 26 return empty_pb2.Empty() 27 28 def pull_embedding_vector(self, request, _): 29 # TODO: implement this RPC service 30 return empty_pb2.Empty() 31 32 def push_model(self, request, _): 33 # TODO: implement this RPC service 34 return empty_pb2.Empty() 35 36 def push_gradient(self, request, _): 37 # TODO: implement this RPC service 38 return empty_pb2.Empty() 39 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
golden_diff:
diff --git a/elasticdl/python/ps/servicer.py b/elasticdl/python/ps/servicer.py
--- a/elasticdl/python/ps/servicer.py
+++ b/elasticdl/python/ps/servicer.py
@@ -1,6 +1,6 @@
 from google.protobuf import empty_pb2
 
-from elasticdl.proto import elasticdl_pb2_grpc
+from elasticdl.proto import elasticdl_pb2, elasticdl_pb2_grpc
 
 
 class PserverServicer(elasticdl_pb2_grpc.PserverServicer):
@@ -23,11 +23,11 @@
 
     def pull_variable(self, request, _):
         # TODO: implement this RPC service
-        return empty_pb2.Empty()
+        return elasticdl_pb2.PullVariableResponse()
 
     def pull_embedding_vector(self, request, _):
         # TODO: implement this RPC service
-        return empty_pb2.Empty()
+        return elasticdl_pb2.Tensor()
 
     def push_model(self, request, _):
         # TODO: implement this RPC service
@@ -35,4 +35,4 @@
 
     def push_gradient(self, request, _):
         # TODO: implement this RPC service
-        return empty_pb2.Empty()
+        return elasticdl_pb2.PushGradientResponse()
verification_info:
{"golden_diff": "diff --git a/elasticdl/python/ps/servicer.py b/elasticdl/python/ps/servicer.py\n--- a/elasticdl/python/ps/servicer.py\n+++ b/elasticdl/python/ps/servicer.py\n@@ -1,6 +1,6 @@\n from google.protobuf import empty_pb2\n \n-from elasticdl.proto import elasticdl_pb2_grpc\n+from elasticdl.proto import elasticdl_pb2, elasticdl_pb2_grpc\n \n \n class PserverServicer(elasticdl_pb2_grpc.PserverServicer):\n@@ -23,11 +23,11 @@\n \n def pull_variable(self, request, _):\n # TODO: implement this RPC service\n- return empty_pb2.Empty()\n+ return elasticdl_pb2.PullVariableResponse()\n \n def pull_embedding_vector(self, request, _):\n # TODO: implement this RPC service\n- return empty_pb2.Empty()\n+ return elasticdl_pb2.Tensor()\n \n def push_model(self, request, _):\n # TODO: implement this RPC service\n@@ -35,4 +35,4 @@\n \n def push_gradient(self, request, _):\n # TODO: implement this RPC service\n- return empty_pb2.Empty()\n+ return elasticdl_pb2.PushGradientResponse()\n", "issue": "[PS-1]Add new RPC services definition in elasticdl.proto according to PS design\n[PS design](https://github.com/sql-machine-learning/elasticdl/blob/develop/docs/designs/ps_design.md#rpc-definition) adds some new RPC services.\n", "before_files": [{"content": "from google.protobuf import empty_pb2\n\nfrom elasticdl.proto import elasticdl_pb2_grpc\n\n\nclass PserverServicer(elasticdl_pb2_grpc.PserverServicer):\n \"\"\"PS service implementation\"\"\"\n\n def __init__(\n self,\n parameters,\n grads_to_wait,\n optimizer,\n lr_staleness_modulation=False,\n use_async=False,\n ):\n self._parameters = parameters\n self._grads_to_wait = grads_to_wait\n self._optimizer = optimizer\n self._lr_staleness_modulation = lr_staleness_modulation\n self._use_async = use_async\n self._version = 0\n\n def pull_variable(self, request, _):\n # TODO: implement this RPC service\n return empty_pb2.Empty()\n\n def pull_embedding_vector(self, request, _):\n # TODO: implement this RPC service\n return empty_pb2.Empty()\n\n def push_model(self, request, _):\n # TODO: implement this RPC service\n return empty_pb2.Empty()\n\n def push_gradient(self, request, _):\n # TODO: implement this RPC service\n return empty_pb2.Empty()\n", "path": "elasticdl/python/ps/servicer.py"}], "after_files": [{"content": "from google.protobuf import empty_pb2\n\nfrom elasticdl.proto import elasticdl_pb2, elasticdl_pb2_grpc\n\n\nclass PserverServicer(elasticdl_pb2_grpc.PserverServicer):\n \"\"\"PS service implementation\"\"\"\n\n def __init__(\n self,\n parameters,\n grads_to_wait,\n optimizer,\n lr_staleness_modulation=False,\n use_async=False,\n ):\n self._parameters = parameters\n self._grads_to_wait = grads_to_wait\n self._optimizer = optimizer\n self._lr_staleness_modulation = lr_staleness_modulation\n self._use_async = use_async\n self._version = 0\n\n def pull_variable(self, request, _):\n # TODO: implement this RPC service\n return elasticdl_pb2.PullVariableResponse()\n\n def pull_embedding_vector(self, request, _):\n # TODO: implement this RPC service\n return elasticdl_pb2.Tensor()\n\n def push_model(self, request, _):\n # TODO: implement this RPC service\n return empty_pb2.Empty()\n\n def push_gradient(self, request, _):\n # TODO: implement this RPC service\n return elasticdl_pb2.PushGradientResponse()\n", "path": "elasticdl/python/ps/servicer.py"}]}
num_tokens: 636
num_tokens_diff: 280
problem_id: gh_patches_debug_12962
source: rasdani/github-patches
task_type: git_diff
in_source_id: mkdocs__mkdocs-615
prompt:
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Only creating wheels for Python 2.7 Seems I didn't set something up correctly. It looks like this is a limitation of `setup.py bdist_wheel` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 4 from __future__ import print_function 5 from setuptools import setup 6 import re 7 import os 8 import sys 9 10 PY26 = sys.version_info[:2] == (2, 6) 11 12 13 long_description = ( 14 "MkDocs is a fast, simple and downright gorgeous static site generator " 15 "that's geared towards building project documentation. Documentation " 16 "source files are written in Markdown, and configured with a single YAML " 17 "configuration file." 18 ) 19 20 21 def get_version(package): 22 """Return package version as listed in `__version__` in `init.py`.""" 23 init_py = open(os.path.join(package, '__init__.py')).read() 24 return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1) 25 26 27 def get_packages(package): 28 """Return root package and all sub-packages.""" 29 return [dirpath 30 for dirpath, dirnames, filenames in os.walk(package) 31 if os.path.exists(os.path.join(dirpath, '__init__.py'))] 32 33 34 def get_package_data(package): 35 """ 36 Return all files under the root package, that are not in a 37 package themselves. 38 """ 39 walk = [(dirpath.replace(package + os.sep, '', 1), filenames) 40 for dirpath, dirnames, filenames in os.walk(package) 41 if not os.path.exists(os.path.join(dirpath, '__init__.py'))] 42 43 filepaths = [] 44 for base, filenames in walk: 45 filepaths.extend([os.path.join(base, filename) 46 for filename in filenames]) 47 return {package: filepaths} 48 49 setup( 50 name="mkdocs", 51 version=get_version("mkdocs"), 52 url='http://www.mkdocs.org', 53 license='BSD', 54 description='Project documentation with Markdown.', 55 long_description=long_description, 56 author='Tom Christie', 57 author_email='[email protected]', # SEE NOTE BELOW (*) 58 packages=get_packages("mkdocs"), 59 package_data=get_package_data("mkdocs"), 60 install_requires=[ 61 'click>=4.0', 62 'Jinja2>=2.7.1', 63 'livereload>=2.3.2', 64 'Markdown>=2.3.1,<2.5' if PY26 else 'Markdown>=2.3.1', 65 'PyYAML>=3.10', 66 'tornado>=4.1', 67 ], 68 entry_points={ 69 'console_scripts': [ 70 'mkdocs = mkdocs.cli:cli', 71 ], 72 }, 73 classifiers=[ 74 'Development Status :: 5 - Production/Stable', 75 'Environment :: Console', 76 'Environment :: Web Environment', 77 'Intended Audience :: Developers', 78 'License :: OSI Approved :: BSD License', 79 'Operating System :: OS Independent', 80 'Programming Language :: Python', 81 'Programming Language :: Python :: 2', 82 'Programming Language :: Python :: 2.6', 83 'Programming Language :: Python :: 2.7', 84 'Programming Language :: Python :: 3', 85 'Programming Language :: Python :: 3.3', 86 'Programming Language :: Python :: 3.4', 87 "Programming Language :: Python :: Implementation :: CPython", 88 'Topic :: Documentation', 89 'Topic :: Text Processing', 90 ], 91 zip_safe=False 92 ) 93 94 # (*) Please direct queries to the discussion group: 95 # https://groups.google.com/forum/#!forum/mkdocs 96 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -46,6 +46,22 @@ for filename in filenames]) return {package: filepaths} + +if sys.argv[-1] == 'publish': + if os.system("pip freeze | grep wheel"): + print("wheel not installed.\nUse `pip install wheel`.\nExiting.") + sys.exit() + if os.system("pip freeze | grep twine"): + print("twine not installed.\nUse `pip install twine`.\nExiting.") + sys.exit() + os.system("python setup.py sdist bdist_wheel") + os.system("twine upload dist/*") + print("You probably want to also tag the version now:") + print(" git tag -a {0} -m 'version {0}'".format(get_version("mkdocs"))) + print(" git push --tags") + sys.exit() + + setup( name="mkdocs", version=get_version("mkdocs"),
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -46,6 +46,22 @@\n for filename in filenames])\n return {package: filepaths}\n \n+\n+if sys.argv[-1] == 'publish':\n+ if os.system(\"pip freeze | grep wheel\"):\n+ print(\"wheel not installed.\\nUse `pip install wheel`.\\nExiting.\")\n+ sys.exit()\n+ if os.system(\"pip freeze | grep twine\"):\n+ print(\"twine not installed.\\nUse `pip install twine`.\\nExiting.\")\n+ sys.exit()\n+ os.system(\"python setup.py sdist bdist_wheel\")\n+ os.system(\"twine upload dist/*\")\n+ print(\"You probably want to also tag the version now:\")\n+ print(\" git tag -a {0} -m 'version {0}'\".format(get_version(\"mkdocs\")))\n+ print(\" git push --tags\")\n+ sys.exit()\n+\n+\n setup(\n name=\"mkdocs\",\n version=get_version(\"mkdocs\"),\n", "issue": "Only creating wheels for Python 2.7\nSeems I didn't set something up correctly. It looks like this is a limitation of `setup.py bdist_wheel`\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import print_function\nfrom setuptools import setup\nimport re\nimport os\nimport sys\n\nPY26 = sys.version_info[:2] == (2, 6)\n\n\nlong_description = (\n \"MkDocs is a fast, simple and downright gorgeous static site generator \"\n \"that's geared towards building project documentation. Documentation \"\n \"source files are written in Markdown, and configured with a single YAML \"\n \"configuration file.\"\n)\n\n\ndef get_version(package):\n \"\"\"Return package version as listed in `__version__` in `init.py`.\"\"\"\n init_py = open(os.path.join(package, '__init__.py')).read()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py).group(1)\n\n\ndef get_packages(package):\n \"\"\"Return root package and all sub-packages.\"\"\"\n return [dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n\ndef get_package_data(package):\n \"\"\"\n Return all files under the root package, that are not in a\n package themselves.\n \"\"\"\n walk = [(dirpath.replace(package + os.sep, '', 1), filenames)\n for dirpath, dirnames, filenames in os.walk(package)\n if not os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n filepaths = []\n for base, filenames in walk:\n filepaths.extend([os.path.join(base, filename)\n for filename in filenames])\n return {package: filepaths}\n\nsetup(\n name=\"mkdocs\",\n version=get_version(\"mkdocs\"),\n url='http://www.mkdocs.org',\n license='BSD',\n description='Project documentation with Markdown.',\n long_description=long_description,\n author='Tom Christie',\n author_email='[email protected]', # SEE NOTE BELOW (*)\n packages=get_packages(\"mkdocs\"),\n package_data=get_package_data(\"mkdocs\"),\n install_requires=[\n 'click>=4.0',\n 'Jinja2>=2.7.1',\n 'livereload>=2.3.2',\n 'Markdown>=2.3.1,<2.5' if PY26 else 'Markdown>=2.3.1',\n 'PyYAML>=3.10',\n 'tornado>=4.1',\n ],\n entry_points={\n 'console_scripts': [\n 'mkdocs = mkdocs.cli:cli',\n ],\n },\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 
'Programming Language :: Python :: 3.4',\n \"Programming Language :: Python :: Implementation :: CPython\",\n 'Topic :: Documentation',\n 'Topic :: Text Processing',\n ],\n zip_safe=False\n)\n\n# (*) Please direct queries to the discussion group:\n# https://groups.google.com/forum/#!forum/mkdocs\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import print_function\nfrom setuptools import setup\nimport re\nimport os\nimport sys\n\nPY26 = sys.version_info[:2] == (2, 6)\n\n\nlong_description = (\n \"MkDocs is a fast, simple and downright gorgeous static site generator \"\n \"that's geared towards building project documentation. Documentation \"\n \"source files are written in Markdown, and configured with a single YAML \"\n \"configuration file.\"\n)\n\n\ndef get_version(package):\n \"\"\"Return package version as listed in `__version__` in `init.py`.\"\"\"\n init_py = open(os.path.join(package, '__init__.py')).read()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py).group(1)\n\n\ndef get_packages(package):\n \"\"\"Return root package and all sub-packages.\"\"\"\n return [dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n\ndef get_package_data(package):\n \"\"\"\n Return all files under the root package, that are not in a\n package themselves.\n \"\"\"\n walk = [(dirpath.replace(package + os.sep, '', 1), filenames)\n for dirpath, dirnames, filenames in os.walk(package)\n if not os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n filepaths = []\n for base, filenames in walk:\n filepaths.extend([os.path.join(base, filename)\n for filename in filenames])\n return {package: filepaths}\n\n\nif sys.argv[-1] == 'publish':\n if os.system(\"pip freeze | grep wheel\"):\n print(\"wheel not installed.\\nUse `pip install wheel`.\\nExiting.\")\n sys.exit()\n if os.system(\"pip freeze | grep twine\"):\n print(\"twine not installed.\\nUse `pip install twine`.\\nExiting.\")\n sys.exit()\n os.system(\"python setup.py sdist bdist_wheel\")\n os.system(\"twine upload dist/*\")\n print(\"You probably want to also tag the version now:\")\n print(\" git tag -a {0} -m 'version {0}'\".format(get_version(\"mkdocs\")))\n print(\" git push --tags\")\n sys.exit()\n\n\nsetup(\n name=\"mkdocs\",\n version=get_version(\"mkdocs\"),\n url='http://www.mkdocs.org',\n license='BSD',\n description='Project documentation with Markdown.',\n long_description=long_description,\n author='Tom Christie',\n author_email='[email protected]', # SEE NOTE BELOW (*)\n packages=get_packages(\"mkdocs\"),\n package_data=get_package_data(\"mkdocs\"),\n install_requires=[\n 'click>=4.0',\n 'Jinja2>=2.7.1',\n 'livereload>=2.3.2',\n 'Markdown>=2.3.1,<2.5' if PY26 else 'Markdown>=2.3.1',\n 'PyYAML>=3.10',\n 'tornado>=4.1',\n ],\n entry_points={\n 'console_scripts': [\n 'mkdocs = mkdocs.cli:cli',\n ],\n },\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n \"Programming Language :: Python :: 
Implementation :: CPython\",\n 'Topic :: Documentation',\n 'Topic :: Text Processing',\n ],\n zip_safe=False\n)\n\n# (*) Please direct queries to the discussion group:\n# https://groups.google.com/forum/#!forum/mkdocs\n", "path": "setup.py"}]}
1,218
234
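A note on the record above: `bdist_wheel` tags a pure-Python wheel for the major version of the interpreter that builds it (`py2-none-any` under Python 2.7), and only produces a single `py2.py3-none-any` wheel when the build opts in via `--universal` or `universal = 1` under `[bdist_wheel]` in setup.cfg. That is the mechanism behind the "only creating wheels for Python 2.7" symptom. The sketch below is illustrative only, not part of the record's golden diff (which shells out with `os.system`); it assumes the `wheel` and `twine` packages are installed.

```python
# Illustrative publish helper; assumes `wheel` and `twine` are installed.
import glob
import subprocess
import sys


def publish(universal: bool = True) -> None:
    """Build an sdist plus a wheel, then upload both with twine."""
    build_cmd = [sys.executable, "setup.py", "sdist", "bdist_wheel"]
    if universal:
        # Without this flag a pure-Python wheel is tagged for the building
        # interpreter only (py2-none-any under 2.7), reproducing the issue.
        build_cmd.append("--universal")
    subprocess.check_call(build_cmd)
    subprocess.check_call(["twine", "upload", *glob.glob("dist/*")])


if __name__ == "__main__":
    publish()
```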
gh_patches_debug_26209
rasdani/github-patches
git_diff
Cog-Creators__Red-DiscordBot-3166
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- [p]announce fails if bot belongs to team # Command bugs #### Command name `announce` #### What cog is this command from? `Admin` #### What were you expecting to happen? Send announcement to all enabled servers, if failed, send message to the one of owners or all owners (like an `[p]contact`) #### What actually happened? announcement failed almost immediately with error in console #### How can we reproduce this issue? 1. Set bot with token belonging to team 2. Create environment, where bot cant send announcement to server 3. Announce an message 4. `[p]announce` silently fails with error: ```py Traceback (most recent call last): File "/home/fixator/Red-V3/lib/python3.7/site-packages/redbot/cogs/admin/announcer.py", line 67, in announcer await channel.send(self.message) File "/home/fixator/Red-V3/lib/python3.7/site-packages/discord/abc.py", line 823, in send data = await state.http.send_message(channel.id, content, tts=tts, embed=embed, nonce=nonce) File "/home/fixator/Red-V3/lib/python3.7/site-packages/discord/http.py", line 218, in request raise Forbidden(r, data) discord.errors.Forbidden: 403 FORBIDDEN (error code: 50001): Missing Access During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/fixator/Red-V3/lib/python3.7/site-packages/redbot/cogs/admin/announcer.py", line 70, in announcer _("I could not announce to server: {server.id}").format(server=g) File "/home/fixator/Red-V3/lib/python3.7/site-packages/discord/abc.py", line 823, in send data = await state.http.send_message(channel.id, content, tts=tts, embed=embed, nonce=nonce) File "/home/fixator/Red-V3/lib/python3.7/site-packages/discord/http.py", line 218, in request raise Forbidden(r, data) discord.errors.Forbidden: 403 FORBIDDEN (error code: 50007): Cannot send messages to this user ``` Caused by https://github.com/Cog-Creators/Red-DiscordBot/blob/f0836d7182d99239d1fde24cf2231c6ebf206f72/redbot/cogs/admin/announcer.py#L56 *Kinda related to #2781, i guess* --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `redbot/cogs/admin/announcer.py` Content: ``` 1 import asyncio 2 3 import discord 4 from redbot.core import commands 5 from redbot.core.i18n import Translator 6 7 _ = Translator("Announcer", __file__) 8 9 10 class Announcer: 11 def __init__(self, ctx: commands.Context, message: str, config=None): 12 """ 13 :param ctx: 14 :param message: 15 :param config: Used to determine channel overrides 16 """ 17 self.ctx = ctx 18 self.message = message 19 self.config = config 20 21 self.active = None 22 23 def start(self): 24 """ 25 Starts an announcement. 26 :return: 27 """ 28 if self.active is None: 29 self.active = True 30 self.ctx.bot.loop.create_task(self.announcer()) 31 32 def cancel(self): 33 """ 34 Cancels a running announcement. 
35 :return: 36 """ 37 self.active = False 38 39 async def _get_announce_channel(self, guild: discord.Guild) -> discord.TextChannel: 40 channel_id = await self.config.guild(guild).announce_channel() 41 channel = None 42 43 if channel_id is not None: 44 channel = guild.get_channel(channel_id) 45 46 if channel is None: 47 channel = guild.system_channel 48 49 if channel is None: 50 channel = guild.text_channels[0] 51 52 return channel 53 54 async def announcer(self): 55 guild_list = self.ctx.bot.guilds 56 bot_owner = (await self.ctx.bot.application_info()).owner 57 for g in guild_list: 58 if not self.active: 59 return 60 61 if await self.config.guild(g).announce_ignore(): 62 continue 63 64 channel = await self._get_announce_channel(g) 65 66 try: 67 await channel.send(self.message) 68 except discord.Forbidden: 69 await bot_owner.send( 70 _("I could not announce to server: {server.id}").format(server=g) 71 ) 72 await asyncio.sleep(0.5) 73 74 self.active = False 75 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/redbot/cogs/admin/announcer.py b/redbot/cogs/admin/announcer.py --- a/redbot/cogs/admin/announcer.py +++ b/redbot/cogs/admin/announcer.py @@ -3,6 +3,7 @@ import discord from redbot.core import commands from redbot.core.i18n import Translator +from redbot.core.utils.chat_formatting import humanize_list, inline _ = Translator("Announcer", __file__) @@ -53,7 +54,7 @@ async def announcer(self): guild_list = self.ctx.bot.guilds - bot_owner = (await self.ctx.bot.application_info()).owner + failed = [] for g in guild_list: if not self.active: return @@ -66,9 +67,14 @@ try: await channel.send(self.message) except discord.Forbidden: - await bot_owner.send( - _("I could not announce to server: {server.id}").format(server=g) - ) + failed.append(str(g.id)) await asyncio.sleep(0.5) + msg = ( + _("I could not announce to the following server: ") + if len(failed) == 1 + else _("I could not announce to the following servers: ") + ) + msg += humanize_list(tuple(map(inline, failed))) + await self.ctx.bot.send_to_owners(msg) self.active = False
{"golden_diff": "diff --git a/redbot/cogs/admin/announcer.py b/redbot/cogs/admin/announcer.py\n--- a/redbot/cogs/admin/announcer.py\n+++ b/redbot/cogs/admin/announcer.py\n@@ -3,6 +3,7 @@\n import discord\n from redbot.core import commands\n from redbot.core.i18n import Translator\n+from redbot.core.utils.chat_formatting import humanize_list, inline\n \n _ = Translator(\"Announcer\", __file__)\n \n@@ -53,7 +54,7 @@\n \n async def announcer(self):\n guild_list = self.ctx.bot.guilds\n- bot_owner = (await self.ctx.bot.application_info()).owner\n+ failed = []\n for g in guild_list:\n if not self.active:\n return\n@@ -66,9 +67,14 @@\n try:\n await channel.send(self.message)\n except discord.Forbidden:\n- await bot_owner.send(\n- _(\"I could not announce to server: {server.id}\").format(server=g)\n- )\n+ failed.append(str(g.id))\n await asyncio.sleep(0.5)\n \n+ msg = (\n+ _(\"I could not announce to the following server: \")\n+ if len(failed) == 1\n+ else _(\"I could not announce to the following servers: \")\n+ )\n+ msg += humanize_list(tuple(map(inline, failed)))\n+ await self.ctx.bot.send_to_owners(msg)\n self.active = False\n", "issue": "[p]announce fails if bot belongs to team\n# Command bugs\r\n\r\n#### Command name\r\n\r\n`announce`\r\n\r\n#### What cog is this command from?\r\n\r\n`Admin`\r\n\r\n#### What were you expecting to happen?\r\n\r\nSend announcement to all enabled servers, if failed, send message to the one of owners or all owners (like an `[p]contact`)\r\n\r\n#### What actually happened?\r\n\r\nannouncement failed almost immediately with error in console \r\n\r\n#### How can we reproduce this issue?\r\n\r\n1. Set bot with token belonging to team\r\n2. Create environment, where bot cant send announcement to server\r\n3. Announce an message\r\n4. 
`[p]announce` silently fails with error:\r\n```py\r\nTraceback (most recent call last):\r\n File \"/home/fixator/Red-V3/lib/python3.7/site-packages/redbot/cogs/admin/announcer.py\", line 67, in announcer\r\n await channel.send(self.message)\r\n File \"/home/fixator/Red-V3/lib/python3.7/site-packages/discord/abc.py\", line 823, in send\r\n data = await state.http.send_message(channel.id, content, tts=tts, embed=embed, nonce=nonce)\r\n File \"/home/fixator/Red-V3/lib/python3.7/site-packages/discord/http.py\", line 218, in request\r\n raise Forbidden(r, data)\r\ndiscord.errors.Forbidden: 403 FORBIDDEN (error code: 50001): Missing Access\r\nDuring handling of the above exception, another exception occurred:\r\nTraceback (most recent call last):\r\n File \"/home/fixator/Red-V3/lib/python3.7/site-packages/redbot/cogs/admin/announcer.py\", line 70, in announcer\r\n _(\"I could not announce to server: {server.id}\").format(server=g)\r\n File \"/home/fixator/Red-V3/lib/python3.7/site-packages/discord/abc.py\", line 823, in send\r\n data = await state.http.send_message(channel.id, content, tts=tts, embed=embed, nonce=nonce)\r\n File \"/home/fixator/Red-V3/lib/python3.7/site-packages/discord/http.py\", line 218, in request\r\n raise Forbidden(r, data)\r\ndiscord.errors.Forbidden: 403 FORBIDDEN (error code: 50007): Cannot send messages to this user\r\n```\r\n\r\nCaused by https://github.com/Cog-Creators/Red-DiscordBot/blob/f0836d7182d99239d1fde24cf2231c6ebf206f72/redbot/cogs/admin/announcer.py#L56\r\n\r\n*Kinda related to #2781, i guess*\n", "before_files": [{"content": "import asyncio\n\nimport discord\nfrom redbot.core import commands\nfrom redbot.core.i18n import Translator\n\n_ = Translator(\"Announcer\", __file__)\n\n\nclass Announcer:\n def __init__(self, ctx: commands.Context, message: str, config=None):\n \"\"\"\n :param ctx:\n :param message:\n :param config: Used to determine channel overrides\n \"\"\"\n self.ctx = ctx\n self.message = message\n self.config = config\n\n self.active = None\n\n def start(self):\n \"\"\"\n Starts an announcement.\n :return:\n \"\"\"\n if self.active is None:\n self.active = True\n self.ctx.bot.loop.create_task(self.announcer())\n\n def cancel(self):\n \"\"\"\n Cancels a running announcement.\n :return:\n \"\"\"\n self.active = False\n\n async def _get_announce_channel(self, guild: discord.Guild) -> discord.TextChannel:\n channel_id = await self.config.guild(guild).announce_channel()\n channel = None\n\n if channel_id is not None:\n channel = guild.get_channel(channel_id)\n\n if channel is None:\n channel = guild.system_channel\n\n if channel is None:\n channel = guild.text_channels[0]\n\n return channel\n\n async def announcer(self):\n guild_list = self.ctx.bot.guilds\n bot_owner = (await self.ctx.bot.application_info()).owner\n for g in guild_list:\n if not self.active:\n return\n\n if await self.config.guild(g).announce_ignore():\n continue\n\n channel = await self._get_announce_channel(g)\n\n try:\n await channel.send(self.message)\n except discord.Forbidden:\n await bot_owner.send(\n _(\"I could not announce to server: {server.id}\").format(server=g)\n )\n await asyncio.sleep(0.5)\n\n self.active = False\n", "path": "redbot/cogs/admin/announcer.py"}], "after_files": [{"content": "import asyncio\n\nimport discord\nfrom redbot.core import commands\nfrom redbot.core.i18n import Translator\nfrom redbot.core.utils.chat_formatting import humanize_list, inline\n\n_ = Translator(\"Announcer\", __file__)\n\n\nclass Announcer:\n def __init__(self, ctx: 
commands.Context, message: str, config=None):\n \"\"\"\n :param ctx:\n :param message:\n :param config: Used to determine channel overrides\n \"\"\"\n self.ctx = ctx\n self.message = message\n self.config = config\n\n self.active = None\n\n def start(self):\n \"\"\"\n Starts an announcement.\n :return:\n \"\"\"\n if self.active is None:\n self.active = True\n self.ctx.bot.loop.create_task(self.announcer())\n\n def cancel(self):\n \"\"\"\n Cancels a running announcement.\n :return:\n \"\"\"\n self.active = False\n\n async def _get_announce_channel(self, guild: discord.Guild) -> discord.TextChannel:\n channel_id = await self.config.guild(guild).announce_channel()\n channel = None\n\n if channel_id is not None:\n channel = guild.get_channel(channel_id)\n\n if channel is None:\n channel = guild.system_channel\n\n if channel is None:\n channel = guild.text_channels[0]\n\n return channel\n\n async def announcer(self):\n guild_list = self.ctx.bot.guilds\n failed = []\n for g in guild_list:\n if not self.active:\n return\n\n if await self.config.guild(g).announce_ignore():\n continue\n\n channel = await self._get_announce_channel(g)\n\n try:\n await channel.send(self.message)\n except discord.Forbidden:\n failed.append(str(g.id))\n await asyncio.sleep(0.5)\n\n msg = (\n _(\"I could not announce to the following server: \")\n if len(failed) == 1\n else _(\"I could not announce to the following servers: \")\n )\n msg += humanize_list(tuple(map(inline, failed)))\n await self.ctx.bot.send_to_owners(msg)\n self.active = False\n", "path": "redbot/cogs/admin/announcer.py"}]}
1,420
329
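Context for the record above: when the bot token belongs to a team, the owner returned by `application_info()` is not a plain DM-able user, which is why the per-failure fallback itself raised `50007: Cannot send messages to this user` in the traceback. The golden diff therefore collects failures and reports them once through Red's team-aware `bot.send_to_owners()`. A condensed sketch of that pattern follows; `bot` and `guilds` are assumed inputs (a Red bot object and its guild list) rather than constructed here.

```python
# Condensed sketch of the batching pattern the golden diff adopts.
import asyncio


async def announce_all(bot, guilds, message: str) -> None:
    failed = []
    for guild in guilds:
        channel = guild.system_channel or guild.text_channels[0]
        try:
            await channel.send(message)
        except Exception:  # discord.Forbidden in the real cog
            failed.append(str(guild.id))  # record the failure, keep going
        await asyncio.sleep(0.5)  # same pacing as the original announcer
    if failed:
        # A single report at the end; send_to_owners handles team-owned bots.
        await bot.send_to_owners(
            "I could not announce to the following servers: " + ", ".join(failed)
        )
```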
gh_patches_debug_41
rasdani/github-patches
git_diff
streamlit__streamlit-3038
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Dark theme does not properly adjust markdown tables ### Summary When I load the latest streamlit in darkmode I cannot see anything in my markdown tables because the text color is changed but not the background color. ### Steps to reproduce Code snippet: ``` md = """ | Label | Info | | -------- | --------- | | Row | Data | """ st.markdown(md) ``` **Expected behavior:** I would expect if the text color get changed to white in the table, the background color should get changed to something dark **Actual behavior:** Both the text color and background are white so nothing can be seen. ### Is this a regression? no, consequence of new theme ### Debug info - Streamlit version: 0.79.0 - Python version: 3.7.9 - pip - OS version: MacOS Catalina 10.15.7 - Browser version: Chrome 89.0.4389.90 ### Additional information I'm not sure why markdown tables have different background style but they seem to; perhaps other ui elements would be affected as well. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `e2e/scripts/st_markdown.py` Content: ``` 1 # Copyright 2018-2021 Streamlit Inc. 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import streamlit as st 16 17 st.markdown("This **markdown** is awesome! :sunglasses:") 18 19 st.markdown("This <b>HTML tag</b> is escaped!") 20 21 st.markdown("This <b>HTML tag</b> is not escaped!", unsafe_allow_html=True) 22 23 st.markdown("[text]") 24 25 st.markdown("[link](href)") 26 27 st.markdown("[][]") 28 29 st.markdown("Inline math with $\KaTeX$") 30 31 st.markdown( 32 """ 33 $$ 34 ax^2 + bx + c = 0 35 $$ 36 """ 37 ) 38 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/e2e/scripts/st_markdown.py b/e2e/scripts/st_markdown.py --- a/e2e/scripts/st_markdown.py +++ b/e2e/scripts/st_markdown.py @@ -35,3 +35,11 @@ $$ """ ) + +st.markdown( + """ +| Col1 | Col2 | +| --------- | ----------- | +| Some | Data | +""" +)
{"golden_diff": "diff --git a/e2e/scripts/st_markdown.py b/e2e/scripts/st_markdown.py\n--- a/e2e/scripts/st_markdown.py\n+++ b/e2e/scripts/st_markdown.py\n@@ -35,3 +35,11 @@\n $$\n \"\"\"\n )\n+\n+st.markdown(\n+ \"\"\"\n+| Col1 | Col2 |\n+| --------- | ----------- |\n+| Some | Data |\n+\"\"\"\n+)\n", "issue": "Dark theme does not properly adjust markdown tables\n### Summary\r\n\r\nWhen I load the latest streamlit in darkmode I cannot see anything in my markdown tables because the text color is changed but not the background color.\r\n\r\n### Steps to reproduce\r\n\r\nCode snippet:\r\n\r\n```\r\nmd = \"\"\"\r\n| Label | Info |\r\n| -------- | --------- |\r\n| Row | Data |\r\n\"\"\"\r\nst.markdown(md)\r\n```\r\n\r\n**Expected behavior:**\r\n\r\nI would expect if the text color get changed to white in the table, the background color should get changed to something dark\r\n\r\n**Actual behavior:**\r\n\r\nBoth the text color and background are white so nothing can be seen.\r\n\r\n### Is this a regression?\r\n\r\nno, consequence of new theme\r\n\r\n### Debug info\r\n\r\n- Streamlit version: 0.79.0\r\n- Python version: 3.7.9\r\n- pip\r\n- OS version: MacOS Catalina 10.15.7\r\n- Browser version: Chrome 89.0.4389.90\r\n\r\n### Additional information\r\n\r\nI'm not sure why markdown tables have different background style but they seem to; perhaps other ui elements would be affected as well.\r\n\n", "before_files": [{"content": "# Copyright 2018-2021 Streamlit Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport streamlit as st\n\nst.markdown(\"This **markdown** is awesome! :sunglasses:\")\n\nst.markdown(\"This <b>HTML tag</b> is escaped!\")\n\nst.markdown(\"This <b>HTML tag</b> is not escaped!\", unsafe_allow_html=True)\n\nst.markdown(\"[text]\")\n\nst.markdown(\"[link](href)\")\n\nst.markdown(\"[][]\")\n\nst.markdown(\"Inline math with $\\KaTeX$\")\n\nst.markdown(\n \"\"\"\n$$\nax^2 + bx + c = 0\n$$\n\"\"\"\n)\n", "path": "e2e/scripts/st_markdown.py"}], "after_files": [{"content": "# Copyright 2018-2021 Streamlit Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport streamlit as st\n\nst.markdown(\"This **markdown** is awesome! 
:sunglasses:\")\n\nst.markdown(\"This <b>HTML tag</b> is escaped!\")\n\nst.markdown(\"This <b>HTML tag</b> is not escaped!\", unsafe_allow_html=True)\n\nst.markdown(\"[text]\")\n\nst.markdown(\"[link](href)\")\n\nst.markdown(\"[][]\")\n\nst.markdown(\"Inline math with $\\KaTeX$\")\n\nst.markdown(\n \"\"\"\n$$\nax^2 + bx + c = 0\n$$\n\"\"\"\n)\n\nst.markdown(\n \"\"\"\n| Col1 | Col2 |\n| --------- | ----------- |\n| Some | Data |\n\"\"\"\n)\n", "path": "e2e/scripts/st_markdown.py"}]}
831
98
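Worth flagging for the record above: the golden diff changes no styling code at all; it only appends the issue's table to the e2e script `st_markdown.py` as a regression check, while the actual dark-theme color fix lives in frontend code outside this record's before/after files. The issue's reproduction, as a one-file app:

```python
# Reproduction from the issue; run with: streamlit run repro.py
import streamlit as st

md = """
| Label | Info |
| ----- | ---- |
| Row   | Data |
"""
# Under the 0.79 dark theme the table text was recolored white while the
# table background stayed white, hiding the cell contents.
st.markdown(md)
```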
gh_patches_debug_6084
rasdani/github-patches
git_diff
bridgecrewio__checkov-107
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Checkov fails to start in Windows environments **Describe the bug** After you install Checkov on Windows, running Checkov does nothing. **To Reproduce** Steps to reproduce the behavior: 1. Open Powershell/cmd 2. Run cli command 'checkov' 3. Does nothing **Expected behavior** The tool running. Magic. **Screenshots** I'm not sure showing nothing would help. **Desktop (please complete the following information):** - OS: Windows 10 - Checkov Version 1.0.173 **Additional context** I know Windows! Like who cares and tbh ive got WSL2 and it works a dream but customers, customers and their awful locked down... anyway. I'm using Python37 where i've installed . If you look in your c:/Python37/scripts folder there is a "checkov" bash script. This is the nub of it this doesn't run! However if you add a batch file "checkov-scan.bat" [or call whatever} with this content: ```cmd C:\Python37\python C:\Python37\Lib\site-packages\checkov\main.py %1 %2 ``` Then when you run "checkov-scan" at your shell, it works! So is there anyway you could package up something similar in a release? please? Also I made a python based pre-commit for checkov called checkov-scan - here <https://github.com/JamesWoolfenden/pre-commit> --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 #!/usr/bin/env python 2 import logging 3 import os 4 from importlib import util 5 from os import path 6 7 import setuptools 8 from setuptools import setup 9 10 # read the contents of your README file 11 this_directory = path.abspath(path.dirname(__file__)) 12 with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: 13 long_description = f.read() 14 15 logger = logging.getLogger(__name__) 16 spec = util.spec_from_file_location( 17 "checkov.version", os.path.join("checkov", "version.py") 18 ) 19 # noinspection PyUnresolvedReferences 20 mod = util.module_from_spec(spec) 21 spec.loader.exec_module(mod) # type: ignore 22 version = mod.version # type: ignore 23 24 setup( 25 extras_require={ 26 "dev": [ 27 "alabaster==0.7.12", 28 "attrs==19.3.0", 29 "babel==2.7.0", 30 "certifi==2019.11.28", 31 "chardet==3.0.4", 32 "coverage==4.5.4", 33 "coverage-badge==1.0.1", 34 "detect-secrets==0.13.0", 35 "docopt==0.6.2", 36 "docutils==0.15.2", 37 "idna==2.8", 38 "imagesize==1.1.0", 39 "importlib-metadata==1.1.0; python_version < '3.8'", 40 "jinja2==2.10.3", 41 "lark-parser==0.7.8", 42 "markupsafe==1.1.1", 43 "more-itertools==8.0.0", 44 "packaging==19.2", 45 "pluggy==0.13.1", 46 "py==1.8.0", 47 "pygments==2.5.2", 48 "pyparsing==2.4.5", 49 "pytest==5.3.1", 50 "python-hcl2==0.2.0", 51 "pytz==2019.3", 52 "pyyaml==5.1.2", 53 "requests==2.22.0", 54 "six==1.13.0", 55 "snowballstemmer==2.0.0", 56 "sphinx==2.2.1", 57 "sphinxcontrib-applehelp==1.0.1", 58 "sphinxcontrib-devhelp==1.0.1", 59 "sphinxcontrib-htmlhelp==1.0.2", 60 "sphinxcontrib-jsmath==1.0.1", 61 "sphinxcontrib-qthelp==1.0.2", 62 "sphinxcontrib-serializinghtml==1.1.3", 63 "urllib3==1.25.7", 64 "wcwidth==0.1.7", 65 "zipp==0.6.0", 66 ] 67 }, 68 install_requires=[ 69 "chardet==3.0.4", 70 "colorama==0.4.3", 71 "docopt==0.6.2", 72 "idna==2.8", 73 "junit-xml==1.8", 74 "lark-parser==0.7.8", 75 "python-hcl2==0.2.0", 76 "pyyaml==5.2", 77 "requests==2.22.0", 78 "six==1.13.0", 79 "tabulate==0.8.6", 80 "termcolor==1.1.0", 81 "urllib3==1.25.7", 82 "dpath==1.5.0" 83 ], 84 
license="Apache License 2.0", 85 name="checkov", 86 version=version, 87 description="Infrastructure as code static analysis", 88 author="bridgecrew", 89 author_email="[email protected]", 90 url="https://github.com/bridgecrewio/checkov", 91 packages=setuptools.find_packages(exclude=["tests*"]), 92 scripts=["bin/checkov"], 93 long_description=long_description, 94 long_description_content_type="text/markdown", 95 classifiers=[ 96 'Environment :: Console', 97 'Intended Audience :: Developers', 98 'Intended Audience :: System Administrators', 99 'Programming Language :: Python :: 3.6', 100 'Programming Language :: Python :: 3.7', 101 'Topic :: Security', 102 'Topic :: Software Development :: Build Tools' 103 ] 104 ) 105 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -89,7 +89,7 @@ author_email="[email protected]", url="https://github.com/bridgecrewio/checkov", packages=setuptools.find_packages(exclude=["tests*"]), - scripts=["bin/checkov"], + scripts=["bin/checkov","bin/checkov.bat"], long_description=long_description, long_description_content_type="text/markdown", classifiers=[
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -89,7 +89,7 @@\n author_email=\"[email protected]\",\n url=\"https://github.com/bridgecrewio/checkov\",\n packages=setuptools.find_packages(exclude=[\"tests*\"]),\n- scripts=[\"bin/checkov\"],\n+ scripts=[\"bin/checkov\",\"bin/checkov.bat\"],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n classifiers=[\n", "issue": "Checkov fails to start in Windows environments \n**Describe the bug**\r\nAfter you install Checkov on Windows, running Checkov does nothing.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Open Powershell/cmd\r\n2. Run cli command 'checkov'\r\n3. Does nothing\r\n\r\n**Expected behavior**\r\nThe tool running. Magic.\r\n\r\n**Screenshots**\r\nI'm not sure showing nothing would help.\r\n\r\n**Desktop (please complete the following information):**\r\n - OS: Windows 10\r\n - Checkov Version 1.0.173\r\n\r\n**Additional context**\r\nI know Windows! Like who cares and tbh ive got WSL2 and it works a dream but customers, customers and their awful locked down... anyway.\r\nI'm using Python37 where i've installed .\r\nIf you look in your c:/Python37/scripts folder there is a \"checkov\" bash script. This is the nub of it this doesn't run! However if you add a batch file \"checkov-scan.bat\" [or call whatever} with this content:\r\n```cmd\r\nC:\\Python37\\python C:\\Python37\\Lib\\site-packages\\checkov\\main.py %1 %2\r\n```\r\nThen when you run \"checkov-scan\" at your shell, it works! So is there anyway you could package up something similar in a release? please? \r\nAlso I made a python based pre-commit for checkov called checkov-scan - here <https://github.com/JamesWoolfenden/pre-commit>\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport logging\nimport os\nfrom importlib import util\nfrom os import path\n\nimport setuptools\nfrom setuptools import setup\n\n# read the contents of your README file\nthis_directory = path.abspath(path.dirname(__file__))\nwith open(path.join(this_directory, \"README.md\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nlogger = logging.getLogger(__name__)\nspec = util.spec_from_file_location(\n \"checkov.version\", os.path.join(\"checkov\", \"version.py\")\n)\n# noinspection PyUnresolvedReferences\nmod = util.module_from_spec(spec)\nspec.loader.exec_module(mod) # type: ignore\nversion = mod.version # type: ignore\n\nsetup(\n extras_require={\n \"dev\": [\n \"alabaster==0.7.12\",\n \"attrs==19.3.0\",\n \"babel==2.7.0\",\n \"certifi==2019.11.28\",\n \"chardet==3.0.4\",\n \"coverage==4.5.4\",\n \"coverage-badge==1.0.1\",\n \"detect-secrets==0.13.0\",\n \"docopt==0.6.2\",\n \"docutils==0.15.2\",\n \"idna==2.8\",\n \"imagesize==1.1.0\",\n \"importlib-metadata==1.1.0; python_version < '3.8'\",\n \"jinja2==2.10.3\",\n \"lark-parser==0.7.8\",\n \"markupsafe==1.1.1\",\n \"more-itertools==8.0.0\",\n \"packaging==19.2\",\n \"pluggy==0.13.1\",\n \"py==1.8.0\",\n \"pygments==2.5.2\",\n \"pyparsing==2.4.5\",\n \"pytest==5.3.1\",\n \"python-hcl2==0.2.0\",\n \"pytz==2019.3\",\n \"pyyaml==5.1.2\",\n \"requests==2.22.0\",\n \"six==1.13.0\",\n \"snowballstemmer==2.0.0\",\n \"sphinx==2.2.1\",\n \"sphinxcontrib-applehelp==1.0.1\",\n \"sphinxcontrib-devhelp==1.0.1\",\n \"sphinxcontrib-htmlhelp==1.0.2\",\n \"sphinxcontrib-jsmath==1.0.1\",\n \"sphinxcontrib-qthelp==1.0.2\",\n \"sphinxcontrib-serializinghtml==1.1.3\",\n \"urllib3==1.25.7\",\n \"wcwidth==0.1.7\",\n \"zipp==0.6.0\",\n ]\n },\n 
install_requires=[\n \"chardet==3.0.4\",\n \"colorama==0.4.3\",\n \"docopt==0.6.2\",\n \"idna==2.8\",\n \"junit-xml==1.8\",\n \"lark-parser==0.7.8\",\n \"python-hcl2==0.2.0\",\n \"pyyaml==5.2\",\n \"requests==2.22.0\",\n \"six==1.13.0\",\n \"tabulate==0.8.6\",\n \"termcolor==1.1.0\",\n \"urllib3==1.25.7\",\n \"dpath==1.5.0\"\n ],\n license=\"Apache License 2.0\",\n name=\"checkov\",\n version=version,\n description=\"Infrastructure as code static analysis\",\n author=\"bridgecrew\",\n author_email=\"[email protected]\",\n url=\"https://github.com/bridgecrewio/checkov\",\n packages=setuptools.find_packages(exclude=[\"tests*\"]),\n scripts=[\"bin/checkov\"],\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n classifiers=[\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Security',\n 'Topic :: Software Development :: Build Tools'\n ]\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport logging\nimport os\nfrom importlib import util\nfrom os import path\n\nimport setuptools\nfrom setuptools import setup\n\n# read the contents of your README file\nthis_directory = path.abspath(path.dirname(__file__))\nwith open(path.join(this_directory, \"README.md\"), encoding=\"utf-8\") as f:\n long_description = f.read()\n\nlogger = logging.getLogger(__name__)\nspec = util.spec_from_file_location(\n \"checkov.version\", os.path.join(\"checkov\", \"version.py\")\n)\n# noinspection PyUnresolvedReferences\nmod = util.module_from_spec(spec)\nspec.loader.exec_module(mod) # type: ignore\nversion = mod.version # type: ignore\n\nsetup(\n extras_require={\n \"dev\": [\n \"alabaster==0.7.12\",\n \"attrs==19.3.0\",\n \"babel==2.7.0\",\n \"certifi==2019.11.28\",\n \"chardet==3.0.4\",\n \"coverage==4.5.4\",\n \"coverage-badge==1.0.1\",\n \"detect-secrets==0.13.0\",\n \"docopt==0.6.2\",\n \"docutils==0.15.2\",\n \"idna==2.8\",\n \"imagesize==1.1.0\",\n \"importlib-metadata==1.1.0; python_version < '3.8'\",\n \"jinja2==2.10.3\",\n \"lark-parser==0.7.8\",\n \"markupsafe==1.1.1\",\n \"more-itertools==8.0.0\",\n \"packaging==19.2\",\n \"pluggy==0.13.1\",\n \"py==1.8.0\",\n \"pygments==2.5.2\",\n \"pyparsing==2.4.5\",\n \"pytest==5.3.1\",\n \"python-hcl2==0.2.0\",\n \"pytz==2019.3\",\n \"pyyaml==5.1.2\",\n \"requests==2.22.0\",\n \"six==1.13.0\",\n \"snowballstemmer==2.0.0\",\n \"sphinx==2.2.1\",\n \"sphinxcontrib-applehelp==1.0.1\",\n \"sphinxcontrib-devhelp==1.0.1\",\n \"sphinxcontrib-htmlhelp==1.0.2\",\n \"sphinxcontrib-jsmath==1.0.1\",\n \"sphinxcontrib-qthelp==1.0.2\",\n \"sphinxcontrib-serializinghtml==1.1.3\",\n \"urllib3==1.25.7\",\n \"wcwidth==0.1.7\",\n \"zipp==0.6.0\",\n ]\n },\n install_requires=[\n \"chardet==3.0.4\",\n \"colorama==0.4.3\",\n \"docopt==0.6.2\",\n \"idna==2.8\",\n \"junit-xml==1.8\",\n \"lark-parser==0.7.8\",\n \"python-hcl2==0.2.0\",\n \"pyyaml==5.2\",\n \"requests==2.22.0\",\n \"six==1.13.0\",\n \"tabulate==0.8.6\",\n \"termcolor==1.1.0\",\n \"urllib3==1.25.7\",\n \"dpath==1.5.0\"\n ],\n license=\"Apache License 2.0\",\n name=\"checkov\",\n version=version,\n description=\"Infrastructure as code static analysis\",\n author=\"bridgecrew\",\n author_email=\"[email protected]\",\n url=\"https://github.com/bridgecrewio/checkov\",\n packages=setuptools.find_packages(exclude=[\"tests*\"]),\n scripts=[\"bin/checkov\",\"bin/checkov.bat\"],\n 
long_description=long_description,\n long_description_content_type=\"text/markdown\",\n classifiers=[\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: System Administrators',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Security',\n 'Topic :: Software Development :: Build Tools'\n ]\n)\n", "path": "setup.py"}]}
1,756
109
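One remark on the record above: the golden diff fixes Windows by shipping a `bin/checkov.bat` next to the bash launcher, packaging up the workaround described in the issue. The more common idiom is a `console_scripts` entry point, which makes pip generate a native `checkov.exe` launcher on Windows at install time. The sketch below illustrates that alternative; the `checkov.main:run` target is an assumption made for illustration, not something taken from this record's setup.py.

```python
# Hypothetical entry-point alternative to shipping bin/checkov + bin/checkov.bat.
from setuptools import find_packages, setup

setup(
    name="checkov",
    version="0.0.0",  # placeholder; the real setup.py reads version.py
    packages=find_packages(exclude=["tests*"]),
    entry_points={
        "console_scripts": [
            # Assumed callable, for illustration; pip turns this into a
            # platform-appropriate launcher (checkov.exe on Windows).
            "checkov = checkov.main:run",
        ],
    },
)
```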
gh_patches_debug_21881
rasdani/github-patches
git_diff
google__TensorNetwork-263
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- ncon_interface tests fail --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `conftest.py` Content: ``` 1 # Copyright 2019 The TensorNetwork Authors 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 from __future__ import absolute_import 16 from __future__ import division 17 from __future__ import print_function 18 import pytest 19 20 21 @pytest.fixture(name="backend", params=["numpy", "tensorflow", 22 "jax", "pytorch"]) 23 def backend_fixure(request): 24 return request.param 25 ``` Path: `tensornetwork/__init__.py` Content: ``` 1 from __future__ import absolute_import 2 from tensornetwork.network import TensorNetwork 3 from tensornetwork.network_components import Node, Edge, CopyNode 4 from tensornetwork.ncon_interface import ncon, ncon_network 5 from tensornetwork.version import __version__ 6 from tensornetwork.visualization.graphviz import to_graphviz 7 from tensornetwork import contractors 8 from tensornetwork import config 9 from typing import Text, Optional, Type 10 from tensornetwork.utils import load 11 12 13 def set_default_backend(backend: Text, dtype: Optional[Type] = None) -> None: 14 config.default_backend = backend 15 config.default_dype = dtype 16 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/conftest.py b/conftest.py --- a/conftest.py +++ b/conftest.py @@ -16,9 +16,33 @@ from __future__ import division from __future__ import print_function import pytest +import jax +import tensornetwork +import tensorflow as tf @pytest.fixture(name="backend", params=["numpy", "tensorflow", "jax", "pytorch"]) def backend_fixure(request): return request.param + + [email protected](autouse=True) +def reset_default_backend(): + tensornetwork.set_default_backend("numpy") + yield + tensornetwork.set_default_backend("numpy") + + [email protected](autouse=True) +def enable_jax_64(): + jax.config.update("jax_enable_x64", True) + yield + jax.config.update("jax_enable_x64", True) + + [email protected](autouse=True) +def tf_enable_v2_behaviour(): + tf.compat.v1.enable_v2_behavior() + yield + tf.compat.v1.enable_v2_behavior() diff --git a/tensornetwork/__init__.py b/tensornetwork/__init__.py --- a/tensornetwork/__init__.py +++ b/tensornetwork/__init__.py @@ -12,4 +12,4 @@ def set_default_backend(backend: Text, dtype: Optional[Type] = None) -> None: config.default_backend = backend - config.default_dype = dtype + config.default_dtype = dtype
{"golden_diff": "diff --git a/conftest.py b/conftest.py\n--- a/conftest.py\n+++ b/conftest.py\n@@ -16,9 +16,33 @@\n from __future__ import division\n from __future__ import print_function\n import pytest\n+import jax\n+import tensornetwork\n+import tensorflow as tf\n \n \n @pytest.fixture(name=\"backend\", params=[\"numpy\", \"tensorflow\",\n \"jax\", \"pytorch\"])\n def backend_fixure(request):\n return request.param\n+\n+\[email protected](autouse=True)\n+def reset_default_backend():\n+ tensornetwork.set_default_backend(\"numpy\")\n+ yield\n+ tensornetwork.set_default_backend(\"numpy\")\n+\n+\[email protected](autouse=True)\n+def enable_jax_64():\n+ jax.config.update(\"jax_enable_x64\", True)\n+ yield\n+ jax.config.update(\"jax_enable_x64\", True)\n+\n+\[email protected](autouse=True)\n+def tf_enable_v2_behaviour():\n+ tf.compat.v1.enable_v2_behavior()\n+ yield\n+ tf.compat.v1.enable_v2_behavior()\ndiff --git a/tensornetwork/__init__.py b/tensornetwork/__init__.py\n--- a/tensornetwork/__init__.py\n+++ b/tensornetwork/__init__.py\n@@ -12,4 +12,4 @@\n \n def set_default_backend(backend: Text, dtype: Optional[Type] = None) -> None:\n config.default_backend = backend\n- config.default_dype = dtype\n+ config.default_dtype = dtype\n", "issue": "ncon_interface tests fail \n\n", "before_files": [{"content": "# Copyright 2019 The TensorNetwork Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport pytest\n\n\[email protected](name=\"backend\", params=[\"numpy\", \"tensorflow\",\n \"jax\", \"pytorch\"])\ndef backend_fixure(request):\n return request.param\n", "path": "conftest.py"}, {"content": "from __future__ import absolute_import\nfrom tensornetwork.network import TensorNetwork\nfrom tensornetwork.network_components import Node, Edge, CopyNode\nfrom tensornetwork.ncon_interface import ncon, ncon_network\nfrom tensornetwork.version import __version__\nfrom tensornetwork.visualization.graphviz import to_graphviz\nfrom tensornetwork import contractors\nfrom tensornetwork import config\nfrom typing import Text, Optional, Type\nfrom tensornetwork.utils import load\n\n\ndef set_default_backend(backend: Text, dtype: Optional[Type] = None) -> None:\n config.default_backend = backend\n config.default_dype = dtype\n", "path": "tensornetwork/__init__.py"}], "after_files": [{"content": "# Copyright 2019 The TensorNetwork Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom 
__future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport pytest\nimport jax\nimport tensornetwork\nimport tensorflow as tf\n\n\[email protected](name=\"backend\", params=[\"numpy\", \"tensorflow\",\n \"jax\", \"pytorch\"])\ndef backend_fixure(request):\n return request.param\n\n\[email protected](autouse=True)\ndef reset_default_backend():\n tensornetwork.set_default_backend(\"numpy\")\n yield\n tensornetwork.set_default_backend(\"numpy\")\n\n\[email protected](autouse=True)\ndef enable_jax_64():\n jax.config.update(\"jax_enable_x64\", True)\n yield\n jax.config.update(\"jax_enable_x64\", True)\n\n\[email protected](autouse=True)\ndef tf_enable_v2_behaviour():\n tf.compat.v1.enable_v2_behavior()\n yield\n tf.compat.v1.enable_v2_behavior()\n", "path": "conftest.py"}, {"content": "from __future__ import absolute_import\nfrom tensornetwork.network import TensorNetwork\nfrom tensornetwork.network_components import Node, Edge, CopyNode\nfrom tensornetwork.ncon_interface import ncon, ncon_network\nfrom tensornetwork.version import __version__\nfrom tensornetwork.visualization.graphviz import to_graphviz\nfrom tensornetwork import contractors\nfrom tensornetwork import config\nfrom typing import Text, Optional, Type\nfrom tensornetwork.utils import load\n\n\ndef set_default_backend(backend: Text, dtype: Optional[Type] = None) -> None:\n config.default_backend = backend\n config.default_dtype = dtype\n", "path": "tensornetwork/__init__.py"}]}
677
355
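Since the issue text in the record above is a single line, two details of the golden diff are worth spelling out. First, it fixes a one-character bug in `tensornetwork/__init__.py`: `set_default_backend` wrote to a misspelled `config.default_dype` attribute instead of `config.default_dtype`. Second, the conftest changes add pytest autouse fixtures that reset global state (default backend, jax 64-bit mode, TF v2 behaviour) around every test, which suggests state leaking between tests was part of the failure. A minimal, self-contained illustration of the autouse pattern, using a stand-in module variable instead of the real tensornetwork config:

```python
# Minimal autouse-fixture illustration; current_backend stands in for
# tensornetwork's module-level configuration.
import pytest

DEFAULT_BACKEND = "numpy"
current_backend = DEFAULT_BACKEND


@pytest.fixture(autouse=True)
def reset_default_backend():
    """Runs around every test automatically, without being requested."""
    global current_backend
    current_backend = DEFAULT_BACKEND  # known state before the test
    yield                              # the test body executes here
    current_backend = DEFAULT_BACKEND  # undo whatever the test changed


def test_backend_can_be_switched():
    global current_backend
    current_backend = "jax"
    assert current_backend == "jax"  # the fixture restores "numpy" afterwards
```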
gh_patches_debug_14998
rasdani/github-patches
git_diff
ManageIQ__integration_tests-8406
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Catalog exists property throws "CandidateNotFound" Exception When we call <catalog_obj>.exists , it throws "CandidateNotFound" Exception, where as in our test cases we expect a Boolean value "False" >> https://github.com/ManageIQ/integration_tests/blob/master/cfme/services/catalogs/catalog.py#L119 Steps to Reproduce: <catalog_obj>.exists Actual Result: Trace-back of "CandidateNotFound" Exception Expected Result: False --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `cfme/services/catalogs/catalog.py` Content: ``` 1 import attr 2 3 from widgetastic.utils import Parameter 4 from widgetastic.widget import Text 5 from widgetastic_manageiq import MultiBoxSelect 6 from widgetastic_patternfly import Button, Input 7 from navmazing import NavigateToAttribute, NavigateToSibling 8 9 from cfme.common import Taggable 10 from cfme.modeling.base import BaseCollection, BaseEntity 11 from cfme.utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to 12 from cfme.utils.pretty import Pretty 13 from cfme.utils.update import Updateable 14 from cfme.utils.wait import wait_for 15 16 from . import ServicesCatalogView 17 18 19 class CatalogsMultiBoxSelect(MultiBoxSelect): 20 move_into_button = Button(title=Parameter("@move_into")) 21 move_from_button = Button(title=Parameter("@move_from")) 22 23 24 class CatalogForm(ServicesCatalogView): 25 title = Text('#explorer_title_text') 26 27 name = Input(name='name') 28 description = Input(name="description") 29 assign_catalog_items = CatalogsMultiBoxSelect( 30 move_into="Move Selected buttons right", 31 move_from="Move Selected buttons left", 32 available_items="available_fields", 33 chosen_items="selected_fields" 34 ) 35 36 save_button = Button('Save') 37 cancel_button = Button('Cancel') 38 39 40 class CatalogsView(ServicesCatalogView): 41 title = Text("#explorer_title_text") 42 43 @property 44 def is_displayed(self): 45 return ( 46 self.in_explorer and 47 self.catalogs.is_opened and 48 self.catalogs.tree.currently_selected == ["All Catalogs"]) 49 50 51 class DetailsCatalogView(ServicesCatalogView): 52 title = Text("#explorer_title_text") 53 54 @property 55 def is_displayed(self): 56 return ( 57 self.in_explorer and self.catalogs.is_opened and 58 self.title.text == 'Catalog "{}"'.format(self.context["object"].name) 59 ) 60 61 62 class AddCatalogView(CatalogForm): 63 64 add_button = Button("Add") 65 66 @property 67 def is_displayed(self): 68 return ( 69 self.in_explorer and self.catalogs.is_opened and 70 self.title.text == 'Adding a new Catalog' 71 ) 72 73 74 class EditCatalogView(CatalogForm): 75 76 save_button = Button('Save') 77 reset_button = Button('Reset') 78 79 @property 80 def is_displayed(self): 81 return ( 82 self.in_explorer and self.catalogs.is_opened and 83 self.title.text == 'Editing Catalog "{}"'.format(self.context["object"].name) 84 ) 85 86 87 @attr.s 88 class Catalog(BaseEntity, Updateable, Pretty, Taggable): 89 90 name = attr.ib() 91 description = attr.ib() 92 items = attr.ib(default=None) 93 94 def update(self, updates): 95 view = navigate_to(self, 'Edit') 96 changed = view.fill(updates) 97 if changed: 98 view.save_button.click() 99 else: 100 view.cancel_button.click() 101 view = self.create_view(DetailsCatalogView, override=updates, wait='10s') 102 view.flash.assert_no_error() 103 if changed: 104 
view.flash.assert_message( 105 'Catalog "{}" was saved'.format(updates.get('name', self.name))) 106 else: 107 view.flash.assert_message( 108 'Edit of Catalog "{}" was cancelled by the user'.format(self.name)) 109 110 def delete(self): 111 view = navigate_to(self, "Details") 112 view.configuration.item_select('Remove Catalog', handle_alert=True) 113 view = self.create_view(CatalogsView, wait='10s') 114 view.flash.assert_no_error() 115 view.flash.assert_success_message( 116 'Catalog "{}": Delete successful'.format(self.description or self.name)) 117 118 @property 119 def exists(self): 120 try: 121 navigate_to(self, 'Details') 122 return True 123 except NameError: 124 return False 125 126 127 @attr.s 128 class CatalogCollection(BaseCollection): 129 """A collection for the :py:class:`cfme.services.catalogs.catalog.Catalog`""" 130 ENTITY = Catalog 131 132 def create(self, name, description, items=None): 133 """Create a catalog. 134 135 Args: 136 name: The name of the catalog 137 description: The description of the catalog 138 items: Items in the catalog 139 """ 140 view = navigate_to(self, 'Add') 141 view.fill({ 142 'name': name, 143 'description': description, 144 'assign_catalog_items': items 145 }) 146 view.add_button.click() 147 catalog = self.instantiate(name=name, description=description, items=items) 148 view = self.create_view(CatalogsView) 149 assert view.is_displayed 150 view.flash.assert_no_error() 151 return catalog 152 153 154 @navigator.register(CatalogCollection) 155 class All(CFMENavigateStep): 156 VIEW = CatalogsView 157 prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn') 158 159 def step(self): 160 self.prerequisite_view.navigation.select('Services', 'Catalogs') 161 self.view.catalogs.tree.click_path("All Catalogs") 162 163 164 @navigator.register(CatalogCollection) 165 class Add(CFMENavigateStep): 166 VIEW = AddCatalogView 167 prerequisite = NavigateToSibling('All') 168 169 def step(self): 170 self.prerequisite_view.configuration.item_select('Add a New Catalog') 171 172 173 @navigator.register(Catalog) 174 class Details(CFMENavigateStep): 175 VIEW = DetailsCatalogView 176 prerequisite = NavigateToAttribute('parent', 'All') 177 178 def step(self): 179 self.prerequisite_view.catalogs.tree.click_path("All Catalogs", self.obj.name) 180 181 182 @navigator.register(Catalog) 183 class Edit(CFMENavigateStep): 184 VIEW = EditCatalogView 185 prerequisite = NavigateToSibling('Details') 186 187 def step(self): 188 self.prerequisite_view.configuration.item_select('Edit this Item') 189 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/cfme/services/catalogs/catalog.py b/cfme/services/catalogs/catalog.py --- a/cfme/services/catalogs/catalog.py +++ b/cfme/services/catalogs/catalog.py @@ -3,7 +3,7 @@ from widgetastic.utils import Parameter from widgetastic.widget import Text from widgetastic_manageiq import MultiBoxSelect -from widgetastic_patternfly import Button, Input +from widgetastic_patternfly import Button, CandidateNotFound, Input from navmazing import NavigateToAttribute, NavigateToSibling from cfme.common import Taggable @@ -120,7 +120,7 @@ try: navigate_to(self, 'Details') return True - except NameError: + except (NameError, CandidateNotFound): return False
{"golden_diff": "diff --git a/cfme/services/catalogs/catalog.py b/cfme/services/catalogs/catalog.py\n--- a/cfme/services/catalogs/catalog.py\n+++ b/cfme/services/catalogs/catalog.py\n@@ -3,7 +3,7 @@\n from widgetastic.utils import Parameter\n from widgetastic.widget import Text\n from widgetastic_manageiq import MultiBoxSelect\n-from widgetastic_patternfly import Button, Input\n+from widgetastic_patternfly import Button, CandidateNotFound, Input\n from navmazing import NavigateToAttribute, NavigateToSibling\n \n from cfme.common import Taggable\n@@ -120,7 +120,7 @@\n try:\n navigate_to(self, 'Details')\n return True\n- except NameError:\n+ except (NameError, CandidateNotFound):\n return False\n", "issue": "Catalog exists property throws \"CandidateNotFound\" Exception\nWhen we call <catalog_obj>.exists , it throws \"CandidateNotFound\" Exception, where as in our test cases we expect a Boolean value \"False\"\r\n>> https://github.com/ManageIQ/integration_tests/blob/master/cfme/services/catalogs/catalog.py#L119 \r\n\r\nSteps to Reproduce: <catalog_obj>.exists\r\nActual Result: Trace-back of \"CandidateNotFound\" Exception\r\nExpected Result: False \n", "before_files": [{"content": "import attr\n\nfrom widgetastic.utils import Parameter\nfrom widgetastic.widget import Text\nfrom widgetastic_manageiq import MultiBoxSelect\nfrom widgetastic_patternfly import Button, Input\nfrom navmazing import NavigateToAttribute, NavigateToSibling\n\nfrom cfme.common import Taggable\nfrom cfme.modeling.base import BaseCollection, BaseEntity\nfrom cfme.utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to\nfrom cfme.utils.pretty import Pretty\nfrom cfme.utils.update import Updateable\nfrom cfme.utils.wait import wait_for\n\nfrom . import ServicesCatalogView\n\n\nclass CatalogsMultiBoxSelect(MultiBoxSelect):\n move_into_button = Button(title=Parameter(\"@move_into\"))\n move_from_button = Button(title=Parameter(\"@move_from\"))\n\n\nclass CatalogForm(ServicesCatalogView):\n title = Text('#explorer_title_text')\n\n name = Input(name='name')\n description = Input(name=\"description\")\n assign_catalog_items = CatalogsMultiBoxSelect(\n move_into=\"Move Selected buttons right\",\n move_from=\"Move Selected buttons left\",\n available_items=\"available_fields\",\n chosen_items=\"selected_fields\"\n )\n\n save_button = Button('Save')\n cancel_button = Button('Cancel')\n\n\nclass CatalogsView(ServicesCatalogView):\n title = Text(\"#explorer_title_text\")\n\n @property\n def is_displayed(self):\n return (\n self.in_explorer and\n self.catalogs.is_opened and\n self.catalogs.tree.currently_selected == [\"All Catalogs\"])\n\n\nclass DetailsCatalogView(ServicesCatalogView):\n title = Text(\"#explorer_title_text\")\n\n @property\n def is_displayed(self):\n return (\n self.in_explorer and self.catalogs.is_opened and\n self.title.text == 'Catalog \"{}\"'.format(self.context[\"object\"].name)\n )\n\n\nclass AddCatalogView(CatalogForm):\n\n add_button = Button(\"Add\")\n\n @property\n def is_displayed(self):\n return (\n self.in_explorer and self.catalogs.is_opened and\n self.title.text == 'Adding a new Catalog'\n )\n\n\nclass EditCatalogView(CatalogForm):\n\n save_button = Button('Save')\n reset_button = Button('Reset')\n\n @property\n def is_displayed(self):\n return (\n self.in_explorer and self.catalogs.is_opened and\n self.title.text == 'Editing Catalog \"{}\"'.format(self.context[\"object\"].name)\n )\n\n\[email protected]\nclass Catalog(BaseEntity, Updateable, Pretty, Taggable):\n\n name = 
attr.ib()\n description = attr.ib()\n items = attr.ib(default=None)\n\n def update(self, updates):\n view = navigate_to(self, 'Edit')\n changed = view.fill(updates)\n if changed:\n view.save_button.click()\n else:\n view.cancel_button.click()\n view = self.create_view(DetailsCatalogView, override=updates, wait='10s')\n view.flash.assert_no_error()\n if changed:\n view.flash.assert_message(\n 'Catalog \"{}\" was saved'.format(updates.get('name', self.name)))\n else:\n view.flash.assert_message(\n 'Edit of Catalog \"{}\" was cancelled by the user'.format(self.name))\n\n def delete(self):\n view = navigate_to(self, \"Details\")\n view.configuration.item_select('Remove Catalog', handle_alert=True)\n view = self.create_view(CatalogsView, wait='10s')\n view.flash.assert_no_error()\n view.flash.assert_success_message(\n 'Catalog \"{}\": Delete successful'.format(self.description or self.name))\n\n @property\n def exists(self):\n try:\n navigate_to(self, 'Details')\n return True\n except NameError:\n return False\n\n\[email protected]\nclass CatalogCollection(BaseCollection):\n \"\"\"A collection for the :py:class:`cfme.services.catalogs.catalog.Catalog`\"\"\"\n ENTITY = Catalog\n\n def create(self, name, description, items=None):\n \"\"\"Create a catalog.\n\n Args:\n name: The name of the catalog\n description: The description of the catalog\n items: Items in the catalog\n \"\"\"\n view = navigate_to(self, 'Add')\n view.fill({\n 'name': name,\n 'description': description,\n 'assign_catalog_items': items\n })\n view.add_button.click()\n catalog = self.instantiate(name=name, description=description, items=items)\n view = self.create_view(CatalogsView)\n assert view.is_displayed\n view.flash.assert_no_error()\n return catalog\n\n\[email protected](CatalogCollection)\nclass All(CFMENavigateStep):\n VIEW = CatalogsView\n prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')\n\n def step(self):\n self.prerequisite_view.navigation.select('Services', 'Catalogs')\n self.view.catalogs.tree.click_path(\"All Catalogs\")\n\n\[email protected](CatalogCollection)\nclass Add(CFMENavigateStep):\n VIEW = AddCatalogView\n prerequisite = NavigateToSibling('All')\n\n def step(self):\n self.prerequisite_view.configuration.item_select('Add a New Catalog')\n\n\[email protected](Catalog)\nclass Details(CFMENavigateStep):\n VIEW = DetailsCatalogView\n prerequisite = NavigateToAttribute('parent', 'All')\n\n def step(self):\n self.prerequisite_view.catalogs.tree.click_path(\"All Catalogs\", self.obj.name)\n\n\[email protected](Catalog)\nclass Edit(CFMENavigateStep):\n VIEW = EditCatalogView\n prerequisite = NavigateToSibling('Details')\n\n def step(self):\n self.prerequisite_view.configuration.item_select('Edit this Item')\n", "path": "cfme/services/catalogs/catalog.py"}], "after_files": [{"content": "import attr\n\nfrom widgetastic.utils import Parameter\nfrom widgetastic.widget import Text\nfrom widgetastic_manageiq import MultiBoxSelect\nfrom widgetastic_patternfly import Button, CandidateNotFound, Input\nfrom navmazing import NavigateToAttribute, NavigateToSibling\n\nfrom cfme.common import Taggable\nfrom cfme.modeling.base import BaseCollection, BaseEntity\nfrom cfme.utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to\nfrom cfme.utils.pretty import Pretty\nfrom cfme.utils.update import Updateable\nfrom cfme.utils.wait import wait_for\n\nfrom . 
import ServicesCatalogView\n\n\nclass CatalogsMultiBoxSelect(MultiBoxSelect):\n move_into_button = Button(title=Parameter(\"@move_into\"))\n move_from_button = Button(title=Parameter(\"@move_from\"))\n\n\nclass CatalogForm(ServicesCatalogView):\n title = Text('#explorer_title_text')\n\n name = Input(name='name')\n description = Input(name=\"description\")\n assign_catalog_items = CatalogsMultiBoxSelect(\n move_into=\"Move Selected buttons right\",\n move_from=\"Move Selected buttons left\",\n available_items=\"available_fields\",\n chosen_items=\"selected_fields\"\n )\n\n save_button = Button('Save')\n cancel_button = Button('Cancel')\n\n\nclass CatalogsView(ServicesCatalogView):\n title = Text(\"#explorer_title_text\")\n\n @property\n def is_displayed(self):\n return (\n self.in_explorer and\n self.catalogs.is_opened and\n self.catalogs.tree.currently_selected == [\"All Catalogs\"])\n\n\nclass DetailsCatalogView(ServicesCatalogView):\n title = Text(\"#explorer_title_text\")\n\n @property\n def is_displayed(self):\n return (\n self.in_explorer and self.catalogs.is_opened and\n self.title.text == 'Catalog \"{}\"'.format(self.context[\"object\"].name)\n )\n\n\nclass AddCatalogView(CatalogForm):\n\n add_button = Button(\"Add\")\n\n @property\n def is_displayed(self):\n return (\n self.in_explorer and self.catalogs.is_opened and\n self.title.text == 'Adding a new Catalog'\n )\n\n\nclass EditCatalogView(CatalogForm):\n\n save_button = Button('Save')\n reset_button = Button('Reset')\n\n @property\n def is_displayed(self):\n return (\n self.in_explorer and self.catalogs.is_opened and\n self.title.text == 'Editing Catalog \"{}\"'.format(self.context[\"object\"].name)\n )\n\n\[email protected]\nclass Catalog(BaseEntity, Updateable, Pretty, Taggable):\n\n name = attr.ib()\n description = attr.ib()\n items = attr.ib(default=None)\n\n def update(self, updates):\n view = navigate_to(self, 'Edit')\n changed = view.fill(updates)\n if changed:\n view.save_button.click()\n else:\n view.cancel_button.click()\n view = self.create_view(DetailsCatalogView, override=updates, wait='10s')\n view.flash.assert_no_error()\n if changed:\n view.flash.assert_message(\n 'Catalog \"{}\" was saved'.format(updates.get('name', self.name)))\n else:\n view.flash.assert_message(\n 'Edit of Catalog \"{}\" was cancelled by the user'.format(self.name))\n\n def delete(self):\n view = navigate_to(self, \"Details\")\n view.configuration.item_select('Remove Catalog', handle_alert=True)\n view = self.create_view(CatalogsView, wait='10s')\n view.flash.assert_no_error()\n view.flash.assert_success_message(\n 'Catalog \"{}\": Delete successful'.format(self.description or self.name))\n\n @property\n def exists(self):\n try:\n navigate_to(self, 'Details')\n return True\n except (NameError, CandidateNotFound):\n return False\n\n\[email protected]\nclass CatalogCollection(BaseCollection):\n \"\"\"A collection for the :py:class:`cfme.services.catalogs.catalog.Catalog`\"\"\"\n ENTITY = Catalog\n\n def create(self, name, description, items=None):\n \"\"\"Create a catalog.\n\n Args:\n name: The name of the catalog\n description: The description of the catalog\n items: Items in the catalog\n \"\"\"\n view = navigate_to(self, 'Add')\n view.fill({\n 'name': name,\n 'description': description,\n 'assign_catalog_items': items\n })\n view.add_button.click()\n catalog = self.instantiate(name=name, description=description, items=items)\n view = self.create_view(CatalogsView)\n assert view.is_displayed\n view.flash.assert_no_error()\n return 
catalog\n\n\[email protected](CatalogCollection)\nclass All(CFMENavigateStep):\n VIEW = CatalogsView\n prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')\n\n def step(self):\n self.prerequisite_view.navigation.select('Services', 'Catalogs')\n self.view.catalogs.tree.click_path(\"All Catalogs\")\n\n\[email protected](CatalogCollection)\nclass Add(CFMENavigateStep):\n VIEW = AddCatalogView\n prerequisite = NavigateToSibling('All')\n\n def step(self):\n self.prerequisite_view.configuration.item_select('Add a New Catalog')\n\n\[email protected](Catalog)\nclass Details(CFMENavigateStep):\n VIEW = DetailsCatalogView\n prerequisite = NavigateToAttribute('parent', 'All')\n\n def step(self):\n self.prerequisite_view.catalogs.tree.click_path(\"All Catalogs\", self.obj.name)\n\n\[email protected](Catalog)\nclass Edit(CFMENavigateStep):\n VIEW = EditCatalogView\n prerequisite = NavigateToSibling('Details')\n\n def step(self):\n self.prerequisite_view.configuration.item_select('Edit this Item')\n", "path": "cfme/services/catalogs/catalog.py"}]}
2,047
172
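The record above fixes `Catalog.exists` by widening the caught exceptions. A minimal, self-contained sketch of that pattern follows; `CandidateNotFound` and `navigate_to` here are toy stand-ins, not the real widgetastic/navmazing APIs:

```python
class CandidateNotFound(Exception):
    """Stand-in for widgetastic_patternfly.CandidateNotFound."""


def navigate_to(obj, destination):
    """Stand-in navigator: raises when the tree entry is missing."""
    if not obj.present:
        raise CandidateNotFound("no tree entry for %r" % obj.name)


class Catalog:
    def __init__(self, name, present=True):
        self.name = name
        self.present = present

    @property
    def exists(self):
        try:
            navigate_to(self, "Details")
            return True
        except (NameError, CandidateNotFound):
            # Both "view never resolved" (the NameError in the quoted code)
            # and "tree entry missing" mean the same thing to callers:
            # the catalog is gone, so return a boolean instead of raising.
            return False


assert Catalog("cat1").exists is True
assert Catalog("gone", present=False).exists is False
```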
gh_patches_debug_57271
rasdani/github-patches
git_diff
DataDog__dd-trace-py-984
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- 'async for' requires an object with __aiter__ method, got AIOTracedCursor ## Problem Using ddtrace and aiopg, if I do: ```python await cur.execute(query) async for value in cur: yield value ``` If my connection is not patched, I get: ``` TypeError: 'async for' requires an object with __aiter__ method, got AIOTracedCursor (...) File "path/to/my/file.py", line 241, in get_many async for value in cur: ``` (if my connection is not patched, it works) ## Analysis The cursor class is replaced with `AIOTracedCursor` which inherits `wrapt.ObjectProxy`. Problem is, while thanks to `ObjectProxy`, `AIOTracedCursor().__aiter__()` would most probably work and return whatever the real proxy would return, this is not enough for Python to accept that the cursor is an iterator. A small example with simple objects: ```python class A(): def iter(self): return iter([]) async def aiter(self): return iter([]) def __getattr__(self, attr): if attr.endswith("iter__"): return getattr(self, attr.strip("_")) a = A() ``` We implement `a.__iter__()` and `a.__aiter__()` but Python doesn't see it: ``` In [6]: a.__iter__() Out[6]: <list_iterator at 0x7fdff00de860> In [7]: a.__aiter__() Out[7]: <coroutine object A.aiter at 0x7fdff00ddba0> In [8]: async for e in a: print(e) --------------------------------------------------------------------------- TypeError Traceback (most recent call last) cell_name in async-def-wrapper() TypeError: 'async for' requires an object with __aiter__ method, got A In [9]: iter(a) --------------------------------------------------------------------------- TypeError Traceback (most recent call last) <ipython-input-9-2b64cb055077> in <module> ----> 1 iter(a) TypeError: 'A' object is not iterable ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `ddtrace/contrib/aiopg/connection.py` Content: ``` 1 import asyncio 2 from ddtrace.vendor import wrapt 3 4 from aiopg.utils import _ContextManager 5 6 from .. import dbapi 7 from ...constants import ANALYTICS_SAMPLE_RATE_KEY 8 from ...ext import sql, AppTypes 9 from ...pin import Pin 10 from ...settings import config 11 12 13 class AIOTracedCursor(wrapt.ObjectProxy): 14 """ TracedCursor wraps a psql cursor and traces its queries. """ 15 16 def __init__(self, cursor, pin): 17 super(AIOTracedCursor, self).__init__(cursor) 18 pin.onto(self) 19 name = pin.app or 'sql' 20 self._datadog_name = '%s.query' % name 21 22 @asyncio.coroutine 23 def _trace_method(self, method, resource, extra_tags, *args, **kwargs): 24 pin = Pin.get_from(self) 25 if not pin or not pin.enabled(): 26 result = yield from method(*args, **kwargs) 27 return result 28 service = pin.service 29 30 with pin.tracer.trace(self._datadog_name, service=service, 31 resource=resource) as s: 32 s.span_type = sql.TYPE 33 s.set_tag(sql.QUERY, resource) 34 s.set_tags(pin.tags) 35 s.set_tags(extra_tags) 36 37 # set analytics sample rate 38 s.set_tag( 39 ANALYTICS_SAMPLE_RATE_KEY, 40 config.aiopg.get_analytics_sample_rate() 41 ) 42 43 try: 44 result = yield from method(*args, **kwargs) 45 return result 46 finally: 47 s.set_metric('db.rowcount', self.rowcount) 48 49 @asyncio.coroutine 50 def executemany(self, query, *args, **kwargs): 51 # FIXME[matt] properly handle kwargs here. arg names can be different 52 # with different libs. 
53 result = yield from self._trace_method( 54 self.__wrapped__.executemany, query, {'sql.executemany': 'true'}, 55 query, *args, **kwargs) 56 return result 57 58 @asyncio.coroutine 59 def execute(self, query, *args, **kwargs): 60 result = yield from self._trace_method( 61 self.__wrapped__.execute, query, {}, query, *args, **kwargs) 62 return result 63 64 @asyncio.coroutine 65 def callproc(self, proc, args): 66 result = yield from self._trace_method( 67 self.__wrapped__.callproc, proc, {}, proc, args) 68 return result 69 70 71 class AIOTracedConnection(wrapt.ObjectProxy): 72 """ TracedConnection wraps a Connection with tracing code. """ 73 74 def __init__(self, conn, pin=None, cursor_cls=AIOTracedCursor): 75 super(AIOTracedConnection, self).__init__(conn) 76 name = dbapi._get_vendor(conn) 77 db_pin = pin or Pin(service=name, app=name, app_type=AppTypes.db) 78 db_pin.onto(self) 79 # wrapt requires prefix of `_self` for attributes that are only in the 80 # proxy (since some of our source objects will use `__slots__`) 81 self._self_cursor_cls = cursor_cls 82 83 def cursor(self, *args, **kwargs): 84 # unfortunately we also need to patch this method as otherwise "self" 85 # ends up being the aiopg connection object 86 coro = self._cursor(*args, **kwargs) 87 return _ContextManager(coro) 88 89 @asyncio.coroutine 90 def _cursor(self, *args, **kwargs): 91 cursor = yield from self.__wrapped__._cursor(*args, **kwargs) 92 pin = Pin.get_from(self) 93 if not pin: 94 return cursor 95 return self._self_cursor_cls(cursor, pin) 96 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/ddtrace/contrib/aiopg/connection.py b/ddtrace/contrib/aiopg/connection.py --- a/ddtrace/contrib/aiopg/connection.py +++ b/ddtrace/contrib/aiopg/connection.py @@ -67,6 +67,9 @@ self.__wrapped__.callproc, proc, {}, proc, args) return result + def __aiter__(self): + return self.__wrapped__.__aiter__() + class AIOTracedConnection(wrapt.ObjectProxy): """ TracedConnection wraps a Connection with tracing code. """
{"golden_diff": "diff --git a/ddtrace/contrib/aiopg/connection.py b/ddtrace/contrib/aiopg/connection.py\n--- a/ddtrace/contrib/aiopg/connection.py\n+++ b/ddtrace/contrib/aiopg/connection.py\n@@ -67,6 +67,9 @@\n self.__wrapped__.callproc, proc, {}, proc, args)\n return result\n \n+ def __aiter__(self):\n+ return self.__wrapped__.__aiter__()\n+\n \n class AIOTracedConnection(wrapt.ObjectProxy):\n \"\"\" TracedConnection wraps a Connection with tracing code. \"\"\"\n", "issue": "'async for' requires an object with __aiter__ method, got AIOTracedCursor\n## Problem\r\nUsing ddtrace and aiopg, if I do:\r\n\r\n```python\r\nawait cur.execute(query)\r\nasync for value in cur:\r\n yield value\r\n```\r\nIf my connection is not patched, I get:\r\n```\r\nTypeError: 'async for' requires an object with __aiter__ method, got AIOTracedCursor\r\n(...)\r\n File \"path/to/my/file.py\", line 241, in get_many\r\n async for value in cur:\r\n```\r\n(if my connection is not patched, it works)\r\n\r\n## Analysis\r\n\r\nThe cursor class is replaced with `AIOTracedCursor` which inherits `wrapt.ObjectProxy`.\r\n\r\nProblem is, while thanks to `ObjectProxy`, `AIOTracedCursor().__aiter__()` would most probably work and return whatever the real proxy would return, this is not enough for Python to accept that the cursor is an iterator.\r\n\r\nA small example with simple objects:\r\n```python\r\nclass A():\r\n def iter(self):\r\n return iter([])\r\n\r\n async def aiter(self):\r\n return iter([])\r\n\r\n def __getattr__(self, attr):\r\n if attr.endswith(\"iter__\"):\r\n return getattr(self, attr.strip(\"_\"))\r\na = A()\r\n```\r\nWe implement `a.__iter__()` and `a.__aiter__()` but Python doesn't see it:\r\n```\r\nIn [6]: a.__iter__() \r\nOut[6]: <list_iterator at 0x7fdff00de860>\r\n\r\nIn [7]: a.__aiter__() \r\nOut[7]: <coroutine object A.aiter at 0x7fdff00ddba0>\r\n\r\nIn [8]: async for e in a: print(e) \r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\ncell_name in async-def-wrapper()\r\n\r\nTypeError: 'async for' requires an object with __aiter__ method, got A \r\n\r\nIn [9]: iter(a) \r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n<ipython-input-9-2b64cb055077> in <module>\r\n----> 1 iter(a)\r\n\r\nTypeError: 'A' object is not iterable\r\n\r\n```\n", "before_files": [{"content": "import asyncio\nfrom ddtrace.vendor import wrapt\n\nfrom aiopg.utils import _ContextManager\n\nfrom .. import dbapi\nfrom ...constants import ANALYTICS_SAMPLE_RATE_KEY\nfrom ...ext import sql, AppTypes\nfrom ...pin import Pin\nfrom ...settings import config\n\n\nclass AIOTracedCursor(wrapt.ObjectProxy):\n \"\"\" TracedCursor wraps a psql cursor and traces its queries. 
\"\"\"\n\n def __init__(self, cursor, pin):\n super(AIOTracedCursor, self).__init__(cursor)\n pin.onto(self)\n name = pin.app or 'sql'\n self._datadog_name = '%s.query' % name\n\n @asyncio.coroutine\n def _trace_method(self, method, resource, extra_tags, *args, **kwargs):\n pin = Pin.get_from(self)\n if not pin or not pin.enabled():\n result = yield from method(*args, **kwargs)\n return result\n service = pin.service\n\n with pin.tracer.trace(self._datadog_name, service=service,\n resource=resource) as s:\n s.span_type = sql.TYPE\n s.set_tag(sql.QUERY, resource)\n s.set_tags(pin.tags)\n s.set_tags(extra_tags)\n\n # set analytics sample rate\n s.set_tag(\n ANALYTICS_SAMPLE_RATE_KEY,\n config.aiopg.get_analytics_sample_rate()\n )\n\n try:\n result = yield from method(*args, **kwargs)\n return result\n finally:\n s.set_metric('db.rowcount', self.rowcount)\n\n @asyncio.coroutine\n def executemany(self, query, *args, **kwargs):\n # FIXME[matt] properly handle kwargs here. arg names can be different\n # with different libs.\n result = yield from self._trace_method(\n self.__wrapped__.executemany, query, {'sql.executemany': 'true'},\n query, *args, **kwargs)\n return result\n\n @asyncio.coroutine\n def execute(self, query, *args, **kwargs):\n result = yield from self._trace_method(\n self.__wrapped__.execute, query, {}, query, *args, **kwargs)\n return result\n\n @asyncio.coroutine\n def callproc(self, proc, args):\n result = yield from self._trace_method(\n self.__wrapped__.callproc, proc, {}, proc, args)\n return result\n\n\nclass AIOTracedConnection(wrapt.ObjectProxy):\n \"\"\" TracedConnection wraps a Connection with tracing code. \"\"\"\n\n def __init__(self, conn, pin=None, cursor_cls=AIOTracedCursor):\n super(AIOTracedConnection, self).__init__(conn)\n name = dbapi._get_vendor(conn)\n db_pin = pin or Pin(service=name, app=name, app_type=AppTypes.db)\n db_pin.onto(self)\n # wrapt requires prefix of `_self` for attributes that are only in the\n # proxy (since some of our source objects will use `__slots__`)\n self._self_cursor_cls = cursor_cls\n\n def cursor(self, *args, **kwargs):\n # unfortunately we also need to patch this method as otherwise \"self\"\n # ends up being the aiopg connection object\n coro = self._cursor(*args, **kwargs)\n return _ContextManager(coro)\n\n @asyncio.coroutine\n def _cursor(self, *args, **kwargs):\n cursor = yield from self.__wrapped__._cursor(*args, **kwargs)\n pin = Pin.get_from(self)\n if not pin:\n return cursor\n return self._self_cursor_cls(cursor, pin)\n", "path": "ddtrace/contrib/aiopg/connection.py"}], "after_files": [{"content": "import asyncio\nfrom ddtrace.vendor import wrapt\n\nfrom aiopg.utils import _ContextManager\n\nfrom .. import dbapi\nfrom ...constants import ANALYTICS_SAMPLE_RATE_KEY\nfrom ...ext import sql, AppTypes\nfrom ...pin import Pin\nfrom ...settings import config\n\n\nclass AIOTracedCursor(wrapt.ObjectProxy):\n \"\"\" TracedCursor wraps a psql cursor and traces its queries. 
\"\"\"\n\n def __init__(self, cursor, pin):\n super(AIOTracedCursor, self).__init__(cursor)\n pin.onto(self)\n name = pin.app or 'sql'\n self._datadog_name = '%s.query' % name\n\n @asyncio.coroutine\n def _trace_method(self, method, resource, extra_tags, *args, **kwargs):\n pin = Pin.get_from(self)\n if not pin or not pin.enabled():\n result = yield from method(*args, **kwargs)\n return result\n service = pin.service\n\n with pin.tracer.trace(self._datadog_name, service=service,\n resource=resource) as s:\n s.span_type = sql.TYPE\n s.set_tag(sql.QUERY, resource)\n s.set_tags(pin.tags)\n s.set_tags(extra_tags)\n\n # set analytics sample rate\n s.set_tag(\n ANALYTICS_SAMPLE_RATE_KEY,\n config.aiopg.get_analytics_sample_rate()\n )\n\n try:\n result = yield from method(*args, **kwargs)\n return result\n finally:\n s.set_metric('db.rowcount', self.rowcount)\n\n @asyncio.coroutine\n def executemany(self, query, *args, **kwargs):\n # FIXME[matt] properly handle kwargs here. arg names can be different\n # with different libs.\n result = yield from self._trace_method(\n self.__wrapped__.executemany, query, {'sql.executemany': 'true'},\n query, *args, **kwargs)\n return result\n\n @asyncio.coroutine\n def execute(self, query, *args, **kwargs):\n result = yield from self._trace_method(\n self.__wrapped__.execute, query, {}, query, *args, **kwargs)\n return result\n\n @asyncio.coroutine\n def callproc(self, proc, args):\n result = yield from self._trace_method(\n self.__wrapped__.callproc, proc, {}, proc, args)\n return result\n\n def __aiter__(self):\n return self.__wrapped__.__aiter__()\n\n\nclass AIOTracedConnection(wrapt.ObjectProxy):\n \"\"\" TracedConnection wraps a Connection with tracing code. \"\"\"\n\n def __init__(self, conn, pin=None, cursor_cls=AIOTracedCursor):\n super(AIOTracedConnection, self).__init__(conn)\n name = dbapi._get_vendor(conn)\n db_pin = pin or Pin(service=name, app=name, app_type=AppTypes.db)\n db_pin.onto(self)\n # wrapt requires prefix of `_self` for attributes that are only in the\n # proxy (since some of our source objects will use `__slots__`)\n self._self_cursor_cls = cursor_cls\n\n def cursor(self, *args, **kwargs):\n # unfortunately we also need to patch this method as otherwise \"self\"\n # ends up being the aiopg connection object\n coro = self._cursor(*args, **kwargs)\n return _ContextManager(coro)\n\n @asyncio.coroutine\n def _cursor(self, *args, **kwargs):\n cursor = yield from self.__wrapped__._cursor(*args, **kwargs)\n pin = Pin.get_from(self)\n if not pin:\n return cursor\n return self._self_cursor_cls(cursor, pin)\n", "path": "ddtrace/contrib/aiopg/connection.py"}]}
1,735
129
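The analysis in the record above turns on a CPython rule worth demonstrating concretely: special methods such as `__aiter__` are looked up on the type, not the instance, so a `__getattr__`-based proxy (which is how `wrapt.ObjectProxy` forwards attributes) is never consulted for them. A self-contained sketch with toy stand-ins for the traced cursor:

```python
import asyncio


class AsyncRange:
    """Toy async iterable standing in for the real aiopg cursor."""

    def __init__(self, n):
        self._it = iter(range(n))

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            return next(self._it)
        except StopIteration:
            raise StopAsyncIteration


class Proxy:
    """Toy stand-in for ObjectProxy-style attribute forwarding."""

    def __init__(self, wrapped):
        self._wrapped = wrapped

    def __getattr__(self, name):
        # Plain attribute access is forwarded fine, but CPython never
        # routes special-method *lookup* through __getattr__.
        return getattr(self._wrapped, name)

    def __aiter__(self):
        # The explicit delegation the golden diff adds. Without this
        # method on the class, `async for` raises TypeError even though
        # an explicit proxy.__aiter__() call would still resolve through
        # __getattr__ (the asymmetry shown in the issue above).
        return self._wrapped.__aiter__()


async def main():
    assert [v async for v in Proxy(AsyncRange(3))] == [0, 1, 2]


asyncio.run(main())
```

Removing `Proxy.__aiter__` here reproduces the issue's `TypeError: 'async for' requires an object with __aiter__ method`.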
gh_patches_debug_29199
rasdani/github-patches
git_diff
electricitymaps__electricitymaps-contrib-1361
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- IN-AP has changed its data url The new link is https://core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx (same page layout I think). Old link returns 404. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `parsers/IN_AP.py` Content: ``` 1 #!/usr/bin/env python3 2 3 from requests import Session 4 from .lib import zonekey, IN, web 5 6 7 def fetch_production(zone_key='IN-AP', session=None, target_datetime=None, logger=None): 8 """Fetch Andhra Pradesh production""" 9 if target_datetime: 10 raise NotImplementedError('This parser is not yet able to parse past dates') 11 12 zonekey.assert_zone_key(zone_key, 'IN-AP') 13 14 html = web.get_response_soup(zone_key, 15 'http://www.core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session) 16 india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm') 17 18 hydro_value = IN.read_value_from_span_id(html, 'lblHydel') 19 gas_value = IN.read_value_from_span_id(html, 'lblGas') 20 wind_value = IN.read_value_from_span_id(html, 'lblWind') 21 solar_value = IN.read_value_from_span_id(html, 'lblSolar') 22 23 # All thermal centrals are considered coal based production 24 # https://en.wikipedia.org/wiki/Power_sector_of_Andhra_Pradesh 25 thermal_value = IN.read_value_from_span_id(html, 'lblThermal') 26 27 cgs_value = IN.read_value_from_span_id(html, 'lblCGS') 28 ipp_value = IN.read_value_from_span_id(html, 'lblIPPS') 29 30 data = { 31 'zoneKey': zone_key, 32 'datetime': india_date.datetime, 33 'production': { 34 'biomass': 0.0, 35 'coal': thermal_value, 36 'gas': gas_value, 37 'hydro': hydro_value, 38 'nuclear': 0.0, 39 'oil': 0.0, 40 'solar': solar_value, 41 'wind': wind_value, 42 'geothermal': 0.0, 43 'unknown': round(cgs_value + ipp_value, 2) 44 }, 45 'storage': { 46 'hydro': 0.0 47 }, 48 'source': 'core.ap.gov.in', 49 } 50 51 return data 52 53 54 def fetch_consumption(zone_key='IN-AP', session=None, target_datetime=None, logger=None): 55 """Fetch Andhra Pradesh consumption""" 56 if target_datetime: 57 raise NotImplementedError('This parser is not yet able to parse past dates') 58 59 zonekey.assert_zone_key(zone_key, 'IN-AP') 60 61 html = web.get_response_soup(zone_key, 62 'http://www.core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session) 63 india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm') 64 65 demand_value = IN.read_value_from_span_id(html, 'lblGridDemand') 66 67 data = { 68 'zoneKey': zone_key, 69 'datetime': india_date.datetime, 70 'consumption': demand_value, 71 'source': 'core.ap.gov.in' 72 } 73 74 return data 75 76 77 if __name__ == '__main__': 78 session = Session() 79 print(fetch_production('IN-AP', session)) 80 print(fetch_consumption('IN-AP', session)) 81 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/parsers/IN_AP.py b/parsers/IN_AP.py --- a/parsers/IN_AP.py +++ b/parsers/IN_AP.py @@ -3,7 +3,6 @@ from requests import Session from .lib import zonekey, IN, web - def fetch_production(zone_key='IN-AP', session=None, target_datetime=None, logger=None): """Fetch Andhra Pradesh production""" if target_datetime: @@ -12,7 +11,7 @@ zonekey.assert_zone_key(zone_key, 'IN-AP') html = web.get_response_soup(zone_key, - 'http://www.core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session) + 'https://core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session) india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm') hydro_value = IN.read_value_from_span_id(html, 'lblHydel') @@ -59,7 +58,7 @@ zonekey.assert_zone_key(zone_key, 'IN-AP') html = web.get_response_soup(zone_key, - 'http://www.core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session) + 'https://core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session) india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm') demand_value = IN.read_value_from_span_id(html, 'lblGridDemand')
{"golden_diff": "diff --git a/parsers/IN_AP.py b/parsers/IN_AP.py\n--- a/parsers/IN_AP.py\n+++ b/parsers/IN_AP.py\n@@ -3,7 +3,6 @@\n from requests import Session\n from .lib import zonekey, IN, web\n \n-\n def fetch_production(zone_key='IN-AP', session=None, target_datetime=None, logger=None):\n \"\"\"Fetch Andhra Pradesh production\"\"\"\n if target_datetime:\n@@ -12,7 +11,7 @@\n zonekey.assert_zone_key(zone_key, 'IN-AP')\n \n html = web.get_response_soup(zone_key,\n- 'http://www.core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session)\n+ 'https://core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session)\n india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm')\n \n hydro_value = IN.read_value_from_span_id(html, 'lblHydel')\n@@ -59,7 +58,7 @@\n zonekey.assert_zone_key(zone_key, 'IN-AP')\n \n html = web.get_response_soup(zone_key,\n- 'http://www.core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session)\n+ 'https://core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session)\n india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm')\n \n demand_value = IN.read_value_from_span_id(html, 'lblGridDemand')\n", "issue": "IN-AP has changed its data url\nThe new link is https://core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx (same page layout I think). Old link returns 404.\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nfrom requests import Session\nfrom .lib import zonekey, IN, web\n\n\ndef fetch_production(zone_key='IN-AP', session=None, target_datetime=None, logger=None):\n \"\"\"Fetch Andhra Pradesh production\"\"\"\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n\n zonekey.assert_zone_key(zone_key, 'IN-AP')\n\n html = web.get_response_soup(zone_key,\n 'http://www.core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session)\n india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm')\n\n hydro_value = IN.read_value_from_span_id(html, 'lblHydel')\n gas_value = IN.read_value_from_span_id(html, 'lblGas')\n wind_value = IN.read_value_from_span_id(html, 'lblWind')\n solar_value = IN.read_value_from_span_id(html, 'lblSolar')\n\n # All thermal centrals are considered coal based production\n # https://en.wikipedia.org/wiki/Power_sector_of_Andhra_Pradesh\n thermal_value = IN.read_value_from_span_id(html, 'lblThermal')\n\n cgs_value = IN.read_value_from_span_id(html, 'lblCGS')\n ipp_value = IN.read_value_from_span_id(html, 'lblIPPS')\n\n data = {\n 'zoneKey': zone_key,\n 'datetime': india_date.datetime,\n 'production': {\n 'biomass': 0.0,\n 'coal': thermal_value,\n 'gas': gas_value,\n 'hydro': hydro_value,\n 'nuclear': 0.0,\n 'oil': 0.0,\n 'solar': solar_value,\n 'wind': wind_value,\n 'geothermal': 0.0,\n 'unknown': round(cgs_value + ipp_value, 2)\n },\n 'storage': {\n 'hydro': 0.0\n },\n 'source': 'core.ap.gov.in',\n }\n\n return data\n\n\ndef fetch_consumption(zone_key='IN-AP', session=None, target_datetime=None, logger=None):\n \"\"\"Fetch Andhra Pradesh consumption\"\"\"\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n\n zonekey.assert_zone_key(zone_key, 'IN-AP')\n\n html = web.get_response_soup(zone_key,\n 'http://www.core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session)\n india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm')\n\n 
demand_value = IN.read_value_from_span_id(html, 'lblGridDemand')\n\n data = {\n 'zoneKey': zone_key,\n 'datetime': india_date.datetime,\n 'consumption': demand_value,\n 'source': 'core.ap.gov.in'\n }\n\n return data\n\n\nif __name__ == '__main__':\n session = Session()\n print(fetch_production('IN-AP', session))\n print(fetch_consumption('IN-AP', session))\n", "path": "parsers/IN_AP.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\nfrom requests import Session\nfrom .lib import zonekey, IN, web\n\ndef fetch_production(zone_key='IN-AP', session=None, target_datetime=None, logger=None):\n \"\"\"Fetch Andhra Pradesh production\"\"\"\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n\n zonekey.assert_zone_key(zone_key, 'IN-AP')\n\n html = web.get_response_soup(zone_key,\n 'https://core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session)\n india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm')\n\n hydro_value = IN.read_value_from_span_id(html, 'lblHydel')\n gas_value = IN.read_value_from_span_id(html, 'lblGas')\n wind_value = IN.read_value_from_span_id(html, 'lblWind')\n solar_value = IN.read_value_from_span_id(html, 'lblSolar')\n\n # All thermal centrals are considered coal based production\n # https://en.wikipedia.org/wiki/Power_sector_of_Andhra_Pradesh\n thermal_value = IN.read_value_from_span_id(html, 'lblThermal')\n\n cgs_value = IN.read_value_from_span_id(html, 'lblCGS')\n ipp_value = IN.read_value_from_span_id(html, 'lblIPPS')\n\n data = {\n 'zoneKey': zone_key,\n 'datetime': india_date.datetime,\n 'production': {\n 'biomass': 0.0,\n 'coal': thermal_value,\n 'gas': gas_value,\n 'hydro': hydro_value,\n 'nuclear': 0.0,\n 'oil': 0.0,\n 'solar': solar_value,\n 'wind': wind_value,\n 'geothermal': 0.0,\n 'unknown': round(cgs_value + ipp_value, 2)\n },\n 'storage': {\n 'hydro': 0.0\n },\n 'source': 'core.ap.gov.in',\n }\n\n return data\n\n\ndef fetch_consumption(zone_key='IN-AP', session=None, target_datetime=None, logger=None):\n \"\"\"Fetch Andhra Pradesh consumption\"\"\"\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n\n zonekey.assert_zone_key(zone_key, 'IN-AP')\n\n html = web.get_response_soup(zone_key,\n 'https://core.ap.gov.in/CMDashBoard/UserInterface/Power/PowerReport.aspx', session)\n india_date = IN.read_datetime_from_span_id(html, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm')\n\n demand_value = IN.read_value_from_span_id(html, 'lblGridDemand')\n\n data = {\n 'zoneKey': zone_key,\n 'datetime': india_date.datetime,\n 'consumption': demand_value,\n 'source': 'core.ap.gov.in'\n }\n\n return data\n\n\nif __name__ == '__main__':\n session = Session()\n print(fetch_production('IN-AP', session))\n print(fetch_consumption('IN-AP', session))\n", "path": "parsers/IN_AP.py"}]}
1,142
357
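Since the golden diff above edits the same endpoint string in two functions, a natural optional follow-up (not part of the diff; `DATA_URL` and `get_dashboard_soup` are assumed names) is to hoist the URL into one module-level constant so the next endpoint move touches a single line:

```python
# Assumed refactoring sketch only; the repository's actual parser keeps
# the literal in both fetchers.
DATA_URL = ('https://core.ap.gov.in/CMDashBoard/UserInterface/'
            'Power/PowerReport.aspx')


def get_dashboard_soup(zone_key, session, fetch_soup):
    # fetch_soup stands in for parsers.lib.web.get_response_soup,
    # injected here so this sketch imports cleanly outside the repo.
    return fetch_soup(zone_key, DATA_URL, session)
```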
gh_patches_debug_16923
rasdani/github-patches
git_diff
Mailu__Mailu-1130
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Unnecessary assignment on `HOST_WEBMAIL` We came across another piece of garbage: https://github.com/Mailu/Mailu/blob/f3f0c3190be9ab9b53a29c5b0326fc9a4602df46/core/nginx/config.py#L19 https://github.com/Mailu/Mailu/blob/f3f0c3190be9ab9b53a29c5b0326fc9a4602df46/core/nginx/config.py#L22 --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `core/nginx/config.py` Content: ``` 1 #!/usr/bin/python3 2 3 import os 4 import logging as log 5 import sys 6 from socrate import system, conf 7 8 args = os.environ.copy() 9 10 log.basicConfig(stream=sys.stderr, level=args.get("LOG_LEVEL", "WARNING")) 11 12 # Get the first DNS server 13 with open("/etc/resolv.conf") as handle: 14 content = handle.read().split() 15 args["RESOLVER"] = content[content.index("nameserver") + 1] 16 17 args["ADMIN_ADDRESS"] = system.resolve_address(args.get("HOST_ADMIN", "admin")) 18 args["ANTISPAM_ADDRESS"] = system.resolve_address(args.get("HOST_ANTISPAM", "antispam:11334")) 19 args["HOST_WEBMAIL"] = args.get("HOST_WEBMAIL", "webmail") 20 if args["WEBMAIL"] != "none": 21 args["WEBMAIL_ADDRESS"] = system.resolve_address(args.get("HOST_WEBMAIL")) 22 args["HOST_WEBDAV"] = args.get("HOST_WEBDAV", "webdav:5232") 23 if args["WEBDAV"] != "none": 24 args["WEBDAV_ADDRESS"] = system.resolve_address(args.get("HOST_WEBDAV")) 25 26 # TLS configuration 27 cert_name = os.getenv("TLS_CERT_FILENAME", default="cert.pem") 28 keypair_name = os.getenv("TLS_KEYPAIR_FILENAME", default="key.pem") 29 args["TLS"] = { 30 "cert": ("/certs/%s" % cert_name, "/certs/%s" % keypair_name), 31 "letsencrypt": ("/certs/letsencrypt/live/mailu/fullchain.pem", 32 "/certs/letsencrypt/live/mailu/privkey.pem"), 33 "mail": ("/certs/%s" % cert_name, "/certs/%s" % keypair_name), 34 "mail-letsencrypt": ("/certs/letsencrypt/live/mailu/fullchain.pem", 35 "/certs/letsencrypt/live/mailu/privkey.pem"), 36 "notls": None 37 }[args["TLS_FLAVOR"]] 38 39 if args["TLS"] and not all(os.path.exists(file_path) for file_path in args["TLS"]): 40 print("Missing cert or key file, disabling TLS") 41 args["TLS_ERROR"] = "yes" 42 43 # Build final configuration paths 44 conf.jinja("/conf/tls.conf", args, "/etc/nginx/tls.conf") 45 conf.jinja("/conf/proxy.conf", args, "/etc/nginx/proxy.conf") 46 conf.jinja("/conf/nginx.conf", args, "/etc/nginx/nginx.conf") 47 if os.path.exists("/var/run/nginx.pid"): 48 os.system("nginx -s reload") 49 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/core/nginx/config.py b/core/nginx/config.py --- a/core/nginx/config.py +++ b/core/nginx/config.py @@ -16,12 +16,10 @@ args["ADMIN_ADDRESS"] = system.resolve_address(args.get("HOST_ADMIN", "admin")) args["ANTISPAM_ADDRESS"] = system.resolve_address(args.get("HOST_ANTISPAM", "antispam:11334")) -args["HOST_WEBMAIL"] = args.get("HOST_WEBMAIL", "webmail") if args["WEBMAIL"] != "none": - args["WEBMAIL_ADDRESS"] = system.resolve_address(args.get("HOST_WEBMAIL")) -args["HOST_WEBDAV"] = args.get("HOST_WEBDAV", "webdav:5232") + args["WEBMAIL_ADDRESS"] = system.resolve_address(args.get("HOST_WEBMAIL", "webmail")) if args["WEBDAV"] != "none": - args["WEBDAV_ADDRESS"] = system.resolve_address(args.get("HOST_WEBDAV")) + args["WEBDAV_ADDRESS"] = system.resolve_address(args.get("HOST_WEBDAV", "webdav:5232")) # TLS configuration cert_name = os.getenv("TLS_CERT_FILENAME", default="cert.pem")
{"golden_diff": "diff --git a/core/nginx/config.py b/core/nginx/config.py\n--- a/core/nginx/config.py\n+++ b/core/nginx/config.py\n@@ -16,12 +16,10 @@\n \n args[\"ADMIN_ADDRESS\"] = system.resolve_address(args.get(\"HOST_ADMIN\", \"admin\"))\n args[\"ANTISPAM_ADDRESS\"] = system.resolve_address(args.get(\"HOST_ANTISPAM\", \"antispam:11334\"))\n-args[\"HOST_WEBMAIL\"] = args.get(\"HOST_WEBMAIL\", \"webmail\")\n if args[\"WEBMAIL\"] != \"none\":\n- args[\"WEBMAIL_ADDRESS\"] = system.resolve_address(args.get(\"HOST_WEBMAIL\"))\n-args[\"HOST_WEBDAV\"] = args.get(\"HOST_WEBDAV\", \"webdav:5232\")\n+ args[\"WEBMAIL_ADDRESS\"] = system.resolve_address(args.get(\"HOST_WEBMAIL\", \"webmail\"))\n if args[\"WEBDAV\"] != \"none\":\n- args[\"WEBDAV_ADDRESS\"] = system.resolve_address(args.get(\"HOST_WEBDAV\"))\n+ args[\"WEBDAV_ADDRESS\"] = system.resolve_address(args.get(\"HOST_WEBDAV\", \"webdav:5232\"))\n \n # TLS configuration\n cert_name = os.getenv(\"TLS_CERT_FILENAME\", default=\"cert.pem\")\n", "issue": "Unnecessary assignment on `HOST_WEBMAIL`\nWe came across another piece of garbage:\r\n\r\nhttps://github.com/Mailu/Mailu/blob/f3f0c3190be9ab9b53a29c5b0326fc9a4602df46/core/nginx/config.py#L19\r\n\r\nhttps://github.com/Mailu/Mailu/blob/f3f0c3190be9ab9b53a29c5b0326fc9a4602df46/core/nginx/config.py#L22\n", "before_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport logging as log\nimport sys\nfrom socrate import system, conf\n\nargs = os.environ.copy()\n\nlog.basicConfig(stream=sys.stderr, level=args.get(\"LOG_LEVEL\", \"WARNING\"))\n\n# Get the first DNS server\nwith open(\"/etc/resolv.conf\") as handle:\n content = handle.read().split()\n args[\"RESOLVER\"] = content[content.index(\"nameserver\") + 1]\n\nargs[\"ADMIN_ADDRESS\"] = system.resolve_address(args.get(\"HOST_ADMIN\", \"admin\"))\nargs[\"ANTISPAM_ADDRESS\"] = system.resolve_address(args.get(\"HOST_ANTISPAM\", \"antispam:11334\"))\nargs[\"HOST_WEBMAIL\"] = args.get(\"HOST_WEBMAIL\", \"webmail\")\nif args[\"WEBMAIL\"] != \"none\":\n args[\"WEBMAIL_ADDRESS\"] = system.resolve_address(args.get(\"HOST_WEBMAIL\"))\nargs[\"HOST_WEBDAV\"] = args.get(\"HOST_WEBDAV\", \"webdav:5232\")\nif args[\"WEBDAV\"] != \"none\":\n args[\"WEBDAV_ADDRESS\"] = system.resolve_address(args.get(\"HOST_WEBDAV\"))\n\n# TLS configuration\ncert_name = os.getenv(\"TLS_CERT_FILENAME\", default=\"cert.pem\")\nkeypair_name = os.getenv(\"TLS_KEYPAIR_FILENAME\", default=\"key.pem\")\nargs[\"TLS\"] = {\n \"cert\": (\"/certs/%s\" % cert_name, \"/certs/%s\" % keypair_name),\n \"letsencrypt\": (\"/certs/letsencrypt/live/mailu/fullchain.pem\",\n \"/certs/letsencrypt/live/mailu/privkey.pem\"),\n \"mail\": (\"/certs/%s\" % cert_name, \"/certs/%s\" % keypair_name),\n \"mail-letsencrypt\": (\"/certs/letsencrypt/live/mailu/fullchain.pem\",\n \"/certs/letsencrypt/live/mailu/privkey.pem\"),\n \"notls\": None\n}[args[\"TLS_FLAVOR\"]]\n\nif args[\"TLS\"] and not all(os.path.exists(file_path) for file_path in args[\"TLS\"]):\n print(\"Missing cert or key file, disabling TLS\")\n args[\"TLS_ERROR\"] = \"yes\"\n\n# Build final configuration paths\nconf.jinja(\"/conf/tls.conf\", args, \"/etc/nginx/tls.conf\")\nconf.jinja(\"/conf/proxy.conf\", args, \"/etc/nginx/proxy.conf\")\nconf.jinja(\"/conf/nginx.conf\", args, \"/etc/nginx/nginx.conf\")\nif os.path.exists(\"/var/run/nginx.pid\"):\n os.system(\"nginx -s reload\")\n", "path": "core/nginx/config.py"}], "after_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport logging as log\nimport sys\nfrom socrate import system, 
conf\n\nargs = os.environ.copy()\n\nlog.basicConfig(stream=sys.stderr, level=args.get(\"LOG_LEVEL\", \"WARNING\"))\n\n# Get the first DNS server\nwith open(\"/etc/resolv.conf\") as handle:\n content = handle.read().split()\n args[\"RESOLVER\"] = content[content.index(\"nameserver\") + 1]\n\nargs[\"ADMIN_ADDRESS\"] = system.resolve_address(args.get(\"HOST_ADMIN\", \"admin\"))\nargs[\"ANTISPAM_ADDRESS\"] = system.resolve_address(args.get(\"HOST_ANTISPAM\", \"antispam:11334\"))\nif args[\"WEBMAIL\"] != \"none\":\n args[\"WEBMAIL_ADDRESS\"] = system.resolve_address(args.get(\"HOST_WEBMAIL\", \"webmail\"))\nif args[\"WEBDAV\"] != \"none\":\n args[\"WEBDAV_ADDRESS\"] = system.resolve_address(args.get(\"HOST_WEBDAV\", \"webdav:5232\"))\n\n# TLS configuration\ncert_name = os.getenv(\"TLS_CERT_FILENAME\", default=\"cert.pem\")\nkeypair_name = os.getenv(\"TLS_KEYPAIR_FILENAME\", default=\"key.pem\")\nargs[\"TLS\"] = {\n \"cert\": (\"/certs/%s\" % cert_name, \"/certs/%s\" % keypair_name),\n \"letsencrypt\": (\"/certs/letsencrypt/live/mailu/fullchain.pem\",\n \"/certs/letsencrypt/live/mailu/privkey.pem\"),\n \"mail\": (\"/certs/%s\" % cert_name, \"/certs/%s\" % keypair_name),\n \"mail-letsencrypt\": (\"/certs/letsencrypt/live/mailu/fullchain.pem\",\n \"/certs/letsencrypt/live/mailu/privkey.pem\"),\n \"notls\": None\n}[args[\"TLS_FLAVOR\"]]\n\nif args[\"TLS\"] and not all(os.path.exists(file_path) for file_path in args[\"TLS\"]):\n print(\"Missing cert or key file, disabling TLS\")\n args[\"TLS_ERROR\"] = \"yes\"\n\n# Build final configuration paths\nconf.jinja(\"/conf/tls.conf\", args, \"/etc/nginx/tls.conf\")\nconf.jinja(\"/conf/proxy.conf\", args, \"/etc/nginx/proxy.conf\")\nconf.jinja(\"/conf/nginx.conf\", args, \"/etc/nginx/nginx.conf\")\nif os.path.exists(\"/var/run/nginx.pid\"):\n os.system(\"nginx -s reload\")\n", "path": "core/nginx/config.py"}]}
1,027
272
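The Mailu diff above is a pure simplification: the default moves into the single `dict.get()` call site and the dead intermediate assignment disappears. A tiny runnable sketch of the before/after, with `resolve_address` replaced by a trivial stand-in:

```python
def resolve_address(hostport):
    """Stand-in for socrate.system.resolve_address."""
    return 'resolved:' + hostport


args = {'WEBMAIL': 'roundcube', 'WEBDAV': 'none'}

# Before: args['HOST_WEBMAIL'] = args.get('HOST_WEBMAIL', 'webmail')
#         ... resolve_address(args.get('HOST_WEBMAIL'))
# After: the default lives at the one .get() call site, so args is no
# longer mutated just to read a value once.
if args['WEBMAIL'] != 'none':
    args['WEBMAIL_ADDRESS'] = resolve_address(args.get('HOST_WEBMAIL', 'webmail'))
if args['WEBDAV'] != 'none':
    args['WEBDAV_ADDRESS'] = resolve_address(args.get('HOST_WEBDAV', 'webdav:5232'))

assert args['WEBMAIL_ADDRESS'] == 'resolved:webmail'
assert 'WEBDAV_ADDRESS' not in args and 'HOST_WEBMAIL' not in args
```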
gh_patches_debug_15711
rasdani/github-patches
git_diff
translate__pootle-6087
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Delete a TP from an old style project and the Project page stays cached 1. Create a new TP 2. TP is available 3. Delete TP 4. Project page still shows project listed - though it should be gone 5. Going to supposedly deleted TP and we get 404 We're not expiring cache when a TP is deleted. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `pootle/apps/pootle_revision/receivers.py` Content: ``` 1 # -*- coding: utf-8 -*- 2 # 3 # Copyright (C) Pootle contributors. 4 # 5 # This file is a part of the Pootle project. It is distributed under the GPL3 6 # or later license. See the LICENSE file for a copy of the license and the 7 # AUTHORS file for copyright and authorship information. 8 9 from django.db.models.signals import post_save, pre_delete 10 from django.dispatch import receiver 11 12 from pootle.core.delegate import revision_updater 13 from pootle_app.models import Directory 14 from pootle_data.models import StoreData 15 from pootle_store.models import Store 16 17 18 @receiver(post_save, sender=StoreData) 19 def handle_storedata_save(**kwargs): 20 revision_updater.get(Store)( 21 context=kwargs["instance"].store).update(keys=["stats", "checks"]) 22 23 24 @receiver(post_save, sender=Directory) 25 def handle_directory_save(**kwargs): 26 context = ( 27 kwargs["instance"].parent 28 if kwargs.get("created") 29 else kwargs["instance"]) 30 revision_updater.get(Directory)( 31 context=context).update(keys=["stats", "checks"]) 32 33 34 @receiver(pre_delete, sender=Directory) 35 def handle_directory_delete(**kwargs): 36 revision_updater.get(Directory)( 37 context=kwargs["instance"].parent).update(keys=["stats", "checks"]) 38 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/pootle/apps/pootle_revision/receivers.py b/pootle/apps/pootle_revision/receivers.py --- a/pootle/apps/pootle_revision/receivers.py +++ b/pootle/apps/pootle_revision/receivers.py @@ -13,6 +13,7 @@ from pootle_app.models import Directory from pootle_data.models import StoreData from pootle_store.models import Store +from pootle_translationproject.models import TranslationProject @receiver(post_save, sender=StoreData) @@ -35,3 +36,9 @@ def handle_directory_delete(**kwargs): revision_updater.get(Directory)( context=kwargs["instance"].parent).update(keys=["stats", "checks"]) + + +@receiver(pre_delete, sender=TranslationProject) +def handle_tp_delete(**kwargs): + revision_updater.get(Directory)( + context=kwargs["instance"].directory).update(keys=["stats", "checks"])
{"golden_diff": "diff --git a/pootle/apps/pootle_revision/receivers.py b/pootle/apps/pootle_revision/receivers.py\n--- a/pootle/apps/pootle_revision/receivers.py\n+++ b/pootle/apps/pootle_revision/receivers.py\n@@ -13,6 +13,7 @@\n from pootle_app.models import Directory\n from pootle_data.models import StoreData\n from pootle_store.models import Store\n+from pootle_translationproject.models import TranslationProject\n \n \n @receiver(post_save, sender=StoreData)\n@@ -35,3 +36,9 @@\n def handle_directory_delete(**kwargs):\n revision_updater.get(Directory)(\n context=kwargs[\"instance\"].parent).update(keys=[\"stats\", \"checks\"])\n+\n+\n+@receiver(pre_delete, sender=TranslationProject)\n+def handle_tp_delete(**kwargs):\n+ revision_updater.get(Directory)(\n+ context=kwargs[\"instance\"].directory).update(keys=[\"stats\", \"checks\"])\n", "issue": "Delete a TP from an old style project and the Project page stays cached\n1. Create a new TP\r\n2. TP is available\r\n3. Delete TP\r\n4. Project page still shows project listed - though it should be gone\r\n5. Going to supposedly deleted TP and we get 404\r\n\r\nWe're not expiring cache when a TP is deleted.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.db.models.signals import post_save, pre_delete\nfrom django.dispatch import receiver\n\nfrom pootle.core.delegate import revision_updater\nfrom pootle_app.models import Directory\nfrom pootle_data.models import StoreData\nfrom pootle_store.models import Store\n\n\n@receiver(post_save, sender=StoreData)\ndef handle_storedata_save(**kwargs):\n revision_updater.get(Store)(\n context=kwargs[\"instance\"].store).update(keys=[\"stats\", \"checks\"])\n\n\n@receiver(post_save, sender=Directory)\ndef handle_directory_save(**kwargs):\n context = (\n kwargs[\"instance\"].parent\n if kwargs.get(\"created\")\n else kwargs[\"instance\"])\n revision_updater.get(Directory)(\n context=context).update(keys=[\"stats\", \"checks\"])\n\n\n@receiver(pre_delete, sender=Directory)\ndef handle_directory_delete(**kwargs):\n revision_updater.get(Directory)(\n context=kwargs[\"instance\"].parent).update(keys=[\"stats\", \"checks\"])\n", "path": "pootle/apps/pootle_revision/receivers.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. 
See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom django.db.models.signals import post_save, pre_delete\nfrom django.dispatch import receiver\n\nfrom pootle.core.delegate import revision_updater\nfrom pootle_app.models import Directory\nfrom pootle_data.models import StoreData\nfrom pootle_store.models import Store\nfrom pootle_translationproject.models import TranslationProject\n\n\n@receiver(post_save, sender=StoreData)\ndef handle_storedata_save(**kwargs):\n revision_updater.get(Store)(\n context=kwargs[\"instance\"].store).update(keys=[\"stats\", \"checks\"])\n\n\n@receiver(post_save, sender=Directory)\ndef handle_directory_save(**kwargs):\n context = (\n kwargs[\"instance\"].parent\n if kwargs.get(\"created\")\n else kwargs[\"instance\"])\n revision_updater.get(Directory)(\n context=context).update(keys=[\"stats\", \"checks\"])\n\n\n@receiver(pre_delete, sender=Directory)\ndef handle_directory_delete(**kwargs):\n revision_updater.get(Directory)(\n context=kwargs[\"instance\"].parent).update(keys=[\"stats\", \"checks\"])\n\n\n@receiver(pre_delete, sender=TranslationProject)\ndef handle_tp_delete(**kwargs):\n revision_updater.get(Directory)(\n context=kwargs[\"instance\"].directory).update(keys=[\"stats\", \"checks\"])\n", "path": "pootle/apps/pootle_revision/receivers.py"}]}
686
215
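The pootle fix above is an instance of a common Django signal pattern: expire a cached aggregate when a child row is about to disappear. A compact sketch that assumes only that Django is importable (`django.dispatch` needs no configured settings); the local `pre_delete` signal and the `revisions` dict are stand-ins for `models.signals.pre_delete` and the real `revision_updater`:

```python
from django.dispatch import Signal, receiver

pre_delete = Signal()           # stand-in for models.signals.pre_delete
revisions = {'directory': 0}    # stand-in for the revision_updater backend


@receiver(pre_delete)
def handle_tp_delete(sender, instance, **kwargs):
    # pre_delete fires while the row still exists, so instance.directory
    # is still reachable; that is why the real receiver can read it here
    # before the cascade removes the translation project.
    revisions['directory'] += 1


class TranslationProject:
    directory = 'parent-dir'

    def delete(self):
        pre_delete.send(sender=type(self), instance=self)


TranslationProject().delete()
assert revisions['directory'] == 1
```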
gh_patches_debug_54708
rasdani/github-patches
git_diff
qutebrowser__qutebrowser-4743
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Launching keyhint widget causes 100% usage of one CPU core That's how it was for as long as I can remember, reproducible with all of my hardware (pressing _g_ or _;_ is enough). I don't think that's an intended behavior. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `qutebrowser/misc/keyhintwidget.py` Content: ``` 1 # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: 2 3 # Copyright 2016-2019 Ryan Roden-Corrent (rcorre) <[email protected]> 4 # 5 # This file is part of qutebrowser. 6 # 7 # qutebrowser is free software: you can redistribute it and/or modify 8 # it under the terms of the GNU General Public License as published by 9 # the Free Software Foundation, either version 3 of the License, or 10 # (at your option) any later version. 11 # 12 # qutebrowser is distributed in the hope that it will be useful, 13 # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 # GNU General Public License for more details. 16 # 17 # You should have received a copy of the GNU General Public License 18 # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. 19 20 """Small window that pops up to show hints for possible keystrings. 21 22 When a user inputs a key that forms a partial match, this shows a small window 23 with each possible completion of that keystring and the corresponding command. 24 It is intended to help discoverability of keybindings. 25 """ 26 27 import html 28 import fnmatch 29 import re 30 31 from PyQt5.QtWidgets import QLabel, QSizePolicy 32 from PyQt5.QtCore import pyqtSlot, pyqtSignal, Qt 33 34 from qutebrowser.config import config 35 from qutebrowser.utils import utils, usertypes 36 from qutebrowser.misc import objects 37 from qutebrowser.keyinput import keyutils 38 39 40 class KeyHintView(QLabel): 41 42 """The view showing hints for key bindings based on the current key string. 43 44 Attributes: 45 _win_id: Window ID of parent. 46 47 Signals: 48 update_geometry: Emitted when this widget should be resized/positioned. 49 """ 50 51 STYLESHEET = """ 52 QLabel { 53 font: {{ conf.fonts.keyhint }}; 54 color: {{ conf.colors.keyhint.fg }}; 55 background-color: {{ conf.colors.keyhint.bg }}; 56 padding: 6px; 57 {% if conf.statusbar.position == 'top' %} 58 border-bottom-right-radius: {{ conf.keyhint.radius }}px; 59 {% else %} 60 border-top-right-radius: {{ conf.keyhint.radius }}px; 61 {% endif %} 62 } 63 """ 64 update_geometry = pyqtSignal() 65 66 def __init__(self, win_id, parent=None): 67 super().__init__(parent) 68 self.setTextFormat(Qt.RichText) 69 self._win_id = win_id 70 self.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Minimum) 71 self.hide() 72 self._show_timer = usertypes.Timer(self, 'keyhint_show') 73 self._show_timer.timeout.connect(self.show) 74 config.set_register_stylesheet(self) 75 76 def __repr__(self): 77 return utils.get_repr(self, win_id=self._win_id) 78 79 def showEvent(self, e): 80 """Adjust the keyhint size when it's freshly shown.""" 81 self.update_geometry.emit() 82 super().showEvent(e) 83 84 @pyqtSlot(str) 85 def update_keyhint(self, modename, prefix): 86 """Show hints for the given prefix (or hide if prefix is empty). 87 88 Args: 89 prefix: The current partial keystring. 
90 """ 91 countstr, prefix = re.fullmatch(r'(\d*)(.*)', prefix).groups() 92 if not prefix: 93 self._show_timer.stop() 94 self.hide() 95 return 96 97 def blacklisted(keychain): 98 return any(fnmatch.fnmatchcase(keychain, glob) 99 for glob in config.val.keyhint.blacklist) 100 101 def takes_count(cmdstr): 102 """Return true iff this command can take a count argument.""" 103 cmdname = cmdstr.split(' ')[0] 104 cmd = objects.commands.get(cmdname) 105 return cmd and cmd.takes_count() 106 107 bindings_dict = config.key_instance.get_bindings_for(modename) 108 bindings = [(k, v) for (k, v) in sorted(bindings_dict.items()) 109 if keyutils.KeySequence.parse(prefix).matches(k) and 110 not blacklisted(str(k)) and 111 (takes_count(v) or not countstr)] 112 113 if not bindings: 114 self._show_timer.stop() 115 return 116 117 # delay so a quickly typed keychain doesn't display hints 118 self._show_timer.setInterval(config.val.keyhint.delay) 119 self._show_timer.start() 120 suffix_color = html.escape(config.val.colors.keyhint.suffix.fg) 121 122 text = '' 123 for seq, cmd in bindings: 124 text += ( 125 "<tr>" 126 "<td>{}</td>" 127 "<td style='color: {}'>{}</td>" 128 "<td style='padding-left: 2ex'>{}</td>" 129 "</tr>" 130 ).format( 131 html.escape(prefix), 132 suffix_color, 133 html.escape(str(seq)[len(prefix):]), 134 html.escape(cmd) 135 ) 136 text = '<table>{}</table>'.format(text) 137 138 self.setText(text) 139 self.adjustSize() 140 self.update_geometry.emit() 141 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/qutebrowser/misc/keyhintwidget.py b/qutebrowser/misc/keyhintwidget.py --- a/qutebrowser/misc/keyhintwidget.py +++ b/qutebrowser/misc/keyhintwidget.py @@ -71,6 +71,7 @@ self.hide() self._show_timer = usertypes.Timer(self, 'keyhint_show') self._show_timer.timeout.connect(self.show) + self._show_timer.setSingleShot(True) config.set_register_stylesheet(self) def __repr__(self):
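The one-line fix turns the show timer into a one-shot: `QTimer` objects repeat by default, so once `update_keyhint` calls `start()` the connected `show()` slot keeps firing at the configured interval for as long as a partial keychain is pending — and a repeating timer with a zero `keyhint.delay` fires on every event-loop pass, which is consistent with one core pegged at 100%. A minimal standalone sketch of the repeating-vs-single-shot behaviour (assuming PyQt5; the interval and slots are illustrative, not qutebrowser's real wiring):

```python
import sys
from PyQt5.QtCore import QCoreApplication, QTimer

app = QCoreApplication(sys.argv)
timer = QTimer()
timer.setInterval(100)

def on_timeout():
    # Without setSingleShot(True) this runs every 100 ms until stop() is
    # called; in the widget that means show() keeps getting re-invoked.
    print("timeout fired")

timer.timeout.connect(on_timeout)
timer.setSingleShot(True)  # the fix: fire once, then stay quiet
timer.start()

QTimer.singleShot(500, app.quit)  # end the demo after half a second
sys.exit(app.exec_())
```

Note that `update_keyhint` already stops the timer on its early-return paths, which is presumably why the spin only appears while hints are pending or displayed.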
{"golden_diff": "diff --git a/qutebrowser/misc/keyhintwidget.py b/qutebrowser/misc/keyhintwidget.py\n--- a/qutebrowser/misc/keyhintwidget.py\n+++ b/qutebrowser/misc/keyhintwidget.py\n@@ -71,6 +71,7 @@\n self.hide()\n self._show_timer = usertypes.Timer(self, 'keyhint_show')\n self._show_timer.timeout.connect(self.show)\n+ self._show_timer.setSingleShot(True)\n config.set_register_stylesheet(self)\n \n def __repr__(self):\n", "issue": "Launching keyhint widget causes 100% usage of one CPU core\nThat's how it was for as long as I can remember, reproducible with all of my hardware (pressing _g_ or _;_ is enough). I don't think that's an intended behavior.\n", "before_files": [{"content": "# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:\n\n# Copyright 2016-2019 Ryan Roden-Corrent (rcorre) <[email protected]>\n#\n# This file is part of qutebrowser.\n#\n# qutebrowser is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# qutebrowser is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Small window that pops up to show hints for possible keystrings.\n\nWhen a user inputs a key that forms a partial match, this shows a small window\nwith each possible completion of that keystring and the corresponding command.\nIt is intended to help discoverability of keybindings.\n\"\"\"\n\nimport html\nimport fnmatch\nimport re\n\nfrom PyQt5.QtWidgets import QLabel, QSizePolicy\nfrom PyQt5.QtCore import pyqtSlot, pyqtSignal, Qt\n\nfrom qutebrowser.config import config\nfrom qutebrowser.utils import utils, usertypes\nfrom qutebrowser.misc import objects\nfrom qutebrowser.keyinput import keyutils\n\n\nclass KeyHintView(QLabel):\n\n \"\"\"The view showing hints for key bindings based on the current key string.\n\n Attributes:\n _win_id: Window ID of parent.\n\n Signals:\n update_geometry: Emitted when this widget should be resized/positioned.\n \"\"\"\n\n STYLESHEET = \"\"\"\n QLabel {\n font: {{ conf.fonts.keyhint }};\n color: {{ conf.colors.keyhint.fg }};\n background-color: {{ conf.colors.keyhint.bg }};\n padding: 6px;\n {% if conf.statusbar.position == 'top' %}\n border-bottom-right-radius: {{ conf.keyhint.radius }}px;\n {% else %}\n border-top-right-radius: {{ conf.keyhint.radius }}px;\n {% endif %}\n }\n \"\"\"\n update_geometry = pyqtSignal()\n\n def __init__(self, win_id, parent=None):\n super().__init__(parent)\n self.setTextFormat(Qt.RichText)\n self._win_id = win_id\n self.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Minimum)\n self.hide()\n self._show_timer = usertypes.Timer(self, 'keyhint_show')\n self._show_timer.timeout.connect(self.show)\n config.set_register_stylesheet(self)\n\n def __repr__(self):\n return utils.get_repr(self, win_id=self._win_id)\n\n def showEvent(self, e):\n \"\"\"Adjust the keyhint size when it's freshly shown.\"\"\"\n self.update_geometry.emit()\n super().showEvent(e)\n\n @pyqtSlot(str)\n def update_keyhint(self, modename, prefix):\n \"\"\"Show hints for the given prefix (or hide if prefix is empty).\n\n Args:\n prefix: The current partial keystring.\n \"\"\"\n 
countstr, prefix = re.fullmatch(r'(\\d*)(.*)', prefix).groups()\n if not prefix:\n self._show_timer.stop()\n self.hide()\n return\n\n def blacklisted(keychain):\n return any(fnmatch.fnmatchcase(keychain, glob)\n for glob in config.val.keyhint.blacklist)\n\n def takes_count(cmdstr):\n \"\"\"Return true iff this command can take a count argument.\"\"\"\n cmdname = cmdstr.split(' ')[0]\n cmd = objects.commands.get(cmdname)\n return cmd and cmd.takes_count()\n\n bindings_dict = config.key_instance.get_bindings_for(modename)\n bindings = [(k, v) for (k, v) in sorted(bindings_dict.items())\n if keyutils.KeySequence.parse(prefix).matches(k) and\n not blacklisted(str(k)) and\n (takes_count(v) or not countstr)]\n\n if not bindings:\n self._show_timer.stop()\n return\n\n # delay so a quickly typed keychain doesn't display hints\n self._show_timer.setInterval(config.val.keyhint.delay)\n self._show_timer.start()\n suffix_color = html.escape(config.val.colors.keyhint.suffix.fg)\n\n text = ''\n for seq, cmd in bindings:\n text += (\n \"<tr>\"\n \"<td>{}</td>\"\n \"<td style='color: {}'>{}</td>\"\n \"<td style='padding-left: 2ex'>{}</td>\"\n \"</tr>\"\n ).format(\n html.escape(prefix),\n suffix_color,\n html.escape(str(seq)[len(prefix):]),\n html.escape(cmd)\n )\n text = '<table>{}</table>'.format(text)\n\n self.setText(text)\n self.adjustSize()\n self.update_geometry.emit()\n", "path": "qutebrowser/misc/keyhintwidget.py"}], "after_files": [{"content": "# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:\n\n# Copyright 2016-2019 Ryan Roden-Corrent (rcorre) <[email protected]>\n#\n# This file is part of qutebrowser.\n#\n# qutebrowser is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# qutebrowser is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with qutebrowser. 
If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Small window that pops up to show hints for possible keystrings.\n\nWhen a user inputs a key that forms a partial match, this shows a small window\nwith each possible completion of that keystring and the corresponding command.\nIt is intended to help discoverability of keybindings.\n\"\"\"\n\nimport html\nimport fnmatch\nimport re\n\nfrom PyQt5.QtWidgets import QLabel, QSizePolicy\nfrom PyQt5.QtCore import pyqtSlot, pyqtSignal, Qt\n\nfrom qutebrowser.config import config\nfrom qutebrowser.utils import utils, usertypes\nfrom qutebrowser.misc import objects\nfrom qutebrowser.keyinput import keyutils\n\n\nclass KeyHintView(QLabel):\n\n \"\"\"The view showing hints for key bindings based on the current key string.\n\n Attributes:\n _win_id: Window ID of parent.\n\n Signals:\n update_geometry: Emitted when this widget should be resized/positioned.\n \"\"\"\n\n STYLESHEET = \"\"\"\n QLabel {\n font: {{ conf.fonts.keyhint }};\n color: {{ conf.colors.keyhint.fg }};\n background-color: {{ conf.colors.keyhint.bg }};\n padding: 6px;\n {% if conf.statusbar.position == 'top' %}\n border-bottom-right-radius: {{ conf.keyhint.radius }}px;\n {% else %}\n border-top-right-radius: {{ conf.keyhint.radius }}px;\n {% endif %}\n }\n \"\"\"\n update_geometry = pyqtSignal()\n\n def __init__(self, win_id, parent=None):\n super().__init__(parent)\n self.setTextFormat(Qt.RichText)\n self._win_id = win_id\n self.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Minimum)\n self.hide()\n self._show_timer = usertypes.Timer(self, 'keyhint_show')\n self._show_timer.timeout.connect(self.show)\n self._show_timer.setSingleShot(True)\n config.set_register_stylesheet(self)\n\n def __repr__(self):\n return utils.get_repr(self, win_id=self._win_id)\n\n def showEvent(self, e):\n \"\"\"Adjust the keyhint size when it's freshly shown.\"\"\"\n self.update_geometry.emit()\n super().showEvent(e)\n\n @pyqtSlot(str)\n def update_keyhint(self, modename, prefix):\n \"\"\"Show hints for the given prefix (or hide if prefix is empty).\n\n Args:\n prefix: The current partial keystring.\n \"\"\"\n countstr, prefix = re.fullmatch(r'(\\d*)(.*)', prefix).groups()\n if not prefix:\n self._show_timer.stop()\n self.hide()\n return\n\n def blacklisted(keychain):\n return any(fnmatch.fnmatchcase(keychain, glob)\n for glob in config.val.keyhint.blacklist)\n\n def takes_count(cmdstr):\n \"\"\"Return true iff this command can take a count argument.\"\"\"\n cmdname = cmdstr.split(' ')[0]\n cmd = objects.commands.get(cmdname)\n return cmd and cmd.takes_count()\n\n bindings_dict = config.key_instance.get_bindings_for(modename)\n bindings = [(k, v) for (k, v) in sorted(bindings_dict.items())\n if keyutils.KeySequence.parse(prefix).matches(k) and\n not blacklisted(str(k)) and\n (takes_count(v) or not countstr)]\n\n if not bindings:\n self._show_timer.stop()\n return\n\n # delay so a quickly typed keychain doesn't display hints\n self._show_timer.setInterval(config.val.keyhint.delay)\n self._show_timer.start()\n suffix_color = html.escape(config.val.colors.keyhint.suffix.fg)\n\n text = ''\n for seq, cmd in bindings:\n text += (\n \"<tr>\"\n \"<td>{}</td>\"\n \"<td style='color: {}'>{}</td>\"\n \"<td style='padding-left: 2ex'>{}</td>\"\n \"</tr>\"\n ).format(\n html.escape(prefix),\n suffix_color,\n html.escape(str(seq)[len(prefix):]),\n html.escape(cmd)\n )\n text = '<table>{}</table>'.format(text)\n\n self.setText(text)\n self.adjustSize()\n self.update_geometry.emit()\n", "path": 
"qutebrowser/misc/keyhintwidget.py"}]}
1719
113
gh_patches_debug_11218
rasdani/github-patches
git_diff
openstates__openstates-scrapers-2984
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- FL failing since at least 2019-06-03 FL has been failing since 2019-06-03 Based on automated runs it appears that FL has not run successfully in 2 days (2019-06-03). ``` 04:01:17 CRITICAL pupa: Session(s) 2009B, 2003C, 2003B, 2002E, 2004A, 2012 Org., 2007D, 1998 Org, 2000A (Jan.), 2007C, 2007A, 2000A (Dec.), 2006 Org., 2000 Org., 2001C, 2005B, 2002D, 2008 Org., 2018 Org., 2003A, 2010 Org., 2004 Org., 2003D, 2007B, 2009A, 2001B, 2014 Org., 2002 Org., 2016 Org., 2010C, 2003E were reported by Florida.get_session_list() but were not found in Florida.legislative_sessions or Florida.ignored_scraped_sessions. loaded Open States pupa settings... fl (scrape, import) bills: {} ``` Visit http://bobsled.openstates.org for more info. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `openstates/fl/__init__.py` Content: ``` 1 # encoding=utf-8 2 import logging 3 from pupa.scrape import Jurisdiction, Organization 4 from .bills import FlBillScraper 5 from .people import FlPersonScraper 6 # from .committees import FlCommitteeScraper 7 # from .events import FlEventScraper 8 from openstates.utils import url_xpath 9 10 logging.getLogger(__name__).addHandler(logging.NullHandler()) 11 12 13 class Florida(Jurisdiction): 14 division_id = "ocd-division/country:us/state:fl" 15 classification = "government" 16 name = "Florida" 17 url = "http://myflorida.com" 18 19 scrapers = { 20 "bills": FlBillScraper, 21 "people": FlPersonScraper, 22 # "committees": FlCommitteeScraper, 23 # "events": FlEventScraper, 24 } 25 legislative_sessions = [ 26 {'name': '2011 Regular Session', 'identifier': '2011', 27 'classification': 'primary'}, 28 {'name': '2012 Regular Session', 'identifier': '2012', 29 'classification': 'primary'}, 30 {'name': '2012 Extraordinary Apportionment Session', 'identifier': '2012B', 31 'classification': 'special'}, 32 {'name': '2013 Regular Session', 'identifier': '2013', 33 'classification': 'primary'}, 34 {'name': '2014 Regular Session', 'identifier': '2014', 35 'classification': 'primary'}, 36 {'name': '2014 Special Session A', 37 'identifier': '2014A', 'classification': 'special'}, 38 # data for the below 39 {'name': '2015 Regular Session', 'identifier': '2015', 40 'classification': 'primary'}, 41 {'name': '2015 Special Session A', 42 'identifier': '2015A', 'classification': 'special'}, 43 {'name': '2015 Special Session B', 44 'identifier': '2015B', 'classification': 'special'}, 45 {'name': '2015 Special Session C', 46 'identifier': '2015C', 'classification': 'special'}, 47 {'name': '2016 Regular Session', 'identifier': '2016', 48 'classification': 'primary'}, 49 {'name': '2017 Regular Session', 'identifier': '2017', 'classification': 'primary', 50 'start_date': '2017-03-07', 'end_date': '2017-05-05'}, 51 {'name': '2017 Special Session A', 52 'identifier': '2017A', 'classification': 'special'}, 53 {'name': '2018 Regular Session', 'identifier': '2018', 'classification': 'primary', 54 'start_date': '2018-01-08', 'end_date': '2018-03-09'}, 55 {'name': '2019 Regular Session', 'identifier': '2019', 'classification': 'primary', 56 'start_date': '2019-03-05', 'end_date': '2019-05-03'}, 57 ] 58 ignored_scraped_sessions = [ 59 *(str(each) for each in range(1997, 2010)), 60 '2010', '2010A', '2010O', 61 '2012O', 62 '2014O', 63 '2016O', 64 '2018O', 65 ] 66 67 def get_organizations(self): 68 legis = 
Organization(name="Florida Legislature", 69 classification="legislature") 70 71 upper = Organization( 72 'Florida Senate', classification='upper', parent_id=legis._id) 73 lower = Organization('Florida House of Representatives', classification='lower', 74 parent_id=legis._id) 75 76 yield legis 77 yield upper 78 yield lower 79 80 def get_session_list(self): 81 return url_xpath('http://flsenate.gov', '//option/text()') 82 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/openstates/fl/__init__.py b/openstates/fl/__init__.py --- a/openstates/fl/__init__.py +++ b/openstates/fl/__init__.py @@ -62,6 +62,37 @@ '2014O', '2016O', '2018O', + '2018 Org.', + '2016 Org.', + '2014 Org.', + '2012 Org.', + '2010 Org.', + '2010C', + '2009B', + '2009A', + '2008 Org.', + '2007D', + '2007C', + '2007B', + '2007A', + '2006 Org.', + '2005B', + '2004A', + '2004 Org.', + '2003E', + '2003D', + '2003C', + '2003B', + '2003A', + '2002E', + '2002D', + '2002 Org.', + '2001C', + '2001B', + '2000A (Jan.)', + '2000A (Dec.)', + '2000 Org.', + '1998 Org', ] def get_organizations(self):
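The failure here is bookkeeping rather than scraping: flsenate.gov lists historical organizational and special sessions ('2018 Org.', '2009B', '2000A (Jan.)', …) that come back from `get_session_list()` but appear in neither `legislative_sessions` nor `ignored_scraped_sessions`, so pupa's session check aborts the run, and the patch simply enumerates them as ignored. A self-contained sketch of that check (a simplification of pupa's real validation; the lists are trimmed samples):

```python
legislative_sessions = [{'identifier': '2018'}, {'identifier': '2019'}]
ignored_scraped_sessions = ['2018O', '2018 Org.', '1998 Org']

def get_session_list():
    # stand-in for url_xpath('http://flsenate.gov', '//option/text()')
    return ['2019', '2018', '2018O', '2018 Org.', '1998 Org']

known = {s['identifier'] for s in legislative_sessions}
unaccounted = set(get_session_list()) - known - set(ignored_scraped_sessions)
# pupa raises the CRITICAL error from the report when this set is non-empty
assert not unaccounted, f"reported but not handled: {sorted(unaccounted)}"
```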
{"golden_diff": "diff --git a/openstates/fl/__init__.py b/openstates/fl/__init__.py\n--- a/openstates/fl/__init__.py\n+++ b/openstates/fl/__init__.py\n@@ -62,6 +62,37 @@\n '2014O',\n '2016O',\n '2018O',\n+ '2018 Org.',\n+ '2016 Org.',\n+ '2014 Org.',\n+ '2012 Org.',\n+ '2010 Org.',\n+ '2010C',\n+ '2009B',\n+ '2009A',\n+ '2008 Org.',\n+ '2007D',\n+ '2007C',\n+ '2007B',\n+ '2007A',\n+ '2006 Org.',\n+ '2005B',\n+ '2004A',\n+ '2004 Org.',\n+ '2003E',\n+ '2003D',\n+ '2003C',\n+ '2003B',\n+ '2003A',\n+ '2002E',\n+ '2002D',\n+ '2002 Org.',\n+ '2001C',\n+ '2001B',\n+ '2000A (Jan.)',\n+ '2000A (Dec.)',\n+ '2000 Org.',\n+ '1998 Org',\n ]\n \n def get_organizations(self):\n", "issue": "FL failing since at least 2019-06-03\nFL has been failing since 2019-06-03\n\nBased on automated runs it appears that FL has not run successfully in 2 days (2019-06-03).\n\n\n```\n 04:01:17 CRITICAL pupa: Session(s) 2009B, 2003C, 2003B, 2002E, 2004A, 2012 Org., 2007D, 1998 Org, 2000A (Jan.), 2007C, 2007A, 2000A (Dec.), 2006 Org., 2000 Org., 2001C, 2005B, 2002D, 2008 Org., 2018 Org., 2003A, 2010 Org., 2004 Org., 2003D, 2007B, 2009A, 2001B, 2014 Org., 2002 Org., 2016 Org., 2010C, 2003E were reported by Florida.get_session_list() but were not found in Florida.legislative_sessions or Florida.ignored_scraped_sessions.\nloaded Open States pupa settings...\nfl (scrape, import)\n bills: {}\n```\n\nVisit http://bobsled.openstates.org for more info.\n\n", "before_files": [{"content": "# encoding=utf-8\nimport logging\nfrom pupa.scrape import Jurisdiction, Organization\nfrom .bills import FlBillScraper\nfrom .people import FlPersonScraper\n# from .committees import FlCommitteeScraper\n# from .events import FlEventScraper\nfrom openstates.utils import url_xpath\n\nlogging.getLogger(__name__).addHandler(logging.NullHandler())\n\n\nclass Florida(Jurisdiction):\n division_id = \"ocd-division/country:us/state:fl\"\n classification = \"government\"\n name = \"Florida\"\n url = \"http://myflorida.com\"\n\n scrapers = {\n \"bills\": FlBillScraper,\n \"people\": FlPersonScraper,\n # \"committees\": FlCommitteeScraper,\n # \"events\": FlEventScraper,\n }\n legislative_sessions = [\n {'name': '2011 Regular Session', 'identifier': '2011',\n 'classification': 'primary'},\n {'name': '2012 Regular Session', 'identifier': '2012',\n 'classification': 'primary'},\n {'name': '2012 Extraordinary Apportionment Session', 'identifier': '2012B',\n 'classification': 'special'},\n {'name': '2013 Regular Session', 'identifier': '2013',\n 'classification': 'primary'},\n {'name': '2014 Regular Session', 'identifier': '2014',\n 'classification': 'primary'},\n {'name': '2014 Special Session A',\n 'identifier': '2014A', 'classification': 'special'},\n # data for the below\n {'name': '2015 Regular Session', 'identifier': '2015',\n 'classification': 'primary'},\n {'name': '2015 Special Session A',\n 'identifier': '2015A', 'classification': 'special'},\n {'name': '2015 Special Session B',\n 'identifier': '2015B', 'classification': 'special'},\n {'name': '2015 Special Session C',\n 'identifier': '2015C', 'classification': 'special'},\n {'name': '2016 Regular Session', 'identifier': '2016',\n 'classification': 'primary'},\n {'name': '2017 Regular Session', 'identifier': '2017', 'classification': 'primary',\n 'start_date': '2017-03-07', 'end_date': '2017-05-05'},\n {'name': '2017 Special Session A',\n 'identifier': '2017A', 'classification': 'special'},\n {'name': '2018 Regular Session', 'identifier': '2018', 'classification': 'primary',\n 'start_date': '2018-01-08', 
'end_date': '2018-03-09'},\n {'name': '2019 Regular Session', 'identifier': '2019', 'classification': 'primary',\n 'start_date': '2019-03-05', 'end_date': '2019-05-03'},\n ]\n ignored_scraped_sessions = [\n *(str(each) for each in range(1997, 2010)),\n '2010', '2010A', '2010O',\n '2012O',\n '2014O',\n '2016O',\n '2018O',\n ]\n\n def get_organizations(self):\n legis = Organization(name=\"Florida Legislature\",\n classification=\"legislature\")\n\n upper = Organization(\n 'Florida Senate', classification='upper', parent_id=legis._id)\n lower = Organization('Florida House of Representatives', classification='lower',\n parent_id=legis._id)\n\n yield legis\n yield upper\n yield lower\n\n def get_session_list(self):\n return url_xpath('http://flsenate.gov', '//option/text()')\n", "path": "openstates/fl/__init__.py"}], "after_files": [{"content": "# encoding=utf-8\nimport logging\nfrom pupa.scrape import Jurisdiction, Organization\nfrom .bills import FlBillScraper\nfrom .people import FlPersonScraper\n# from .committees import FlCommitteeScraper\n# from .events import FlEventScraper\nfrom openstates.utils import url_xpath\n\nlogging.getLogger(__name__).addHandler(logging.NullHandler())\n\n\nclass Florida(Jurisdiction):\n division_id = \"ocd-division/country:us/state:fl\"\n classification = \"government\"\n name = \"Florida\"\n url = \"http://myflorida.com\"\n\n scrapers = {\n \"bills\": FlBillScraper,\n \"people\": FlPersonScraper,\n # \"committees\": FlCommitteeScraper,\n # \"events\": FlEventScraper,\n }\n legislative_sessions = [\n {'name': '2011 Regular Session', 'identifier': '2011',\n 'classification': 'primary'},\n {'name': '2012 Regular Session', 'identifier': '2012',\n 'classification': 'primary'},\n {'name': '2012 Extraordinary Apportionment Session', 'identifier': '2012B',\n 'classification': 'special'},\n {'name': '2013 Regular Session', 'identifier': '2013',\n 'classification': 'primary'},\n {'name': '2014 Regular Session', 'identifier': '2014',\n 'classification': 'primary'},\n {'name': '2014 Special Session A',\n 'identifier': '2014A', 'classification': 'special'},\n # data for the below\n {'name': '2015 Regular Session', 'identifier': '2015',\n 'classification': 'primary'},\n {'name': '2015 Special Session A',\n 'identifier': '2015A', 'classification': 'special'},\n {'name': '2015 Special Session B',\n 'identifier': '2015B', 'classification': 'special'},\n {'name': '2015 Special Session C',\n 'identifier': '2015C', 'classification': 'special'},\n {'name': '2016 Regular Session', 'identifier': '2016',\n 'classification': 'primary'},\n {'name': '2017 Regular Session', 'identifier': '2017', 'classification': 'primary',\n 'start_date': '2017-03-07', 'end_date': '2017-05-05'},\n {'name': '2017 Special Session A',\n 'identifier': '2017A', 'classification': 'special'},\n {'name': '2018 Regular Session', 'identifier': '2018', 'classification': 'primary',\n 'start_date': '2018-01-08', 'end_date': '2018-03-09'},\n {'name': '2019 Regular Session', 'identifier': '2019', 'classification': 'primary',\n 'start_date': '2019-03-05', 'end_date': '2019-05-03'},\n ]\n ignored_scraped_sessions = [\n *(str(each) for each in range(1997, 2010)),\n '2010', '2010A', '2010O',\n '2012O',\n '2014O',\n '2016O',\n '2018O',\n '2018 Org.',\n '2016 Org.',\n '2014 Org.',\n '2012 Org.',\n '2010 Org.',\n '2010C',\n '2009B',\n '2009A',\n '2008 Org.',\n '2007D',\n '2007C',\n '2007B',\n '2007A',\n '2006 Org.',\n '2005B',\n '2004A',\n '2004 Org.',\n '2003E',\n '2003D',\n '2003C',\n '2003B',\n '2003A',\n '2002E',\n 
'2002D',\n '2002 Org.',\n '2001C',\n '2001B',\n '2000A (Jan.)',\n '2000A (Dec.)',\n '2000 Org.',\n '1998 Org',\n ]\n\n def get_organizations(self):\n legis = Organization(name=\"Florida Legislature\",\n classification=\"legislature\")\n\n upper = Organization(\n 'Florida Senate', classification='upper', parent_id=legis._id)\n lower = Organization('Florida House of Representatives', classification='lower',\n parent_id=legis._id)\n\n yield legis\n yield upper\n yield lower\n\n def get_session_list(self):\n return url_xpath('http://flsenate.gov', '//option/text()')\n", "path": "openstates/fl/__init__.py"}]}
1694
372
gh_patches_debug_51797
rasdani/github-patches
git_diff
HypothesisWorks__hypothesis-1379
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- ImportError: cannot import name canonical_filename Hi, I'm getting an import error on startup: ``` File "/Users/adaszko/repos/fieldaware/fieldaware-venv/lib/python2.7/site-packages/hypothesis/core.py", line 38, in <module> from coverage.files import canonical_filename ImportError: cannot import name canonical_filename ``` I've downloaded https://files.pythonhosted.org/packages/4b/e4/5ebf3220993de03f2120a16d9e91cfd053f4c11ada0cf033f2bfe9683fcf/hypothesis-3.65.0-py2-none-any.whl and the `METADATA` file there specifies dependency on `coverage` without any version number: ``` % grep coverage METADATA Requires-Dist: coverage ``` My local `coverage` is at `3.7.1`. It works if I upgrade `coverage` to `4.4.1`, so I think there's an issue in hypothesis in that it doesn't specify the version bound on `coverage`. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `hypothesis-python/setup.py` Content: ``` 1 # coding=utf-8 2 # 3 # This file is part of Hypothesis, which may be found at 4 # https://github.com/HypothesisWorks/hypothesis-python 5 # 6 # Most of this work is copyright (C) 2013-2018 David R. MacIver 7 # ([email protected]), but it contains contributions by others. See 8 # CONTRIBUTING.rst for a full list of people who may hold copyright, and 9 # consult the git log if you need to determine who owns an individual 10 # contribution. 11 # 12 # This Source Code Form is subject to the terms of the Mozilla Public License, 13 # v. 2.0. If a copy of the MPL was not distributed with this file, You can 14 # obtain one at http://mozilla.org/MPL/2.0/. 15 # 16 # END HEADER 17 18 from __future__ import division, print_function, absolute_import 19 20 import os 21 import sys 22 import warnings 23 24 import setuptools 25 26 27 def local_file(name): 28 return os.path.relpath(os.path.join(os.path.dirname(__file__), name)) 29 30 31 SOURCE = local_file('src') 32 README = local_file('README.rst') 33 34 setuptools_version = tuple(map(int, setuptools.__version__.split('.')[:2])) 35 36 if setuptools_version < (36, 2): 37 # Warning only - very bad if uploading bdist but fine if installing sdist. 38 warnings.warn( 39 'This version of setuptools is too old to correctly store ' 40 'conditional dependencies in binary wheels. For more info, see: ' 41 'https://hynek.me/articles/conditional-python-dependencies/' 42 ) 43 44 45 # Assignment to placate pyflakes. The actual version is from the exec that 46 # follows. 47 __version__ = None 48 49 with open(local_file('src/hypothesis/version.py')) as o: 50 exec(o.read()) 51 52 assert __version__ is not None 53 54 55 extras = { 56 'datetime': ['pytz'], 57 'pytz': ['pytz'], 58 'dateutil': ['python-dateutil'], 59 'fakefactory': ['Faker>=0.7'], 60 'numpy': ['numpy>=1.9.0'], 61 'pytest': ['pytest>=2.8.0'], 62 # We only support Django versions with upstream support - see 63 # https://www.djangoproject.com/download/#supported-versions 64 'django': ['pytz', 'django>=1.11'], 65 } 66 67 extras['faker'] = extras['fakefactory'] 68 extras['all'] = sorted(sum(extras.values(), [])) 69 70 71 install_requires = ['attrs>=16.0.0', 'coverage'] 72 # Using an environment marker on enum34 makes the dependency condition 73 # independent of the build environemnt, which is important for wheels. 
74 # https://www.python.org/dev/peps/pep-0345/#environment-markers 75 if sys.version_info[0] < 3 and setuptools_version < (8, 0): 76 # Except really old systems, where we give up and install unconditionally 77 install_requires.append('enum34') 78 else: 79 install_requires.append('enum34; python_version=="2.7"') 80 81 82 setuptools.setup( 83 name='hypothesis', 84 version=__version__, 85 author='David R. MacIver', 86 author_email='[email protected]', 87 packages=setuptools.find_packages(SOURCE), 88 package_dir={'': SOURCE}, 89 package_data={'hypothesis': ['py.typed']}, 90 url=( 91 'https://github.com/HypothesisWorks/hypothesis/' 92 'tree/master/hypothesis-python' 93 ), 94 license='MPL v2', 95 description='A library for property based testing', 96 zip_safe=False, 97 extras_require=extras, 98 install_requires=install_requires, 99 python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', 100 classifiers=[ 101 'Development Status :: 5 - Production/Stable', 102 'Intended Audience :: Developers', 103 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)', 104 'Operating System :: Unix', 105 'Operating System :: POSIX', 106 'Operating System :: Microsoft :: Windows', 107 'Programming Language :: Python', 108 'Programming Language :: Python :: 2.7', 109 'Programming Language :: Python :: 3', 110 'Programming Language :: Python :: 3.4', 111 'Programming Language :: Python :: 3.5', 112 'Programming Language :: Python :: 3.6', 113 'Programming Language :: Python :: Implementation :: CPython', 114 'Programming Language :: Python :: Implementation :: PyPy', 115 'Topic :: Software Development :: Testing', 116 'Framework :: Pytest', 117 ], 118 entry_points={ 119 'pytest11': ['hypothesispytest = hypothesis.extra.pytestplugin'], 120 }, 121 long_description=open(README).read(), 122 ) 123 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/hypothesis-python/setup.py b/hypothesis-python/setup.py --- a/hypothesis-python/setup.py +++ b/hypothesis-python/setup.py @@ -68,7 +68,7 @@ extras['all'] = sorted(sum(extras.values(), [])) -install_requires = ['attrs>=16.0.0', 'coverage'] +install_requires = ['attrs>=16.0.0', 'coverage>=4.0'] # Using an environment marker on enum34 makes the dependency condition # independent of the build environemnt, which is important for wheels. # https://www.python.org/dev/peps/pep-0345/#environment-markers
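The import that crashes, `coverage.files.canonical_filename`, exists in the 4.4.1 install the reporter upgraded to but not in their 3.7.1 one, so a bare `'coverage'` requirement is satisfiable by an environment that then fails at import time; `'coverage>=4.0'` makes pip refuse the stale install instead. A small sketch of the difference between the two specifiers, using the third-party `packaging` library (versions taken from the report):

```python
from packaging.specifiers import SpecifierSet

installed = "3.7.1"  # the reporter's local coverage

print(SpecifierSet("").contains(installed))       # True  -> bare 'coverage' is satisfied
print(SpecifierSet(">=4.0").contains(installed))  # False -> the pinned form forces an upgrade
```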
{"golden_diff": "diff --git a/hypothesis-python/setup.py b/hypothesis-python/setup.py\n--- a/hypothesis-python/setup.py\n+++ b/hypothesis-python/setup.py\n@@ -68,7 +68,7 @@\n extras['all'] = sorted(sum(extras.values(), []))\n \n \n-install_requires = ['attrs>=16.0.0', 'coverage']\n+install_requires = ['attrs>=16.0.0', 'coverage>=4.0']\n # Using an environment marker on enum34 makes the dependency condition\n # independent of the build environemnt, which is important for wheels.\n # https://www.python.org/dev/peps/pep-0345/#environment-markers\n", "issue": "ImportError: cannot import name canonical_filename\nHi, I'm getting an import error on startup:\r\n\r\n```\r\n File \"/Users/adaszko/repos/fieldaware/fieldaware-venv/lib/python2.7/site-packages/hypothesis/core.py\", line 38, in <module>\r\n from coverage.files import canonical_filename\r\nImportError: cannot import name canonical_filename\r\n```\r\n\r\nI've downloaded https://files.pythonhosted.org/packages/4b/e4/5ebf3220993de03f2120a16d9e91cfd053f4c11ada0cf033f2bfe9683fcf/hypothesis-3.65.0-py2-none-any.whl and the `METADATA` file there specifies dependency on `coverage` without any version number:\r\n\r\n```\r\n% grep coverage METADATA\r\nRequires-Dist: coverage\r\n```\r\n\r\nMy local `coverage` is at `3.7.1`. It works if I upgrade `coverage` to `4.4.1`, so I think there's an issue in hypothesis in that it doesn't specify the version bound on `coverage`.\n", "before_files": [{"content": "# coding=utf-8\n#\n# This file is part of Hypothesis, which may be found at\n# https://github.com/HypothesisWorks/hypothesis-python\n#\n# Most of this work is copyright (C) 2013-2018 David R. MacIver\n# ([email protected]), but it contains contributions by others. See\n# CONTRIBUTING.rst for a full list of people who may hold copyright, and\n# consult the git log if you need to determine who owns an individual\n# contribution.\n#\n# This Source Code Form is subject to the terms of the Mozilla Public License,\n# v. 2.0. If a copy of the MPL was not distributed with this file, You can\n# obtain one at http://mozilla.org/MPL/2.0/.\n#\n# END HEADER\n\nfrom __future__ import division, print_function, absolute_import\n\nimport os\nimport sys\nimport warnings\n\nimport setuptools\n\n\ndef local_file(name):\n return os.path.relpath(os.path.join(os.path.dirname(__file__), name))\n\n\nSOURCE = local_file('src')\nREADME = local_file('README.rst')\n\nsetuptools_version = tuple(map(int, setuptools.__version__.split('.')[:2]))\n\nif setuptools_version < (36, 2):\n # Warning only - very bad if uploading bdist but fine if installing sdist.\n warnings.warn(\n 'This version of setuptools is too old to correctly store '\n 'conditional dependencies in binary wheels. For more info, see: '\n 'https://hynek.me/articles/conditional-python-dependencies/'\n )\n\n\n# Assignment to placate pyflakes. 
The actual version is from the exec that\n# follows.\n__version__ = None\n\nwith open(local_file('src/hypothesis/version.py')) as o:\n exec(o.read())\n\nassert __version__ is not None\n\n\nextras = {\n 'datetime': ['pytz'],\n 'pytz': ['pytz'],\n 'dateutil': ['python-dateutil'],\n 'fakefactory': ['Faker>=0.7'],\n 'numpy': ['numpy>=1.9.0'],\n 'pytest': ['pytest>=2.8.0'],\n # We only support Django versions with upstream support - see\n # https://www.djangoproject.com/download/#supported-versions\n 'django': ['pytz', 'django>=1.11'],\n}\n\nextras['faker'] = extras['fakefactory']\nextras['all'] = sorted(sum(extras.values(), []))\n\n\ninstall_requires = ['attrs>=16.0.0', 'coverage']\n# Using an environment marker on enum34 makes the dependency condition\n# independent of the build environemnt, which is important for wheels.\n# https://www.python.org/dev/peps/pep-0345/#environment-markers\nif sys.version_info[0] < 3 and setuptools_version < (8, 0):\n # Except really old systems, where we give up and install unconditionally\n install_requires.append('enum34')\nelse:\n install_requires.append('enum34; python_version==\"2.7\"')\n\n\nsetuptools.setup(\n name='hypothesis',\n version=__version__,\n author='David R. MacIver',\n author_email='[email protected]',\n packages=setuptools.find_packages(SOURCE),\n package_dir={'': SOURCE},\n package_data={'hypothesis': ['py.typed']},\n url=(\n 'https://github.com/HypothesisWorks/hypothesis/'\n 'tree/master/hypothesis-python'\n ),\n license='MPL v2',\n description='A library for property based testing',\n zip_safe=False,\n extras_require=extras,\n install_requires=install_requires,\n python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',\n 'Operating System :: Unix',\n 'Operating System :: POSIX',\n 'Operating System :: Microsoft :: Windows',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy',\n 'Topic :: Software Development :: Testing',\n 'Framework :: Pytest',\n ],\n entry_points={\n 'pytest11': ['hypothesispytest = hypothesis.extra.pytestplugin'],\n },\n long_description=open(README).read(),\n)\n", "path": "hypothesis-python/setup.py"}], "after_files": [{"content": "# coding=utf-8\n#\n# This file is part of Hypothesis, which may be found at\n# https://github.com/HypothesisWorks/hypothesis-python\n#\n# Most of this work is copyright (C) 2013-2018 David R. MacIver\n# ([email protected]), but it contains contributions by others. See\n# CONTRIBUTING.rst for a full list of people who may hold copyright, and\n# consult the git log if you need to determine who owns an individual\n# contribution.\n#\n# This Source Code Form is subject to the terms of the Mozilla Public License,\n# v. 2.0. 
If a copy of the MPL was not distributed with this file, You can\n# obtain one at http://mozilla.org/MPL/2.0/.\n#\n# END HEADER\n\nfrom __future__ import division, print_function, absolute_import\n\nimport os\nimport sys\nimport warnings\n\nimport setuptools\n\n\ndef local_file(name):\n return os.path.relpath(os.path.join(os.path.dirname(__file__), name))\n\n\nSOURCE = local_file('src')\nREADME = local_file('README.rst')\n\nsetuptools_version = tuple(map(int, setuptools.__version__.split('.')[:2]))\n\nif setuptools_version < (36, 2):\n # Warning only - very bad if uploading bdist but fine if installing sdist.\n warnings.warn(\n 'This version of setuptools is too old to correctly store '\n 'conditional dependencies in binary wheels. For more info, see: '\n 'https://hynek.me/articles/conditional-python-dependencies/'\n )\n\n\n# Assignment to placate pyflakes. The actual version is from the exec that\n# follows.\n__version__ = None\n\nwith open(local_file('src/hypothesis/version.py')) as o:\n exec(o.read())\n\nassert __version__ is not None\n\n\nextras = {\n 'datetime': ['pytz'],\n 'pytz': ['pytz'],\n 'dateutil': ['python-dateutil'],\n 'fakefactory': ['Faker>=0.7'],\n 'numpy': ['numpy>=1.9.0'],\n 'pytest': ['pytest>=2.8.0'],\n # We only support Django versions with upstream support - see\n # https://www.djangoproject.com/download/#supported-versions\n 'django': ['pytz', 'django>=1.11'],\n}\n\nextras['faker'] = extras['fakefactory']\nextras['all'] = sorted(sum(extras.values(), []))\n\n\ninstall_requires = ['attrs>=16.0.0', 'coverage>=4.0']\n# Using an environment marker on enum34 makes the dependency condition\n# independent of the build environemnt, which is important for wheels.\n# https://www.python.org/dev/peps/pep-0345/#environment-markers\nif sys.version_info[0] < 3 and setuptools_version < (8, 0):\n # Except really old systems, where we give up and install unconditionally\n install_requires.append('enum34')\nelse:\n install_requires.append('enum34; python_version==\"2.7\"')\n\n\nsetuptools.setup(\n name='hypothesis',\n version=__version__,\n author='David R. MacIver',\n author_email='[email protected]',\n packages=setuptools.find_packages(SOURCE),\n package_dir={'': SOURCE},\n # package_data={'': ['py.typed']}, # un-comment to release type hints\n url=(\n 'https://github.com/HypothesisWorks/hypothesis/'\n 'tree/master/hypothesis-python'\n ),\n license='MPL v2',\n description='A library for property based testing',\n zip_safe=False,\n extras_require=extras,\n install_requires=install_requires,\n python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',\n 'Operating System :: Unix',\n 'Operating System :: POSIX',\n 'Operating System :: Microsoft :: Windows',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy',\n 'Topic :: Software Development :: Testing',\n 'Framework :: Pytest',\n ],\n entry_points={\n 'pytest11': ['hypothesispytest = hypothesis.extra.pytestplugin'],\n },\n long_description=open(README).read(),\n)\n", "path": "hypothesis-python/setup.py"}]}
1825
151
gh_patches_debug_36606
rasdani/github-patches
git_diff
electricitymaps__electricitymaps-contrib-3442
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- TW production parser down ## Description This is an automatic error report generated for Taiwan (TW). Issues: - No recent data found for `production` parser ## Suggestions - Try running the parser locally using the command `poetry run test_parser TW production` - <a href="https://kibana.electricitymap.org/app/kibana#/discover/10af54f0-0c4a-11e9-85c1-1d63df8c862c?_g=(refreshInterval:('$$hashKey':'object:232',display:'5%20minutes',pause:!f,section:2,value:300000),time:(from:now-24h,mode:quick,to:now))&_a=(columns:!(message,extra.key,level),filters:!(('$state':(store:appState),meta:(alias:!n,disabled:!t,index:'96f67170-0c49-11e9-85c1-1d63df8c862c',key:level,negate:!f,params:(query:ERROR,type:phrase),type:phrase,value:ERROR),query:(match:(level:(query:ERROR,type:phrase)))),('$state':(store:appState),meta:(alias:!n,disabled:!f,index:'96f67170-0c49-11e9-85c1-1d63df8c862c',key:extra.key,negate:!f,params:(query:TW,type:phrase),type:phrase,value:TW),query:(match:(extra.key:(query:TW,type:phrase))))),index:'96f67170-0c49-11e9-85c1-1d63df8c862c',interval:auto,query:(language:lucene,query:''),sort:!('@timestamp',desc))">Explore the runtime logs</a> You can see an overview of all parser issues [here](https://github.com/tmrowco/electricitymap-contrib/wiki/Parser-issues). --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `parsers/TW.py` Content: ``` 1 #!/usr/bin/env python3 2 import arrow 3 import requests 4 import pandas 5 import dateutil 6 7 8 def fetch_production(zone_key='TW', session=None, target_datetime=None, logger=None) -> dict: 9 if target_datetime: 10 raise NotImplementedError('This parser is not yet able to parse past dates') 11 12 url = 'http://www.taipower.com.tw/d006/loadGraph/loadGraph/data/genary.txt' 13 s = session or requests.Session() 14 response = s.get(url) 15 data = response.json() 16 17 dumpDate = data[''] 18 prodData = data['aaData'] 19 20 tz = 'Asia/Taipei' 21 dumpDate = arrow.get(dumpDate, 'YYYY-MM-DD HH:mm').replace(tzinfo=dateutil.tz.gettz(tz)) 22 23 objData = pandas.DataFrame(prodData) 24 25 objData.columns = ['fueltype', 'name', 'capacity', 'output', 'percentage', 26 'additional'] 27 28 objData['fueltype'] = objData.fueltype.str.split('(').str[1] 29 objData['fueltype'] = objData.fueltype.str.split(')').str[0] 30 objData.drop('additional', axis=1, inplace=True) 31 objData.drop('percentage', axis=1, inplace=True) 32 33 objData['capacity'] = pandas.to_numeric(objData['capacity'], errors='coerce') 34 objData['output'] = pandas.to_numeric(objData['output'], errors='coerce') 35 production = pandas.DataFrame(objData.groupby('fueltype').sum()) 36 production.columns = ['capacity', 'output'] 37 38 coal_capacity = production.loc['Coal'].capacity + production.loc['IPP-Coal'].capacity 39 gas_capacity = production.loc['LNG'].capacity + production.loc['IPP-LNG'].capacity 40 oil_capacity = production.loc['Oil'].capacity + production.loc['Diesel'].capacity 41 42 coal_production = production.loc['Coal'].output + production.loc['IPP-Coal'].output 43 gas_production = production.loc['LNG'].output + production.loc['IPP-LNG'].output 44 oil_production = production.loc['Oil'].output + production.loc['Diesel'].output 45 46 # For storage, note that load will be negative, and generation positive. 
47 # We require the opposite 48 49 returndata = { 50 'zoneKey': zone_key, 51 'datetime': dumpDate.datetime, 52 'production': { 53 'coal': coal_production, 54 'gas': gas_production, 55 'oil': oil_production, 56 'hydro': production.loc['Hydro'].output, 57 'nuclear': production.loc['Nuclear'].output, 58 'solar': production.loc['Solar'].output, 59 'wind': production.loc['Wind'].output, 60 'unknown': production.loc['Co-Gen'].output 61 }, 62 'capacity': { 63 'coal': coal_capacity, 64 'gas': gas_capacity, 65 'oil': oil_capacity, 66 'hydro': production.loc['Hydro'].capacity, 67 'hydro storage':production.loc['Pumping Gen'].capacity, 68 'nuclear': production.loc['Nuclear'].capacity, 69 'solar': production.loc['Solar'].capacity, 70 'wind': production.loc['Wind'].capacity, 71 'unknown': production.loc['Co-Gen'].capacity 72 }, 73 'storage': { 74 'hydro': -1 * production.loc['Pumping Load'].output - production.loc['Pumping Gen'].output 75 }, 76 'source': 'taipower.com.tw' 77 } 78 79 return returndata 80 81 82 if __name__ == '__main__': 83 print(fetch_production()) 84 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/parsers/TW.py b/parsers/TW.py --- a/parsers/TW.py +++ b/parsers/TW.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 import arrow -import requests -import pandas import dateutil +import pandas as pd +import requests def fetch_production(zone_key='TW', session=None, target_datetime=None, logger=None) -> dict: @@ -20,21 +20,27 @@ tz = 'Asia/Taipei' dumpDate = arrow.get(dumpDate, 'YYYY-MM-DD HH:mm').replace(tzinfo=dateutil.tz.gettz(tz)) - objData = pandas.DataFrame(prodData) + objData = pd.DataFrame(prodData) - objData.columns = ['fueltype', 'name', 'capacity', 'output', 'percentage', - 'additional'] + columns = ['fueltype', 'additional_1', 'name', 'capacity', 'output', 'percentage', 'additional_2'] + assert len(objData.iloc[0]) == len(columns), "number of input columns changed" + objData.columns = columns objData['fueltype'] = objData.fueltype.str.split('(').str[1] objData['fueltype'] = objData.fueltype.str.split(')').str[0] - objData.drop('additional', axis=1, inplace=True) - objData.drop('percentage', axis=1, inplace=True) + objData.loc[:,['capacity', 'output']] = objData[['capacity', 'output']].apply(pd.to_numeric, errors='coerce') + assert not objData.capacity.isna().all(), "capacity data is entirely NaN - input column order may have changed" + assert not objData.output.isna().all(), "output data is entirely NaN - input column order may have changed" - objData['capacity'] = pandas.to_numeric(objData['capacity'], errors='coerce') - objData['output'] = pandas.to_numeric(objData['output'], errors='coerce') - production = pandas.DataFrame(objData.groupby('fueltype').sum()) + objData.drop(columns=['additional_1', 'name', 'additional_2', 'percentage'], axis=1, inplace=True) + # summing because items in returned object are for each power plant and operational units + production = pd.DataFrame(objData.groupby('fueltype').sum()) production.columns = ['capacity', 'output'] + # check output values coincide with total capacity by fuel type + check_values = production.output <= production.capacity + assert check_values.loc[~check_values.index.isin(["Co-Gen"])].all(), "output > capacity" # HACK: Co-Gen capacity is underestimated + coal_capacity = production.loc['Coal'].capacity + production.loc['IPP-Coal'].capacity gas_capacity = production.loc['LNG'].capacity + production.loc['IPP-LNG'].capacity oil_capacity = production.loc['Oil'].capacity + production.loc['Diesel'].capacity
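Beyond wiring up the new column layout (the diff goes from one `additional` column to `additional_1`/`additional_2`, i.e. Taipower widened the `genary.txt` rows, so the positional names shifted and numeric coercion started yielding NaN), the patch adds assertions so the next upstream change fails loudly instead of silently publishing bad numbers. A minimal sketch of that defensive pattern (pandas; the row is a made-up stand-in for the feed):

```python
import pandas as pd

rows = [["Coal(Coal)", "x", "Plant A", "500", "321.0", "64%", "-"]]
df = pd.DataFrame(rows)

columns = ['fueltype', 'additional_1', 'name', 'capacity', 'output',
           'percentage', 'additional_2']
assert len(df.columns) == len(columns), "number of input columns changed"
df.columns = columns

df[['capacity', 'output']] = df[['capacity', 'output']].apply(
    pd.to_numeric, errors='coerce')
# If upstream reorders fields, coercion yields all-NaN columns; these
# checks turn that into a loud failure rather than silent zeros.
assert not df.capacity.isna().all()
assert not df.output.isna().all()
assert (df.output <= df.capacity).all(), "output > capacity"
```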
{"golden_diff": "diff --git a/parsers/TW.py b/parsers/TW.py\n--- a/parsers/TW.py\n+++ b/parsers/TW.py\n@@ -1,8 +1,8 @@\n #!/usr/bin/env python3\n import arrow\n-import requests\n-import pandas\n import dateutil\n+import pandas as pd\n+import requests\n \n \n def fetch_production(zone_key='TW', session=None, target_datetime=None, logger=None) -> dict:\n@@ -20,21 +20,27 @@\n tz = 'Asia/Taipei'\n dumpDate = arrow.get(dumpDate, 'YYYY-MM-DD HH:mm').replace(tzinfo=dateutil.tz.gettz(tz))\n \n- objData = pandas.DataFrame(prodData)\n+ objData = pd.DataFrame(prodData)\n \n- objData.columns = ['fueltype', 'name', 'capacity', 'output', 'percentage',\n- 'additional']\n+ columns = ['fueltype', 'additional_1', 'name', 'capacity', 'output', 'percentage', 'additional_2']\n+ assert len(objData.iloc[0]) == len(columns), \"number of input columns changed\"\n+ objData.columns = columns\n \n objData['fueltype'] = objData.fueltype.str.split('(').str[1]\n objData['fueltype'] = objData.fueltype.str.split(')').str[0]\n- objData.drop('additional', axis=1, inplace=True)\n- objData.drop('percentage', axis=1, inplace=True)\n+ objData.loc[:,['capacity', 'output']] = objData[['capacity', 'output']].apply(pd.to_numeric, errors='coerce')\n+ assert not objData.capacity.isna().all(), \"capacity data is entirely NaN - input column order may have changed\"\n+ assert not objData.output.isna().all(), \"output data is entirely NaN - input column order may have changed\"\n \n- objData['capacity'] = pandas.to_numeric(objData['capacity'], errors='coerce')\n- objData['output'] = pandas.to_numeric(objData['output'], errors='coerce')\n- production = pandas.DataFrame(objData.groupby('fueltype').sum())\n+ objData.drop(columns=['additional_1', 'name', 'additional_2', 'percentage'], axis=1, inplace=True)\n+ # summing because items in returned object are for each power plant and operational units\n+ production = pd.DataFrame(objData.groupby('fueltype').sum())\n production.columns = ['capacity', 'output']\n \n+ # check output values coincide with total capacity by fuel type\n+ check_values = production.output <= production.capacity\n+ assert check_values.loc[~check_values.index.isin([\"Co-Gen\"])].all(), \"output > capacity\" # HACK: Co-Gen capacity is underestimated\n+\n coal_capacity = production.loc['Coal'].capacity + production.loc['IPP-Coal'].capacity\n gas_capacity = production.loc['LNG'].capacity + production.loc['IPP-LNG'].capacity\n oil_capacity = production.loc['Oil'].capacity + production.loc['Diesel'].capacity\n", "issue": "TW production parser down\n## Description\n\nThis is an automatic error report generated for Taiwan (TW).\n\nIssues:\n- No recent data found for `production` parser\n\n## Suggestions\n- Try running the parser locally using the command `poetry run test_parser TW production`\n- <a 
href=\"https://kibana.electricitymap.org/app/kibana#/discover/10af54f0-0c4a-11e9-85c1-1d63df8c862c?_g=(refreshInterval:('$$hashKey':'object:232',display:'5%20minutes',pause:!f,section:2,value:300000),time:(from:now-24h,mode:quick,to:now))&_a=(columns:!(message,extra.key,level),filters:!(('$state':(store:appState),meta:(alias:!n,disabled:!t,index:'96f67170-0c49-11e9-85c1-1d63df8c862c',key:level,negate:!f,params:(query:ERROR,type:phrase),type:phrase,value:ERROR),query:(match:(level:(query:ERROR,type:phrase)))),('$state':(store:appState),meta:(alias:!n,disabled:!f,index:'96f67170-0c49-11e9-85c1-1d63df8c862c',key:extra.key,negate:!f,params:(query:TW,type:phrase),type:phrase,value:TW),query:(match:(extra.key:(query:TW,type:phrase))))),index:'96f67170-0c49-11e9-85c1-1d63df8c862c',interval:auto,query:(language:lucene,query:''),sort:!('@timestamp',desc))\">Explore the runtime logs</a>\n\nYou can see an overview of all parser issues [here](https://github.com/tmrowco/electricitymap-contrib/wiki/Parser-issues).\n\n", "before_files": [{"content": "#!/usr/bin/env python3\nimport arrow\nimport requests\nimport pandas\nimport dateutil\n\n\ndef fetch_production(zone_key='TW', session=None, target_datetime=None, logger=None) -> dict:\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n\n url = 'http://www.taipower.com.tw/d006/loadGraph/loadGraph/data/genary.txt'\n s = session or requests.Session()\n response = s.get(url)\n data = response.json()\n\n dumpDate = data['']\n prodData = data['aaData']\n\n tz = 'Asia/Taipei'\n dumpDate = arrow.get(dumpDate, 'YYYY-MM-DD HH:mm').replace(tzinfo=dateutil.tz.gettz(tz))\n\n objData = pandas.DataFrame(prodData)\n\n objData.columns = ['fueltype', 'name', 'capacity', 'output', 'percentage',\n 'additional']\n\n objData['fueltype'] = objData.fueltype.str.split('(').str[1]\n objData['fueltype'] = objData.fueltype.str.split(')').str[0]\n objData.drop('additional', axis=1, inplace=True)\n objData.drop('percentage', axis=1, inplace=True)\n\n objData['capacity'] = pandas.to_numeric(objData['capacity'], errors='coerce')\n objData['output'] = pandas.to_numeric(objData['output'], errors='coerce')\n production = pandas.DataFrame(objData.groupby('fueltype').sum())\n production.columns = ['capacity', 'output']\n\n coal_capacity = production.loc['Coal'].capacity + production.loc['IPP-Coal'].capacity\n gas_capacity = production.loc['LNG'].capacity + production.loc['IPP-LNG'].capacity\n oil_capacity = production.loc['Oil'].capacity + production.loc['Diesel'].capacity\n\n coal_production = production.loc['Coal'].output + production.loc['IPP-Coal'].output\n gas_production = production.loc['LNG'].output + production.loc['IPP-LNG'].output\n oil_production = production.loc['Oil'].output + production.loc['Diesel'].output\n\n # For storage, note that load will be negative, and generation positive.\n # We require the opposite\n\n returndata = {\n 'zoneKey': zone_key,\n 'datetime': dumpDate.datetime,\n 'production': {\n 'coal': coal_production,\n 'gas': gas_production,\n 'oil': oil_production,\n 'hydro': production.loc['Hydro'].output,\n 'nuclear': production.loc['Nuclear'].output,\n 'solar': production.loc['Solar'].output,\n 'wind': production.loc['Wind'].output,\n 'unknown': production.loc['Co-Gen'].output\n },\n 'capacity': {\n 'coal': coal_capacity,\n 'gas': gas_capacity,\n 'oil': oil_capacity,\n 'hydro': production.loc['Hydro'].capacity,\n 'hydro storage':production.loc['Pumping Gen'].capacity,\n 'nuclear': 
production.loc['Nuclear'].capacity,\n 'solar': production.loc['Solar'].capacity,\n 'wind': production.loc['Wind'].capacity,\n 'unknown': production.loc['Co-Gen'].capacity\n },\n 'storage': {\n 'hydro': -1 * production.loc['Pumping Load'].output - production.loc['Pumping Gen'].output\n },\n 'source': 'taipower.com.tw'\n }\n\n return returndata\n\n\nif __name__ == '__main__':\n print(fetch_production())\n", "path": "parsers/TW.py"}], "after_files": [{"content": "#!/usr/bin/env python3\nimport arrow\nimport dateutil\nimport pandas as pd\nimport requests\n\n\ndef fetch_production(zone_key='TW', session=None, target_datetime=None, logger=None) -> dict:\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n\n url = 'http://www.taipower.com.tw/d006/loadGraph/loadGraph/data/genary.txt'\n s = session or requests.Session()\n response = s.get(url)\n data = response.json()\n\n dumpDate = data['']\n prodData = data['aaData']\n\n tz = 'Asia/Taipei'\n dumpDate = arrow.get(dumpDate, 'YYYY-MM-DD HH:mm').replace(tzinfo=dateutil.tz.gettz(tz))\n\n objData = pd.DataFrame(prodData)\n\n columns = ['fueltype', 'additional_1', 'name', 'capacity', 'output', 'percentage', 'additional_2']\n assert len(objData.iloc[0]) == len(columns), \"number of input columns changed\"\n objData.columns = columns\n\n objData['fueltype'] = objData.fueltype.str.split('(').str[1]\n objData['fueltype'] = objData.fueltype.str.split(')').str[0]\n objData.loc[:,['capacity', 'output']] = objData[['capacity', 'output']].apply(pd.to_numeric, errors='coerce')\n assert not objData.capacity.isna().all(), \"capacity data is entirely NaN - input column order may have changed\"\n assert not objData.output.isna().all(), \"output data is entirely NaN - input column order may have changed\"\n\n objData.drop(columns=['additional_1', 'name', 'additional_2', 'percentage'], axis=1, inplace=True)\n # summing because items in returned object are for each power plant and operational units\n production = pd.DataFrame(objData.groupby('fueltype').sum())\n production.columns = ['capacity', 'output']\n\n # check output values coincide with total capacity by fuel type\n check_values = production.output <= production.capacity\n assert check_values.loc[~check_values.index.isin([\"Co-Gen\"])].all(), \"output > capacity\" # HACK: Co-Gen capacity is underestimated\n\n coal_capacity = production.loc['Coal'].capacity + production.loc['IPP-Coal'].capacity\n gas_capacity = production.loc['LNG'].capacity + production.loc['IPP-LNG'].capacity\n oil_capacity = production.loc['Oil'].capacity + production.loc['Diesel'].capacity\n\n coal_production = production.loc['Coal'].output + production.loc['IPP-Coal'].output\n gas_production = production.loc['LNG'].output + production.loc['IPP-LNG'].output\n oil_production = production.loc['Oil'].output + production.loc['Diesel'].output\n\n # For storage, note that load will be negative, and generation positive.\n # We require the opposite\n\n returndata = {\n 'zoneKey': zone_key,\n 'datetime': dumpDate.datetime,\n 'production': {\n 'coal': coal_production,\n 'gas': gas_production,\n 'oil': oil_production,\n 'hydro': production.loc['Hydro'].output,\n 'nuclear': production.loc['Nuclear'].output,\n 'solar': production.loc['Solar'].output,\n 'wind': production.loc['Wind'].output,\n 'unknown': production.loc['Co-Gen'].output\n },\n 'capacity': {\n 'coal': coal_capacity,\n 'gas': gas_capacity,\n 'oil': oil_capacity,\n 'hydro': production.loc['Hydro'].capacity,\n 'hydro 
storage':production.loc['Pumping Gen'].capacity,\n 'nuclear': production.loc['Nuclear'].capacity,\n 'solar': production.loc['Solar'].capacity,\n 'wind': production.loc['Wind'].capacity,\n 'unknown': production.loc['Co-Gen'].capacity\n },\n 'storage': {\n 'hydro': -1 * production.loc['Pumping Load'].output - production.loc['Pumping Gen'].output\n },\n 'source': 'taipower.com.tw'\n }\n\n return returndata\n\n\nif __name__ == '__main__':\n print(fetch_production())\n", "path": "parsers/TW.py"}]}
num_tokens: 1,655
num_tokens_diff: 665
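
An illustrative aside on the pattern this record's fix relies on, separate from the dataset fields themselves: the updated `parsers/TW.py` defends against upstream layout changes by pinning the expected column list, coercing numerics, and asserting that the coercion produced real data. The sketch below reproduces that pattern on made-up rows; the sample values and the seven-column layout are assumptions for demonstration, not Taipower's live feed.

```python
import pandas as pd

# Made-up rows shaped like the seven-column feed described in the record:
# fueltype, filler, plant name, capacity, output, percentage, filler.
rows = [
    ["Type(Coal)", "", "Plant A", "800", "512.3", "64%", ""],
    ["Type(LNG)", "", "Plant B", "600", "N/A", "-", ""],
]
df = pd.DataFrame(rows)

columns = ["fueltype", "additional_1", "name", "capacity", "output", "percentage", "additional_2"]
# Fail fast if the upstream feed gains or loses a column.
assert len(df.columns) == len(columns), "number of input columns changed"
df.columns = columns

# Coerce to numbers; unparseable cells become NaN instead of raising.
df[["capacity", "output"]] = df[["capacity", "output"]].apply(pd.to_numeric, errors="coerce")
# If every value is NaN, the column order has almost certainly shifted.
assert not df["capacity"].isna().all(), "capacity data is entirely NaN"

print(df[["fueltype", "capacity", "output"]])
```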
problem_id: gh_patches_debug_9786
source: rasdani/github-patches
task_type: git_diff
in_source_id: mkdocs__mkdocs-430
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Unable to add Table of Contents to docs When I build a markdown file containing the following information with, `mkdocs build --clean`, mkdocs throws: `AttributeError: 'Markdown' object has no attribute 'toc'`. Adding `[TOC]` like this was working before, but for some reason its throwing an exception now. I'm running version `0.11.1`. Markdown file: ``` markdown For api overview and usages, check out [this page](overview.md). [TOC] Auth ================================================= ## Check if user is registered `POST` `/auth/is_registered` **paramaters** - `email` ## Login `POST` `/auth` **Parameters** - `email` - `password` **Response** The response will be something like this: ``` Stack Trace: ``` bash Traceback (most recent call last): "/usr/local/Cellar/python/2.7.8_2/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py", line 810, in __bootstrap_inner self.run() File "/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/watchdog/observers/api.py", line 199, in run self.dispatch_events(self.event_queue, self.timeout) File "/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/watchdog/observers/api.py", line 368, in dispatch_events handler.dispatch(event) File "/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/watchdog/events.py", line 322, in dispatch self.on_any_event(event) File "/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/mkdocs/serve.py", line 28, in on_any_event build(config, live_server=True) File "/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/mkdocs/build.py", line 223, in build build_pages(config) File "/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/mkdocs/build.py", line 170, in build_pages extensions=config['markdown_extensions'], strict=config['strict'] File "/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/mkdocs/build.py", line 36, in convert_markdown toc_html = md.toc AttributeError: 'Markdown' object has no attribute 'toc' ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 #!/usr/bin/env python 2 # -*- coding: utf-8 -*- 3 4 from __future__ import print_function 5 from setuptools import setup 6 import re 7 import os 8 import sys 9 10 11 name = 'mkdocs' 12 package = 'mkdocs' 13 description = 'Project documentation with Markdown.' 14 url = 'http://www.mkdocs.org' 15 author = 'Tom Christie' 16 author_email = '[email protected]' 17 license = 'BSD' 18 install_requires = [ 19 'Jinja2>=2.7.1', 20 'Markdown>=2.3.1,<2.5', 21 'PyYAML>=3.10', 22 'watchdog>=0.7.0', 23 'ghp-import>=0.4.1' 24 ] 25 26 long_description = ( 27 "MkDocs is a fast, simple and downright gorgeous static site generator " 28 "that's geared towards building project documentation. Documentation " 29 "source files are written in Markdown, and configured with a single YAML " 30 "configuration file." 31 ) 32 33 34 def get_version(package): 35 """ 36 Return package version as listed in `__version__` in `init.py`. 37 """ 38 init_py = open(os.path.join(package, '__init__.py')).read() 39 return re.search("^__version__ = ['\"]([^'\"]+)['\"]", init_py, re.MULTILINE).group(1) 40 41 42 def get_packages(package): 43 """ 44 Return root package and all sub-packages. 
45 """ 46 return [dirpath 47 for dirpath, dirnames, filenames in os.walk(package) 48 if os.path.exists(os.path.join(dirpath, '__init__.py'))] 49 50 51 def get_package_data(package): 52 """ 53 Return all files under the root package, that are not in a 54 package themselves. 55 """ 56 walk = [(dirpath.replace(package + os.sep, '', 1), filenames) 57 for dirpath, dirnames, filenames in os.walk(package) 58 if not os.path.exists(os.path.join(dirpath, '__init__.py'))] 59 60 filepaths = [] 61 for base, filenames in walk: 62 filepaths.extend([os.path.join(base, filename) 63 for filename in filenames]) 64 return {package: filepaths} 65 66 67 if sys.argv[-1] == 'publish': 68 os.system("python setup.py sdist upload") 69 args = {'version': get_version(package)} 70 print("You probably want to also tag the version now:") 71 print(" git tag -a %(version)s -m 'version %(version)s'" % args) 72 print(" git push --tags") 73 sys.exit() 74 75 76 setup( 77 name=name, 78 version=get_version(package), 79 url=url, 80 license=license, 81 description=description, 82 long_description=long_description, 83 author=author, 84 author_email=author_email, 85 packages=get_packages(package), 86 package_data=get_package_data(package), 87 install_requires=install_requires, 88 entry_points={ 89 'console_scripts': [ 90 'mkdocs = mkdocs.main:run_main', 91 ], 92 }, 93 classifiers=[ 94 'Development Status :: 5 - Production/Stable', 95 'Environment :: Console', 96 'Environment :: Web Environment', 97 'Intended Audience :: Developers', 98 'License :: OSI Approved :: BSD License', 99 'Operating System :: OS Independent', 100 'Programming Language :: Python', 101 'Programming Language :: Python :: 2', 102 'Programming Language :: Python :: 2.6', 103 'Programming Language :: Python :: 2.7', 104 'Programming Language :: Python :: 3', 105 'Programming Language :: Python :: 3.3', 106 'Programming Language :: Python :: 3.4', 107 'Topic :: Documentation', 108 'Topic :: Text Processing', 109 ] 110 ) 111 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -7,6 +7,8 @@ import os import sys +PY26 = sys.version_info[:2] == (2, 6) + name = 'mkdocs' package = 'mkdocs' @@ -16,11 +18,11 @@ author_email = '[email protected]' license = 'BSD' install_requires = [ + 'ghp-import>=0.4.1', 'Jinja2>=2.7.1', - 'Markdown>=2.3.1,<2.5', + 'Markdown>=2.3.1,<2.5' if PY26 else 'Markdown>=2.3.1', 'PyYAML>=3.10', 'watchdog>=0.7.0', - 'ghp-import>=0.4.1' ] long_description = (
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -7,6 +7,8 @@\n import os\n import sys\n \n+PY26 = sys.version_info[:2] == (2, 6)\n+\n \n name = 'mkdocs'\n package = 'mkdocs'\n@@ -16,11 +18,11 @@\n author_email = '[email protected]'\n license = 'BSD'\n install_requires = [\n+ 'ghp-import>=0.4.1',\n 'Jinja2>=2.7.1',\n- 'Markdown>=2.3.1,<2.5',\n+ 'Markdown>=2.3.1,<2.5' if PY26 else 'Markdown>=2.3.1',\n 'PyYAML>=3.10',\n 'watchdog>=0.7.0',\n- 'ghp-import>=0.4.1'\n ]\n \n long_description = (\n", "issue": "Unable to add Table of Contents to docs\nWhen I build a markdown file containing the following information with, `mkdocs build --clean`, mkdocs throws: `AttributeError: 'Markdown' object has no attribute 'toc'`. Adding `[TOC]` like this was working before, but for some reason its throwing an exception now. I'm running version `0.11.1`.\n\nMarkdown file:\n\n``` markdown\nFor api overview and usages, check out [this page](overview.md).\n\n[TOC]\n\nAuth\n=================================================\n\n## Check if user is registered\n\n`POST` `/auth/is_registered`\n\n**paramaters**\n\n- `email`\n\n## Login\n\n`POST` `/auth`\n\n**Parameters**\n\n- `email`\n- `password`\n\n**Response**\n\nThe response will be something like this:\n```\n\nStack Trace:\n\n``` bash\nTraceback (most recent call last):\n\"/usr/local/Cellar/python/2.7.8_2/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py\", line 810, in __bootstrap_inner\n self.run()\n File \"/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/watchdog/observers/api.py\", line 199, in run\n self.dispatch_events(self.event_queue, self.timeout)\n File \"/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/watchdog/observers/api.py\", line 368, in dispatch_events\n handler.dispatch(event)\n File \"/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/watchdog/events.py\", line 322, in dispatch\n self.on_any_event(event)\n File \"/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/mkdocs/serve.py\", line 28, in on_any_event\n build(config, live_server=True)\n File \"/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/mkdocs/build.py\", line 223, in build\n build_pages(config)\n File \"/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/mkdocs/build.py\", line 170, in build_pages\n extensions=config['markdown_extensions'], strict=config['strict']\n File \"/Users/administrator/dev/meet-web/venv/lib/python2.7/site-packages/mkdocs/build.py\", line 36, in convert_markdown\n toc_html = md.toc\nAttributeError: 'Markdown' object has no attribute 'toc'\n```\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import print_function\nfrom setuptools import setup\nimport re\nimport os\nimport sys\n\n\nname = 'mkdocs'\npackage = 'mkdocs'\ndescription = 'Project documentation with Markdown.'\nurl = 'http://www.mkdocs.org'\nauthor = 'Tom Christie'\nauthor_email = '[email protected]'\nlicense = 'BSD'\ninstall_requires = [\n 'Jinja2>=2.7.1',\n 'Markdown>=2.3.1,<2.5',\n 'PyYAML>=3.10',\n 'watchdog>=0.7.0',\n 'ghp-import>=0.4.1'\n]\n\nlong_description = (\n \"MkDocs is a fast, simple and downright gorgeous static site generator \"\n \"that's geared towards building project documentation. 
Documentation \"\n \"source files are written in Markdown, and configured with a single YAML \"\n \"configuration file.\"\n)\n\n\ndef get_version(package):\n \"\"\"\n Return package version as listed in `__version__` in `init.py`.\n \"\"\"\n init_py = open(os.path.join(package, '__init__.py')).read()\n return re.search(\"^__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py, re.MULTILINE).group(1)\n\n\ndef get_packages(package):\n \"\"\"\n Return root package and all sub-packages.\n \"\"\"\n return [dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n\ndef get_package_data(package):\n \"\"\"\n Return all files under the root package, that are not in a\n package themselves.\n \"\"\"\n walk = [(dirpath.replace(package + os.sep, '', 1), filenames)\n for dirpath, dirnames, filenames in os.walk(package)\n if not os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n filepaths = []\n for base, filenames in walk:\n filepaths.extend([os.path.join(base, filename)\n for filename in filenames])\n return {package: filepaths}\n\n\nif sys.argv[-1] == 'publish':\n os.system(\"python setup.py sdist upload\")\n args = {'version': get_version(package)}\n print(\"You probably want to also tag the version now:\")\n print(\" git tag -a %(version)s -m 'version %(version)s'\" % args)\n print(\" git push --tags\")\n sys.exit()\n\n\nsetup(\n name=name,\n version=get_version(package),\n url=url,\n license=license,\n description=description,\n long_description=long_description,\n author=author,\n author_email=author_email,\n packages=get_packages(package),\n package_data=get_package_data(package),\n install_requires=install_requires,\n entry_points={\n 'console_scripts': [\n 'mkdocs = mkdocs.main:run_main',\n ],\n },\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Topic :: Documentation',\n 'Topic :: Text Processing',\n ]\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import print_function\nfrom setuptools import setup\nimport re\nimport os\nimport sys\n\nPY26 = sys.version_info[:2] == (2, 6)\n\n\nname = 'mkdocs'\npackage = 'mkdocs'\ndescription = 'Project documentation with Markdown.'\nurl = 'http://www.mkdocs.org'\nauthor = 'Tom Christie'\nauthor_email = '[email protected]'\nlicense = 'BSD'\ninstall_requires = [\n 'ghp-import>=0.4.1',\n 'Jinja2>=2.7.1',\n 'Markdown>=2.3.1,<2.5' if PY26 else 'Markdown>=2.3.1',\n 'PyYAML>=3.10',\n 'watchdog>=0.7.0',\n]\n\nlong_description = (\n \"MkDocs is a fast, simple and downright gorgeous static site generator \"\n \"that's geared towards building project documentation. 
Documentation \"\n \"source files are written in Markdown, and configured with a single YAML \"\n \"configuration file.\"\n)\n\n\ndef get_version(package):\n \"\"\"\n Return package version as listed in `__version__` in `init.py`.\n \"\"\"\n init_py = open(os.path.join(package, '__init__.py')).read()\n return re.search(\"^__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py, re.MULTILINE).group(1)\n\n\ndef get_packages(package):\n \"\"\"\n Return root package and all sub-packages.\n \"\"\"\n return [dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n\ndef get_package_data(package):\n \"\"\"\n Return all files under the root package, that are not in a\n package themselves.\n \"\"\"\n walk = [(dirpath.replace(package + os.sep, '', 1), filenames)\n for dirpath, dirnames, filenames in os.walk(package)\n if not os.path.exists(os.path.join(dirpath, '__init__.py'))]\n\n filepaths = []\n for base, filenames in walk:\n filepaths.extend([os.path.join(base, filename)\n for filename in filenames])\n return {package: filepaths}\n\n\nif sys.argv[-1] == 'publish':\n os.system(\"python setup.py sdist upload\")\n args = {'version': get_version(package)}\n print(\"You probably want to also tag the version now:\")\n print(\" git tag -a %(version)s -m 'version %(version)s'\" % args)\n print(\" git push --tags\")\n sys.exit()\n\n\nsetup(\n name=name,\n version=get_version(package),\n url=url,\n license=license,\n description=description,\n long_description=long_description,\n author=author,\n author_email=author_email,\n packages=get_packages(package),\n package_data=get_package_data(package),\n install_requires=install_requires,\n entry_points={\n 'console_scripts': [\n 'mkdocs = mkdocs.main:run_main',\n ],\n },\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Topic :: Documentation',\n 'Topic :: Text Processing',\n ]\n)\n", "path": "setup.py"}]}
num_tokens: 1,822
num_tokens_diff: 212
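
A short note on the technique in the golden diff above: pinning a dependency only for an old interpreter works because `install_requires` is built at install time, so `setup.py` can branch on `sys.version_info`. A minimal, self-contained sketch of that branch (the package pins are just the ones from this record):

```python
import sys

# True only on the legacy interpreter that still needs the Markdown cap.
PY26 = sys.version_info[:2] == (2, 6)

install_requires = [
    "Markdown>=2.3.1,<2.5" if PY26 else "Markdown>=2.3.1",
]
print(install_requires)
```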
problem_id: gh_patches_debug_7033
source: rasdani/github-patches
task_type: git_diff
in_source_id: akvo__akvo-rsr-1942
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Organisations list gives timeout ## Test plan The organisations list should not give a timeout. Since this only happened on Live, it is hard to debug. ## Sentry See http://sentry.support.akvo-ops.org/rsr/live/group/742/ --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `akvo/rsr/views/organisation.py` Content: ``` 1 # -*- coding: utf-8 -*- 2 3 """Akvo RSR is covered by the GNU Affero General Public License. 4 5 See more details in the license.txt file located at the root folder of the 6 Akvo RSR module. For additional details on the GNU license please 7 see < http://www.gnu.org/licenses/agpl.html >. 8 """ 9 10 from django.db.models import Prefetch 11 from django.db.models import Count 12 from django.shortcuts import get_object_or_404, render 13 14 from ..filters import location_choices, OrganisationFilter, remove_empty_querydict_items 15 from ..models import Employment, Organisation, Project, ProjectUpdate 16 from ...utils import pagination, filter_query_string 17 from .utils import apply_keywords, org_projects, show_filter_class 18 19 ############################################################################### 20 # Organisation directory 21 ############################################################################### 22 23 24 def _public_projects(): 25 """Return all public projects.""" 26 return Project.objects.public().published().select_related('partners').order_by('-id') 27 28 29 def _page_organisations(page): 30 """Dig out the list or organisations to use.""" 31 projects = org_projects(page.organisation) if page.partner_projects else _public_projects() 32 keyword_projects = apply_keywords(page, projects) 33 return keyword_projects.all_partners() 34 35 36 def _organisation_directory_coll(request): 37 """Dig out and pass correct organisations to the view.""" 38 page = request.rsr_page 39 if not page: 40 return Organisation.objects.all() 41 return _page_organisations(page) 42 43 44 def directory(request): 45 """The Organisation list view.""" 46 qs = remove_empty_querydict_items(request.GET) 47 48 # Set show_filters to "in" if any filter is selected 49 filter_class = show_filter_class(qs, ['location', ]) 50 51 # Yank Organisation collection 52 all_organisations = _organisation_directory_coll(request) 53 54 # Easter egg feature 55 creator_organisations = request.GET.get('creator', False) 56 if creator_organisations: 57 all_organisations = all_organisations.filter(can_create_projects=True) 58 59 f = OrganisationFilter(qs, queryset=all_organisations) 60 61 # Change filter options further when on an Akvo Page 62 if request.rsr_page: 63 # Filter location filter list to only populated locations 64 f.filters['location'].extra['choices'] = location_choices(all_organisations) 65 66 # Build page 67 page = request.GET.get('page') 68 page, paginator, page_range = pagination(page, f.qs.distinct(), 10) 69 70 # Get organisations to be displayed on the map 71 if request.rsr_page and request.rsr_page.all_maps: 72 map_orgs = all_organisations 73 else: 74 map_orgs = page.object_list 75 map_orgs = map_orgs 76 77 # Get related objects of page at once 78 page.object_list = page.object_list.select_related( 79 'primary_location__country', 80 ).annotate( 81 num_employees=Count('employees', distinct=True), 82 num_projects=Count('projects', distinct=True), 83 num_updates=Count('projects__project_updates', 
distinct=True), 84 ) 85 86 return render(request, 'organisation_directory.html', { 87 'orgs_count': f.qs.distinct().count(), 88 'filter': f, 89 'page': page, 90 'paginator': paginator, 91 'page_range': page_range, 92 'show_filters': filter_class, 93 'q': filter_query_string(qs), 94 'map_organisations': map_orgs, 95 }) 96 97 98 ############################################################################### 99 # Organisation main 100 ############################################################################### 101 102 103 def main(request, organisation_id): 104 """The organisation main view.""" 105 return render(request, 'organisation_main.html', { 106 'organisation': get_object_or_404(Organisation, pk=organisation_id)}) 107 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/akvo/rsr/views/organisation.py b/akvo/rsr/views/organisation.py --- a/akvo/rsr/views/organisation.py +++ b/akvo/rsr/views/organisation.py @@ -77,10 +77,6 @@ # Get related objects of page at once page.object_list = page.object_list.select_related( 'primary_location__country', - ).annotate( - num_employees=Count('employees', distinct=True), - num_projects=Count('projects', distinct=True), - num_updates=Count('projects__project_updates', distinct=True), ) return render(request, 'organisation_directory.html', {
{"golden_diff": "diff --git a/akvo/rsr/views/organisation.py b/akvo/rsr/views/organisation.py\n--- a/akvo/rsr/views/organisation.py\n+++ b/akvo/rsr/views/organisation.py\n@@ -77,10 +77,6 @@\n # Get related objects of page at once\n page.object_list = page.object_list.select_related(\n 'primary_location__country',\n- ).annotate(\n- num_employees=Count('employees', distinct=True),\n- num_projects=Count('projects', distinct=True),\n- num_updates=Count('projects__project_updates', distinct=True),\n )\n \n return render(request, 'organisation_directory.html', {\n", "issue": "Organisations list gives timeout\n## Test plan\n\nThe organisations list should not give a timeout. Since this only happened on Live, it is hard to debug.\n## Sentry\n\nSee http://sentry.support.akvo-ops.org/rsr/live/group/742/\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Akvo RSR is covered by the GNU Affero General Public License.\n\nSee more details in the license.txt file located at the root folder of the\nAkvo RSR module. For additional details on the GNU license please\nsee < http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nfrom django.db.models import Prefetch\nfrom django.db.models import Count\nfrom django.shortcuts import get_object_or_404, render\n\nfrom ..filters import location_choices, OrganisationFilter, remove_empty_querydict_items\nfrom ..models import Employment, Organisation, Project, ProjectUpdate\nfrom ...utils import pagination, filter_query_string\nfrom .utils import apply_keywords, org_projects, show_filter_class\n\n###############################################################################\n# Organisation directory\n###############################################################################\n\n\ndef _public_projects():\n \"\"\"Return all public projects.\"\"\"\n return Project.objects.public().published().select_related('partners').order_by('-id')\n\n\ndef _page_organisations(page):\n \"\"\"Dig out the list or organisations to use.\"\"\"\n projects = org_projects(page.organisation) if page.partner_projects else _public_projects()\n keyword_projects = apply_keywords(page, projects)\n return keyword_projects.all_partners()\n\n\ndef _organisation_directory_coll(request):\n \"\"\"Dig out and pass correct organisations to the view.\"\"\"\n page = request.rsr_page\n if not page:\n return Organisation.objects.all()\n return _page_organisations(page)\n\n\ndef directory(request):\n \"\"\"The Organisation list view.\"\"\"\n qs = remove_empty_querydict_items(request.GET)\n\n # Set show_filters to \"in\" if any filter is selected\n filter_class = show_filter_class(qs, ['location', ])\n\n # Yank Organisation collection\n all_organisations = _organisation_directory_coll(request)\n\n # Easter egg feature\n creator_organisations = request.GET.get('creator', False)\n if creator_organisations:\n all_organisations = all_organisations.filter(can_create_projects=True)\n\n f = OrganisationFilter(qs, queryset=all_organisations)\n\n # Change filter options further when on an Akvo Page\n if request.rsr_page:\n # Filter location filter list to only populated locations\n f.filters['location'].extra['choices'] = location_choices(all_organisations)\n\n # Build page\n page = request.GET.get('page')\n page, paginator, page_range = pagination(page, f.qs.distinct(), 10)\n\n # Get organisations to be displayed on the map\n if request.rsr_page and request.rsr_page.all_maps:\n map_orgs = all_organisations\n else:\n map_orgs = page.object_list\n map_orgs = map_orgs\n\n # Get related objects of page at 
once\n page.object_list = page.object_list.select_related(\n 'primary_location__country',\n ).annotate(\n num_employees=Count('employees', distinct=True),\n num_projects=Count('projects', distinct=True),\n num_updates=Count('projects__project_updates', distinct=True),\n )\n\n return render(request, 'organisation_directory.html', {\n 'orgs_count': f.qs.distinct().count(),\n 'filter': f,\n 'page': page,\n 'paginator': paginator,\n 'page_range': page_range,\n 'show_filters': filter_class,\n 'q': filter_query_string(qs),\n 'map_organisations': map_orgs,\n })\n\n\n###############################################################################\n# Organisation main\n###############################################################################\n\n\ndef main(request, organisation_id):\n \"\"\"The organisation main view.\"\"\"\n return render(request, 'organisation_main.html', {\n 'organisation': get_object_or_404(Organisation, pk=organisation_id)})\n", "path": "akvo/rsr/views/organisation.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Akvo RSR is covered by the GNU Affero General Public License.\n\nSee more details in the license.txt file located at the root folder of the\nAkvo RSR module. For additional details on the GNU license please\nsee < http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nfrom django.db.models import Prefetch\nfrom django.db.models import Count\nfrom django.shortcuts import get_object_or_404, render\n\nfrom ..filters import location_choices, OrganisationFilter, remove_empty_querydict_items\nfrom ..models import Employment, Organisation, Project, ProjectUpdate\nfrom ...utils import pagination, filter_query_string\nfrom .utils import apply_keywords, org_projects, show_filter_class\n\n###############################################################################\n# Organisation directory\n###############################################################################\n\n\ndef _public_projects():\n \"\"\"Return all public projects.\"\"\"\n return Project.objects.public().published().select_related('partners').order_by('-id')\n\n\ndef _page_organisations(page):\n \"\"\"Dig out the list or organisations to use.\"\"\"\n projects = org_projects(page.organisation) if page.partner_projects else _public_projects()\n keyword_projects = apply_keywords(page, projects)\n return keyword_projects.all_partners()\n\n\ndef _organisation_directory_coll(request):\n \"\"\"Dig out and pass correct organisations to the view.\"\"\"\n page = request.rsr_page\n if not page:\n return Organisation.objects.all()\n return _page_organisations(page)\n\n\ndef directory(request):\n \"\"\"The Organisation list view.\"\"\"\n qs = remove_empty_querydict_items(request.GET)\n\n # Set show_filters to \"in\" if any filter is selected\n filter_class = show_filter_class(qs, ['location', ])\n\n # Yank Organisation collection\n all_organisations = _organisation_directory_coll(request)\n\n # Easter egg feature\n creator_organisations = request.GET.get('creator', False)\n if creator_organisations:\n all_organisations = all_organisations.filter(can_create_projects=True)\n\n f = OrganisationFilter(qs, queryset=all_organisations)\n\n # Change filter options further when on an Akvo Page\n if request.rsr_page:\n # Filter location filter list to only populated locations\n f.filters['location'].extra['choices'] = location_choices(all_organisations)\n\n # Build page\n page = request.GET.get('page')\n page, paginator, page_range = pagination(page, f.qs.distinct(), 10)\n\n # Get organisations to be displayed 
on the map\n if request.rsr_page and request.rsr_page.all_maps:\n map_orgs = all_organisations\n else:\n map_orgs = page.object_list\n map_orgs = map_orgs\n\n # Get related objects of page at once\n page.object_list = page.object_list.select_related(\n 'primary_location__country',\n )\n\n return render(request, 'organisation_directory.html', {\n 'orgs_count': f.qs.distinct().count(),\n 'filter': f,\n 'page': page,\n 'paginator': paginator,\n 'page_range': page_range,\n 'show_filters': filter_class,\n 'q': filter_query_string(qs),\n 'map_organisations': map_orgs,\n })\n\n\n###############################################################################\n# Organisation main\n###############################################################################\n\n\ndef main(request, organisation_id):\n \"\"\"The organisation main view.\"\"\"\n return render(request, 'organisation_main.html', {\n 'organisation': get_object_or_404(Organisation, pk=organisation_id)})\n", "path": "akvo/rsr/views/organisation.py"}]}
num_tokens: 1,319
num_tokens_diff: 148
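
Why dropping the `annotate()` call can cure a timeout deserves a line of explanation (this is a plausible reading of the fix, not something the issue confirms): each `Count()` over a different reverse relation adds another one-to-many JOIN, and the joined row set grows multiplicatively before `DISTINCT` counting collapses it. A back-of-the-envelope sketch with invented cardinalities:

```python
# Hypothetical per-organisation cardinalities, chosen only to show the blow-up.
employees = 50
projects = 200
updates_per_project = 30

# Rows the database materialises for ONE organisation when employees,
# projects and projects__project_updates are all joined in a single query.
joined_rows = employees * projects * updates_per_project
print(joined_rows)  # 300000 rows per organisation, before DISTINCT
```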
problem_id: gh_patches_debug_23098
source: rasdani/github-patches
task_type: git_diff
in_source_id: easybuilders__easybuild-framework-4292
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Regression with versionsuffix types Commit https://github.com/easybuilders/easybuild-framework/commit/0e5ba5c858 introduced a check for string-type for `versionsuffix`, while `None` used to be an accepted value for `versionsuffix`. Our hooks replace many version suffixes with `None`. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `easybuild/tools/module_naming_scheme/utilities.py` Content: ``` 1 ## 2 # Copyright 2009-2023 Ghent University 3 # 4 # This file is part of EasyBuild, 5 # originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), 6 # with support of Ghent University (http://ugent.be/hpc), 7 # the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), 8 # Flemish Research Foundation (FWO) (http://www.fwo.be/en) 9 # and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). 10 # 11 # https://github.com/easybuilders/easybuild 12 # 13 # EasyBuild is free software: you can redistribute it and/or modify 14 # it under the terms of the GNU General Public License as published by 15 # the Free Software Foundation v2. 16 # 17 # EasyBuild is distributed in the hope that it will be useful, 18 # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 20 # GNU General Public License for more details. 21 # 22 # You should have received a copy of the GNU General Public License 23 # along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. 24 ## 25 """ 26 Utility functions for implementating module naming schemes. 27 28 Authors: 29 30 * Stijn De Weirdt (Ghent University) 31 * Dries Verdegem (Ghent University) 32 * Kenneth Hoste (Ghent University) 33 * Pieter De Baets (Ghent University) 34 * Jens Timmerman (Ghent University) 35 * Fotis Georgatos (Uni.Lu, NTUA) 36 """ 37 import os 38 import string 39 40 from easybuild.base import fancylogger 41 from easybuild.tools.build_log import EasyBuildError 42 from easybuild.tools.module_naming_scheme.mns import ModuleNamingScheme 43 from easybuild.tools.py2vs3 import string_type 44 from easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME, is_system_toolchain 45 from easybuild.tools.utilities import get_subclasses, import_available_modules 46 47 _log = fancylogger.getLogger('module_naming_scheme.utilities', fname=False) 48 49 50 def det_full_ec_version(ec): 51 """ 52 Determine exact install version, based on supplied easyconfig. 53 e.g. 
1.2.3-goalf-1.1.0-no-OFED or 1.2.3 (for system toolchains) 54 """ 55 56 ecver = None 57 toolchain = ec.get('toolchain', {'name': SYSTEM_TOOLCHAIN_NAME}) 58 59 # determine main install version based on toolchain 60 if is_system_toolchain(toolchain['name']): 61 ecver = ec['version'] 62 else: 63 ecver = "%s-%s-%s" % (ec['version'], toolchain['name'], toolchain['version']) 64 65 # prepend/append version prefix/suffix 66 versionprefix = ec.get('versionprefix', '') 67 if not isinstance(versionprefix, string_type): 68 raise EasyBuildError("versionprefix value should be a string, found '%s': %s (full spec: %s)", 69 type(versionprefix).__name__, versionprefix, ec) 70 71 versionsuffix = ec.get('versionsuffix', '') 72 if not isinstance(versionsuffix, string_type): 73 raise EasyBuildError("versionsuffix value should be a string, found '%s': %s (full spec: %s)", 74 type(versionsuffix).__name__, versionsuffix, ec) 75 76 ecver = ''.join([x for x in [versionprefix, ecver, versionsuffix] if x]) 77 78 return ecver 79 80 81 def avail_module_naming_schemes(): 82 """ 83 Returns a list of available module naming schemes. 84 """ 85 # all ModuleNamingScheme subclasses available in easybuild.tools.module_naming_scheme namespace are eligible 86 import_available_modules('easybuild.tools.module_naming_scheme') 87 88 # construct name-to-class dict of available module naming scheme 89 avail_mnss = dict([(x.__name__, x) for x in get_subclasses(ModuleNamingScheme)]) 90 91 return avail_mnss 92 93 94 def is_valid_module_name(mod_name): 95 """Check whether the specified value is a valid module name.""" 96 # module name must be a string 97 if not isinstance(mod_name, string_type): 98 _log.warning("Wrong type for module name %s (%s), should be a string" % (mod_name, type(mod_name))) 99 return False 100 # module name must be relative path 101 elif mod_name.startswith(os.path.sep): 102 _log.warning("Module name (%s) should be a relative file path" % mod_name) 103 return False 104 # module name should not be empty 105 elif not len(mod_name) > 0: 106 _log.warning("Module name (%s) should have length > 0." % mod_name) 107 return False 108 else: 109 # check whether module name only contains printable characters, since it's used as a filename 110 # (except for carriage-control characters \r, \x0b and \xoc) 111 invalid_chars = [x for x in mod_name if x not in string.printable or x in '\r\x0b\x0c'] 112 if len(invalid_chars) > 0: 113 _log.warning("Module name %s contains invalid characters: %s" % (mod_name, invalid_chars)) 114 return False 115 _log.debug("Module name %s validated" % mod_name) 116 return True 117 118 119 def det_hidden_modname(modname): 120 """Determine the hidden equivalent of the specified module name.""" 121 moddir = os.path.dirname(modname) 122 modfile = os.path.basename(modname) 123 return os.path.join(moddir, '.%s' % modfile).lstrip(os.path.sep) 124 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/easybuild/tools/module_naming_scheme/utilities.py b/easybuild/tools/module_naming_scheme/utilities.py --- a/easybuild/tools/module_naming_scheme/utilities.py +++ b/easybuild/tools/module_naming_scheme/utilities.py @@ -64,16 +64,16 @@ # prepend/append version prefix/suffix versionprefix = ec.get('versionprefix', '') - if not isinstance(versionprefix, string_type): + if versionprefix and not isinstance(versionprefix, string_type): raise EasyBuildError("versionprefix value should be a string, found '%s': %s (full spec: %s)", type(versionprefix).__name__, versionprefix, ec) versionsuffix = ec.get('versionsuffix', '') - if not isinstance(versionsuffix, string_type): + if versionsuffix and not isinstance(versionsuffix, string_type): raise EasyBuildError("versionsuffix value should be a string, found '%s': %s (full spec: %s)", type(versionsuffix).__name__, versionsuffix, ec) - ecver = ''.join([x for x in [versionprefix, ecver, versionsuffix] if x]) + ecver = ''.join([x for x in [versionprefix or '', ecver, versionsuffix or ''] if x]) return ecver
{"golden_diff": "diff --git a/easybuild/tools/module_naming_scheme/utilities.py b/easybuild/tools/module_naming_scheme/utilities.py\n--- a/easybuild/tools/module_naming_scheme/utilities.py\n+++ b/easybuild/tools/module_naming_scheme/utilities.py\n@@ -64,16 +64,16 @@\n \n # prepend/append version prefix/suffix\n versionprefix = ec.get('versionprefix', '')\n- if not isinstance(versionprefix, string_type):\n+ if versionprefix and not isinstance(versionprefix, string_type):\n raise EasyBuildError(\"versionprefix value should be a string, found '%s': %s (full spec: %s)\",\n type(versionprefix).__name__, versionprefix, ec)\n \n versionsuffix = ec.get('versionsuffix', '')\n- if not isinstance(versionsuffix, string_type):\n+ if versionsuffix and not isinstance(versionsuffix, string_type):\n raise EasyBuildError(\"versionsuffix value should be a string, found '%s': %s (full spec: %s)\",\n type(versionsuffix).__name__, versionsuffix, ec)\n \n- ecver = ''.join([x for x in [versionprefix, ecver, versionsuffix] if x])\n+ ecver = ''.join([x for x in [versionprefix or '', ecver, versionsuffix or ''] if x])\n \n return ecver\n", "issue": "Regression with versionsuffix types\nCommit https://github.com/easybuilders/easybuild-framework/commit/0e5ba5c858\r\nintroduced a check for string-type for `versionsuffix`, while `None` used to be an accepted value for `versionsuffix`. Our hooks replace many version suffixes with `None`. \n", "before_files": [{"content": "##\n# Copyright 2009-2023 Ghent University\n#\n# This file is part of EasyBuild,\n# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),\n# with support of Ghent University (http://ugent.be/hpc),\n# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),\n# Flemish Research Foundation (FWO) (http://www.fwo.be/en)\n# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).\n#\n# https://github.com/easybuilders/easybuild\n#\n# EasyBuild is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation v2.\n#\n# EasyBuild is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.\n##\n\"\"\"\nUtility functions for implementating module naming schemes.\n\nAuthors:\n\n* Stijn De Weirdt (Ghent University)\n* Dries Verdegem (Ghent University)\n* Kenneth Hoste (Ghent University)\n* Pieter De Baets (Ghent University)\n* Jens Timmerman (Ghent University)\n* Fotis Georgatos (Uni.Lu, NTUA)\n\"\"\"\nimport os\nimport string\n\nfrom easybuild.base import fancylogger\nfrom easybuild.tools.build_log import EasyBuildError\nfrom easybuild.tools.module_naming_scheme.mns import ModuleNamingScheme\nfrom easybuild.tools.py2vs3 import string_type\nfrom easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME, is_system_toolchain\nfrom easybuild.tools.utilities import get_subclasses, import_available_modules\n\n_log = fancylogger.getLogger('module_naming_scheme.utilities', fname=False)\n\n\ndef det_full_ec_version(ec):\n \"\"\"\n Determine exact install version, based on supplied easyconfig.\n e.g. 
1.2.3-goalf-1.1.0-no-OFED or 1.2.3 (for system toolchains)\n \"\"\"\n\n ecver = None\n toolchain = ec.get('toolchain', {'name': SYSTEM_TOOLCHAIN_NAME})\n\n # determine main install version based on toolchain\n if is_system_toolchain(toolchain['name']):\n ecver = ec['version']\n else:\n ecver = \"%s-%s-%s\" % (ec['version'], toolchain['name'], toolchain['version'])\n\n # prepend/append version prefix/suffix\n versionprefix = ec.get('versionprefix', '')\n if not isinstance(versionprefix, string_type):\n raise EasyBuildError(\"versionprefix value should be a string, found '%s': %s (full spec: %s)\",\n type(versionprefix).__name__, versionprefix, ec)\n\n versionsuffix = ec.get('versionsuffix', '')\n if not isinstance(versionsuffix, string_type):\n raise EasyBuildError(\"versionsuffix value should be a string, found '%s': %s (full spec: %s)\",\n type(versionsuffix).__name__, versionsuffix, ec)\n\n ecver = ''.join([x for x in [versionprefix, ecver, versionsuffix] if x])\n\n return ecver\n\n\ndef avail_module_naming_schemes():\n \"\"\"\n Returns a list of available module naming schemes.\n \"\"\"\n # all ModuleNamingScheme subclasses available in easybuild.tools.module_naming_scheme namespace are eligible\n import_available_modules('easybuild.tools.module_naming_scheme')\n\n # construct name-to-class dict of available module naming scheme\n avail_mnss = dict([(x.__name__, x) for x in get_subclasses(ModuleNamingScheme)])\n\n return avail_mnss\n\n\ndef is_valid_module_name(mod_name):\n \"\"\"Check whether the specified value is a valid module name.\"\"\"\n # module name must be a string\n if not isinstance(mod_name, string_type):\n _log.warning(\"Wrong type for module name %s (%s), should be a string\" % (mod_name, type(mod_name)))\n return False\n # module name must be relative path\n elif mod_name.startswith(os.path.sep):\n _log.warning(\"Module name (%s) should be a relative file path\" % mod_name)\n return False\n # module name should not be empty\n elif not len(mod_name) > 0:\n _log.warning(\"Module name (%s) should have length > 0.\" % mod_name)\n return False\n else:\n # check whether module name only contains printable characters, since it's used as a filename\n # (except for carriage-control characters \\r, \\x0b and \\xoc)\n invalid_chars = [x for x in mod_name if x not in string.printable or x in '\\r\\x0b\\x0c']\n if len(invalid_chars) > 0:\n _log.warning(\"Module name %s contains invalid characters: %s\" % (mod_name, invalid_chars))\n return False\n _log.debug(\"Module name %s validated\" % mod_name)\n return True\n\n\ndef det_hidden_modname(modname):\n \"\"\"Determine the hidden equivalent of the specified module name.\"\"\"\n moddir = os.path.dirname(modname)\n modfile = os.path.basename(modname)\n return os.path.join(moddir, '.%s' % modfile).lstrip(os.path.sep)\n", "path": "easybuild/tools/module_naming_scheme/utilities.py"}], "after_files": [{"content": "##\n# Copyright 2009-2023 Ghent University\n#\n# This file is part of EasyBuild,\n# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),\n# with support of Ghent University (http://ugent.be/hpc),\n# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),\n# Flemish Research Foundation (FWO) (http://www.fwo.be/en)\n# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).\n#\n# https://github.com/easybuilders/easybuild\n#\n# EasyBuild is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as 
published by\n# the Free Software Foundation v2.\n#\n# EasyBuild is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.\n##\n\"\"\"\nUtility functions for implementating module naming schemes.\n\nAuthors:\n\n* Stijn De Weirdt (Ghent University)\n* Dries Verdegem (Ghent University)\n* Kenneth Hoste (Ghent University)\n* Pieter De Baets (Ghent University)\n* Jens Timmerman (Ghent University)\n* Fotis Georgatos (Uni.Lu, NTUA)\n\"\"\"\nimport os\nimport string\n\nfrom easybuild.base import fancylogger\nfrom easybuild.tools.build_log import EasyBuildError\nfrom easybuild.tools.module_naming_scheme.mns import ModuleNamingScheme\nfrom easybuild.tools.py2vs3 import string_type\nfrom easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME, is_system_toolchain\nfrom easybuild.tools.utilities import get_subclasses, import_available_modules\n\n_log = fancylogger.getLogger('module_naming_scheme.utilities', fname=False)\n\n\ndef det_full_ec_version(ec):\n \"\"\"\n Determine exact install version, based on supplied easyconfig.\n e.g. 1.2.3-goalf-1.1.0-no-OFED or 1.2.3 (for system toolchains)\n \"\"\"\n\n ecver = None\n toolchain = ec.get('toolchain', {'name': SYSTEM_TOOLCHAIN_NAME})\n\n # determine main install version based on toolchain\n if is_system_toolchain(toolchain['name']):\n ecver = ec['version']\n else:\n ecver = \"%s-%s-%s\" % (ec['version'], toolchain['name'], toolchain['version'])\n\n # prepend/append version prefix/suffix\n versionprefix = ec.get('versionprefix', '')\n if versionprefix and not isinstance(versionprefix, string_type):\n raise EasyBuildError(\"versionprefix value should be a string, found '%s': %s (full spec: %s)\",\n type(versionprefix).__name__, versionprefix, ec)\n\n versionsuffix = ec.get('versionsuffix', '')\n if versionsuffix and not isinstance(versionsuffix, string_type):\n raise EasyBuildError(\"versionsuffix value should be a string, found '%s': %s (full spec: %s)\",\n type(versionsuffix).__name__, versionsuffix, ec)\n\n ecver = ''.join([x for x in [versionprefix or '', ecver, versionsuffix or ''] if x])\n\n return ecver\n\n\ndef avail_module_naming_schemes():\n \"\"\"\n Returns a list of available module naming schemes.\n \"\"\"\n # all ModuleNamingScheme subclasses available in easybuild.tools.module_naming_scheme namespace are eligible\n import_available_modules('easybuild.tools.module_naming_scheme')\n\n # construct name-to-class dict of available module naming scheme\n avail_mnss = dict([(x.__name__, x) for x in get_subclasses(ModuleNamingScheme)])\n\n return avail_mnss\n\n\ndef is_valid_module_name(mod_name):\n \"\"\"Check whether the specified value is a valid module name.\"\"\"\n # module name must be a string\n if not isinstance(mod_name, string_type):\n _log.warning(\"Wrong type for module name %s (%s), should be a string\" % (mod_name, type(mod_name)))\n return False\n # module name must be relative path\n elif mod_name.startswith(os.path.sep):\n _log.warning(\"Module name (%s) should be a relative file path\" % mod_name)\n return False\n # module name should not be empty\n elif not len(mod_name) > 0:\n _log.warning(\"Module name (%s) should have length > 0.\" % mod_name)\n return False\n else:\n # check whether module name only contains 
printable characters, since it's used as a filename\n # (except for carriage-control characters \\r, \\x0b and \\xoc)\n invalid_chars = [x for x in mod_name if x not in string.printable or x in '\\r\\x0b\\x0c']\n if len(invalid_chars) > 0:\n _log.warning(\"Module name %s contains invalid characters: %s\" % (mod_name, invalid_chars))\n return False\n _log.debug(\"Module name %s validated\" % mod_name)\n return True\n\n\ndef det_hidden_modname(modname):\n \"\"\"Determine the hidden equivalent of the specified module name.\"\"\"\n moddir = os.path.dirname(modname)\n modfile = os.path.basename(modname)\n return os.path.join(moddir, '.%s' % modfile).lstrip(os.path.sep)\n", "path": "easybuild/tools/module_naming_scheme/utilities.py"}]}
num_tokens: 1,826
num_tokens_diff: 295
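
The essence of this fix, shown outside the framework: prefix the `isinstance` check with a truthiness test so `None` sails through, then normalise `None` to `''` before joining. A standalone sketch, with plain `str` standing in for EasyBuild's py2/py3 `string_type`:

```python
def check_affix(value, name):
    # None (and '') are accepted; any other non-string still raises.
    if value and not isinstance(value, str):
        raise TypeError("%s value should be a string, found %r" % (name, value))
    return value or ""  # normalise None to '' for the join below

parts = [check_affix(None, "versionprefix"), "1.2.3", check_affix("-CUDA", "versionsuffix")]
print("".join(p for p in parts if p))  # -> 1.2.3-CUDA
```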
problem_id: gh_patches_debug_18476
source: rasdani/github-patches
task_type: git_diff
in_source_id: learningequality__kolibri-11433
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Allow site title to be customised ## Overview Allow the site title to be customised; it’s currently hardcoded as ‘Kolibri’. #### Description and outcomes The site title is used in only a few places: the `<title>` of the base page and the ‘unsupported browser’ page, and the name in the PWA manifest. Almost all of the time, the title is overridden by the plugin being used, via vuejs, so users will typically see something like ‘Explore’ or ‘Library’ instead of ‘Kolibri’. The place where the default ‘Kolibri’ title is slightly problematic at the moment is in the PWA plugin: the name of the PWA is set to ‘Kolibri’, and that’s shown much more prominently in the browser’s list of PWA apps, or on the desktop app chooser when trying to run it. For Endless Key in particular, that’s a bit problematic because users will likely try to find the PWA from their desktop by searching for ‘Endless Key’ rather than ‘Kolibri’. So it would be good to either be able to: - Separate the site title from the name of the platform (which will always be Kolibri), and allow the site title to be customised. - Or, specifically set the site title in the configuration for the PWA plugin. The second option is much more self-contained, but doesn’t seem semantically correct to me. The PWA manifest should be reflecting the main site’s configuration. #### Resources - https://developer.mozilla.org/en-US/docs/Web/Manifest/name - https://developer.mozilla.org/en-US/docs/Web/Manifest/short_name #### Accessibility Requirements Having an installed PWA use the name the users will be most familiar with it seems like an accessibility issue, although I have not been approaching it from that angle and don’t know which specific accessibility spec applies here. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `kolibri/core/templatetags/core_tags.py` Content: ``` 1 """ 2 Kolibri template tags 3 ===================== 4 """ 5 from __future__ import absolute_import 6 from __future__ import print_function 7 from __future__ import unicode_literals 8 9 from django import template 10 from django.templatetags.static import static 11 from django.utils.html import format_html 12 13 from kolibri.core.hooks import FrontEndBaseASyncHook 14 from kolibri.core.hooks import FrontEndBaseHeadHook 15 from kolibri.core.hooks import FrontEndBaseSyncHook 16 from kolibri.core.theme_hook import ThemeHook 17 18 register = template.Library() 19 20 21 @register.simple_tag() 22 def frontend_base_assets(): 23 """ 24 This is a script tag for all ``FrontEndAssetHook`` hooks that implement a 25 render_to_html() method - this is used in ``/base.html`` template to 26 populate any Javascript and CSS that should be loaded at page load. 27 28 :return: HTML of script tags to insert into base.html 29 """ 30 return FrontEndBaseSyncHook.html() 31 32 33 @register.simple_tag() 34 def frontend_base_async_assets(): 35 """ 36 This is a script tag for all ``FrontEndAssetHook`` hooks that implement a 37 render_to_html() method - this is used in ``/base.html`` template to 38 populate any Javascript and CSS that should be loaded at page load. 
39 40 :return: HTML of script tags to insert into base.html 41 """ 42 return FrontEndBaseASyncHook.html() 43 44 45 @register.simple_tag() 46 def frontend_base_head_markup(): 47 """ 48 This is a script tag for all ``FrontEndBaseHeadHook`` hooks that implement 49 a render_to_html() method - this is used in the ``/base.html`` template to 50 inject arbitrary markup into the ``<head>`` element. 51 52 :return: HTML to insert into head of base.html 53 """ 54 return FrontEndBaseHeadHook.html() 55 56 57 @register.simple_tag() 58 def theme_favicon(): 59 """ 60 Render a favicon link to put in the <head> tag of base.html, if a favicon is 61 provided by the theme. If not, a default will be returned. 62 """ 63 favicon_urls = [ 64 logo["src"] 65 for logo in ThemeHook.get_theme().get("logos", []) 66 if logo.get("content_type", "") == "image/vnd.microsoft.icon" 67 ] 68 69 # Choose the first available .ico file. It's unlikely there's more than 70 # one specified in the theme. 71 favicon_url = favicon_urls[0] if favicon_urls else static("assets/logo.ico") 72 73 return format_html('<link rel="shortcut icon" href="{}">', favicon_url) 74 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/kolibri/core/templatetags/core_tags.py b/kolibri/core/templatetags/core_tags.py --- a/kolibri/core/templatetags/core_tags.py +++ b/kolibri/core/templatetags/core_tags.py @@ -14,6 +14,7 @@ from kolibri.core.hooks import FrontEndBaseHeadHook from kolibri.core.hooks import FrontEndBaseSyncHook from kolibri.core.theme_hook import ThemeHook +from kolibri.utils.translation import ugettext as _ register = template.Library() @@ -71,3 +72,13 @@ favicon_url = favicon_urls[0] if favicon_urls else static("assets/logo.ico") return format_html('<link rel="shortcut icon" href="{}">', favicon_url) + + [email protected]_tag() +def site_title(): + """ + Return the text of the site title, if provided by the theme. If not, the + default will be returned. The site title may be translated, to allow for + transliteration into other alphabets where needed. + """ + return ThemeHook.get_theme().get("siteTitle", _("Kolibri"))
{"golden_diff": "diff --git a/kolibri/core/templatetags/core_tags.py b/kolibri/core/templatetags/core_tags.py\n--- a/kolibri/core/templatetags/core_tags.py\n+++ b/kolibri/core/templatetags/core_tags.py\n@@ -14,6 +14,7 @@\n from kolibri.core.hooks import FrontEndBaseHeadHook\n from kolibri.core.hooks import FrontEndBaseSyncHook\n from kolibri.core.theme_hook import ThemeHook\n+from kolibri.utils.translation import ugettext as _\n \n register = template.Library()\n \n@@ -71,3 +72,13 @@\n favicon_url = favicon_urls[0] if favicon_urls else static(\"assets/logo.ico\")\n \n return format_html('<link rel=\"shortcut icon\" href=\"{}\">', favicon_url)\n+\n+\[email protected]_tag()\n+def site_title():\n+ \"\"\"\n+ Return the text of the site title, if provided by the theme. If not, the\n+ default will be returned. The site title may be translated, to allow for\n+ transliteration into other alphabets where needed.\n+ \"\"\"\n+ return ThemeHook.get_theme().get(\"siteTitle\", _(\"Kolibri\"))\n", "issue": "Allow site title to be customised\n## Overview\r\n\r\nAllow the site title to be customised; it\u2019s currently hardcoded as \u2018Kolibri\u2019.\r\n\r\n#### Description and outcomes\r\n\r\nThe site title is used in only a few places: the `<title>` of the base page and the \u2018unsupported browser\u2019 page, and the name in the PWA manifest.\r\n\r\nAlmost all of the time, the title is overridden by the plugin being used, via vuejs, so users will typically see something like \u2018Explore\u2019 or \u2018Library\u2019 instead of \u2018Kolibri\u2019.\r\n\r\nThe place where the default \u2018Kolibri\u2019 title is slightly problematic at the moment is in the PWA plugin: the name of the PWA is set to \u2018Kolibri\u2019, and that\u2019s shown much more prominently in the browser\u2019s list of PWA apps, or on the desktop app chooser when trying to run it.\r\n\r\nFor Endless Key in particular, that\u2019s a bit problematic because users will likely try to find the PWA from their desktop by searching for \u2018Endless Key\u2019 rather than \u2018Kolibri\u2019.\r\n\r\nSo it would be good to either be able to:\r\n - Separate the site title from the name of the platform (which will always be Kolibri), and allow the site title to be customised.\r\n - Or, specifically set the site title in the configuration for the PWA plugin.\r\n\r\nThe second option is much more self-contained, but doesn\u2019t seem semantically correct to me. 
The PWA manifest should be reflecting the main site\u2019s configuration.\r\n\r\n#### Resources\r\n\r\n - https://developer.mozilla.org/en-US/docs/Web/Manifest/name\r\n - https://developer.mozilla.org/en-US/docs/Web/Manifest/short_name\r\n\r\n#### Accessibility Requirements\r\n\r\nHaving an installed PWA use the name the users will be most familiar with it seems like an accessibility issue, although I have not been approaching it from that angle and don\u2019t know which specific accessibility spec applies here.\n", "before_files": [{"content": "\"\"\"\nKolibri template tags\n=====================\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nfrom django import template\nfrom django.templatetags.static import static\nfrom django.utils.html import format_html\n\nfrom kolibri.core.hooks import FrontEndBaseASyncHook\nfrom kolibri.core.hooks import FrontEndBaseHeadHook\nfrom kolibri.core.hooks import FrontEndBaseSyncHook\nfrom kolibri.core.theme_hook import ThemeHook\n\nregister = template.Library()\n\n\[email protected]_tag()\ndef frontend_base_assets():\n \"\"\"\n This is a script tag for all ``FrontEndAssetHook`` hooks that implement a\n render_to_html() method - this is used in ``/base.html`` template to\n populate any Javascript and CSS that should be loaded at page load.\n\n :return: HTML of script tags to insert into base.html\n \"\"\"\n return FrontEndBaseSyncHook.html()\n\n\[email protected]_tag()\ndef frontend_base_async_assets():\n \"\"\"\n This is a script tag for all ``FrontEndAssetHook`` hooks that implement a\n render_to_html() method - this is used in ``/base.html`` template to\n populate any Javascript and CSS that should be loaded at page load.\n\n :return: HTML of script tags to insert into base.html\n \"\"\"\n return FrontEndBaseASyncHook.html()\n\n\[email protected]_tag()\ndef frontend_base_head_markup():\n \"\"\"\n This is a script tag for all ``FrontEndBaseHeadHook`` hooks that implement\n a render_to_html() method - this is used in the ``/base.html`` template to\n inject arbitrary markup into the ``<head>`` element.\n\n :return: HTML to insert into head of base.html\n \"\"\"\n return FrontEndBaseHeadHook.html()\n\n\[email protected]_tag()\ndef theme_favicon():\n \"\"\"\n Render a favicon link to put in the <head> tag of base.html, if a favicon is\n provided by the theme. If not, a default will be returned.\n \"\"\"\n favicon_urls = [\n logo[\"src\"]\n for logo in ThemeHook.get_theme().get(\"logos\", [])\n if logo.get(\"content_type\", \"\") == \"image/vnd.microsoft.icon\"\n ]\n\n # Choose the first available .ico file. 
It's unlikely there's more than\n # one specified in the theme.\n favicon_url = favicon_urls[0] if favicon_urls else static(\"assets/logo.ico\")\n\n return format_html('<link rel=\"shortcut icon\" href=\"{}\">', favicon_url)\n", "path": "kolibri/core/templatetags/core_tags.py"}], "after_files": [{"content": "\"\"\"\nKolibri template tags\n=====================\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nfrom django import template\nfrom django.templatetags.static import static\nfrom django.utils.html import format_html\n\nfrom kolibri.core.hooks import FrontEndBaseASyncHook\nfrom kolibri.core.hooks import FrontEndBaseHeadHook\nfrom kolibri.core.hooks import FrontEndBaseSyncHook\nfrom kolibri.core.theme_hook import ThemeHook\nfrom kolibri.utils.translation import ugettext as _\n\nregister = template.Library()\n\n\[email protected]_tag()\ndef frontend_base_assets():\n \"\"\"\n This is a script tag for all ``FrontEndAssetHook`` hooks that implement a\n render_to_html() method - this is used in ``/base.html`` template to\n populate any Javascript and CSS that should be loaded at page load.\n\n :return: HTML of script tags to insert into base.html\n \"\"\"\n return FrontEndBaseSyncHook.html()\n\n\[email protected]_tag()\ndef frontend_base_async_assets():\n \"\"\"\n This is a script tag for all ``FrontEndAssetHook`` hooks that implement a\n render_to_html() method - this is used in ``/base.html`` template to\n populate any Javascript and CSS that should be loaded at page load.\n\n :return: HTML of script tags to insert into base.html\n \"\"\"\n return FrontEndBaseASyncHook.html()\n\n\[email protected]_tag()\ndef frontend_base_head_markup():\n \"\"\"\n This is a script tag for all ``FrontEndBaseHeadHook`` hooks that implement\n a render_to_html() method - this is used in the ``/base.html`` template to\n inject arbitrary markup into the ``<head>`` element.\n\n :return: HTML to insert into head of base.html\n \"\"\"\n return FrontEndBaseHeadHook.html()\n\n\[email protected]_tag()\ndef theme_favicon():\n \"\"\"\n Render a favicon link to put in the <head> tag of base.html, if a favicon is\n provided by the theme. If not, a default will be returned.\n \"\"\"\n favicon_urls = [\n logo[\"src\"]\n for logo in ThemeHook.get_theme().get(\"logos\", [])\n if logo.get(\"content_type\", \"\") == \"image/vnd.microsoft.icon\"\n ]\n\n # Choose the first available .ico file. It's unlikely there's more than\n # one specified in the theme.\n favicon_url = favicon_urls[0] if favicon_urls else static(\"assets/logo.ico\")\n\n return format_html('<link rel=\"shortcut icon\" href=\"{}\">', favicon_url)\n\n\[email protected]_tag()\ndef site_title():\n \"\"\"\n Return the text of the site title, if provided by the theme. If not, the\n default will be returned. The site title may be translated, to allow for\n transliteration into other alphabets where needed.\n \"\"\"\n return ThemeHook.get_theme().get(\"siteTitle\", _(\"Kolibri\"))\n", "path": "kolibri/core/templatetags/core_tags.py"}]}
1,350
265
gh_patches_debug_12329
rasdani/github-patches
git_diff
mitmproxy__mitmproxy-2833
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Configuration file] keys are not used
##### Steps to reproduce the problem:

1. Create a configuration file at .mitmproxy/config.yaml
2. Set this configuration:
mode: "transparent"
showhost: true
3. Start mitmproxy using this command : "mitmproxy --conf config.yaml" (I'm on the .mitmproxy folder of course)
The process is started but the traffic is not working
4. Start mitmproxy using this command : "mitmproxy -T --host"
The process is started and the traffic is working


##### Any other comments? What have you tried so far?

I tried to use the ":" and "=" as separator for the YAML file but only the ":" is recognized apparently


##### System information
I have the last release of mitmproxy (v2.0.2)

Well I've just notice that there is a v3 release, maybe this could help me ?
I prefer to create this ticket if someone else has the same issue :)
--- END ISSUE ---

Below are some code segments, each from a relevant file. One or more of these files may contain bugs.

--- BEGIN FILES ---
Path: `mitmproxy/addons/script.py`
Content:
```
1 import os
2 import importlib.util
3 import importlib.machinery
4 import time
5 import sys
6 import types
7 import typing
8 
9 from mitmproxy import addonmanager
10 from mitmproxy import exceptions
11 from mitmproxy import flow
12 from mitmproxy import command
13 from mitmproxy import eventsequence
14 from mitmproxy import ctx
15 
16 
17 def load_script(path: str) -> types.ModuleType:
18     fullname = "__mitmproxy_script__.{}".format(
19         os.path.splitext(os.path.basename(path))[0]
20     )
21     # the fullname is not unique among scripts, so if there already is an existing script with said
22     # fullname, remove it.
23     sys.modules.pop(fullname, None)
24     oldpath = sys.path
25     sys.path.insert(0, os.path.dirname(path))
26     try:
27         loader = importlib.machinery.SourceFileLoader(fullname, path)
28         spec = importlib.util.spec_from_loader(fullname, loader=loader)
29         m = importlib.util.module_from_spec(spec)
30         loader.exec_module(m)
31         if not getattr(m, "name", None):
32             m.name = path  # type: ignore
33         return m
34     finally:
35         sys.path[:] = oldpath
36 
37 
38 class Script:
39     """
40     An addon that manages a single script.
41     """
42     ReloadInterval = 2
43 
44     def __init__(self, path):
45         self.name = "scriptmanager:" + path
46         self.path = path
47         self.fullpath = os.path.expanduser(path)
48         self.ns = None
49 
50         self.last_load = 0
51         self.last_mtime = 0
52         if not os.path.isfile(self.fullpath):
53             raise exceptions.OptionsError("No such script: %s" % path)
54 
55     @property
56     def addons(self):
57         return [self.ns] if self.ns else []
58 
59     def tick(self):
60         if time.time() - self.last_load > self.ReloadInterval:
61             try:
62                 mtime = os.stat(self.fullpath).st_mtime
63             except FileNotFoundError:
64                 scripts = list(ctx.options.scripts)
65                 scripts.remove(self.path)
66                 ctx.options.update(scripts=scripts)
67                 return
68 
69             if mtime > self.last_mtime:
70                 ctx.log.info("Loading script: %s" % self.path)
71                 if self.ns:
72                     ctx.master.addons.remove(self.ns)
73                 self.ns = None
74                 with addonmanager.safecall():
75                     ns = load_script(self.fullpath)
76                     ctx.master.addons.register(ns)
77                     self.ns = ns
78                 if self.ns:
79                     # We're already running, so we have to explicitly register and
80                     # configure the addon
81                     ctx.master.addons.invoke_addon(self.ns, "running")
82                     ctx.master.addons.invoke_addon(
83                         self.ns,
84                         "configure",
85                         ctx.options.keys()
86                     )
87                 self.last_load = time.time()
88                 self.last_mtime = mtime
89 
90 
91 class ScriptLoader:
92     """
93     An addon that manages loading scripts from options.
94     """
95     def __init__(self):
96         self.is_running = False
97         self.addons = []
98 
99     def running(self):
100         self.is_running = True
101 
102     @command.command("script.run")
103     def script_run(self, flows: typing.Sequence[flow.Flow], path: str) -> None:
104         """
105             Run a script on the specified flows. The script is loaded with
106             default options, and all lifecycle events for each flow are
107             simulated.
108         """
109         try:
110             s = Script(path)
111             l = addonmanager.Loader(ctx.master)
112             ctx.master.addons.invoke_addon(s, "load", l)
113             ctx.master.addons.invoke_addon(s, "configure", ctx.options.keys())
114             # Script is loaded on the first tick
115             ctx.master.addons.invoke_addon(s, "tick")
116             for f in flows:
117                 for evt, arg in eventsequence.iterate(f):
118                     ctx.master.addons.invoke_addon(s, evt, arg)
119         except exceptions.OptionsError as e:
120             raise exceptions.CommandError("Error running script: %s" % e) from e
121 
122     def configure(self, updated):
123         if "scripts" in updated:
124             for s in ctx.options.scripts:
125                 if ctx.options.scripts.count(s) > 1:
126                     raise exceptions.OptionsError("Duplicate script: %s" % s)
127 
128             for a in self.addons[:]:
129                 if a.path not in ctx.options.scripts:
130                     ctx.log.info("Un-loading script: %s" % a.name)
131                     ctx.master.addons.remove(a)
132                     self.addons.remove(a)
133 
134             # The machinations below are to ensure that:
135             #   - Scripts remain in the same order
136             #   - Scripts are not initialized un-necessarily. If only a
137             #     script's order in the script list has changed, it is just
138             #     moved.
139 
140             current = {}
141             for a in self.addons:
142                 current[a.path] = a
143 
144             ordered = []
145             newscripts = []
146             for s in ctx.options.scripts:
147                 if s in current:
148                     ordered.append(current[s])
149                 else:
150                     sc = Script(s)
151                     ordered.append(sc)
152                     newscripts.append(sc)
153 
154             self.addons = ordered
155 
156             for s in newscripts:
157                 ctx.master.addons.register(s)
158                 if self.is_running:
159                     # If we're already running, we configure and tell the addon
160                     # we're up and running.
161                     ctx.master.addons.invoke_addon(s, "running")
162 
```

--- END FILES ---

Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:

```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)


diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
diff --git a/mitmproxy/addons/script.py b/mitmproxy/addons/script.py
--- a/mitmproxy/addons/script.py
+++ b/mitmproxy/addons/script.py
@@ -44,13 +44,15 @@
     def __init__(self, path):
         self.name = "scriptmanager:" + path
         self.path = path
-        self.fullpath = os.path.expanduser(path)
+        self.fullpath = os.path.expanduser(
+            path.strip("'\" ")
+        )
         self.ns = None
 
         self.last_load = 0
         self.last_mtime = 0
         if not os.path.isfile(self.fullpath):
-            raise exceptions.OptionsError("No such script: %s" % path)
+            raise exceptions.OptionsError('No such script: "%s"' % self.fullpath)
 
     @property
     def addons(self):
{"golden_diff": "diff --git a/mitmproxy/addons/script.py b/mitmproxy/addons/script.py\n--- a/mitmproxy/addons/script.py\n+++ b/mitmproxy/addons/script.py\n@@ -44,13 +44,15 @@\n def __init__(self, path):\n self.name = \"scriptmanager:\" + path\n self.path = path\n- self.fullpath = os.path.expanduser(path)\n+ self.fullpath = os.path.expanduser(\n+ path.strip(\"'\\\" \")\n+ )\n self.ns = None\n \n self.last_load = 0\n self.last_mtime = 0\n if not os.path.isfile(self.fullpath):\n- raise exceptions.OptionsError(\"No such script: %s\" % path)\n+ raise exceptions.OptionsError('No such script: \"%s\"' % self.fullpath)\n \n @property\n def addons(self):\n", "issue": "[Configuration file] keys are not used\n##### Steps to reproduce the problem:\r\n\r\n1. Create a configuration file at .mitmproxy/config.yaml\r\n2. Set this configuration:\r\nmode: \"transparent\"\r\nshowhost: true \r\n3. Start mitmproxy using this command : \"mitmproxy --conf config.yaml\" (I'm on the .mitmproxy folder of course)\r\nThe process is started but the traffic is not working\r\n4. Start mitmproxy using this command : \"mitmproxy -T --host\"\r\nThe process is started and the traffic is working\r\n\r\n\r\n##### Any other comments? What have you tried so far?\r\n\r\nI tried to use the \":\" and \"=\" as separator for the YAML file but only the \":\" is recognized apparently\r\n\r\n\r\n##### System information\r\nI have the last release of mitmproxy (v2.0.2)\r\n\r\nWell I've just notice that there is a v3 release, maybe this could help me ?\r\nI prefer to create this ticket if someone else has the same issue :)\n", "before_files": [{"content": "import os\nimport importlib.util\nimport importlib.machinery\nimport time\nimport sys\nimport types\nimport typing\n\nfrom mitmproxy import addonmanager\nfrom mitmproxy import exceptions\nfrom mitmproxy import flow\nfrom mitmproxy import command\nfrom mitmproxy import eventsequence\nfrom mitmproxy import ctx\n\n\ndef load_script(path: str) -> types.ModuleType:\n fullname = \"__mitmproxy_script__.{}\".format(\n os.path.splitext(os.path.basename(path))[0]\n )\n # the fullname is not unique among scripts, so if there already is an existing script with said\n # fullname, remove it.\n sys.modules.pop(fullname, None)\n oldpath = sys.path\n sys.path.insert(0, os.path.dirname(path))\n try:\n loader = importlib.machinery.SourceFileLoader(fullname, path)\n spec = importlib.util.spec_from_loader(fullname, loader=loader)\n m = importlib.util.module_from_spec(spec)\n loader.exec_module(m)\n if not getattr(m, \"name\", None):\n m.name = path # type: ignore\n return m\n finally:\n sys.path[:] = oldpath\n\n\nclass Script:\n \"\"\"\n An addon that manages a single script.\n \"\"\"\n ReloadInterval = 2\n\n def __init__(self, path):\n self.name = \"scriptmanager:\" + path\n self.path = path\n self.fullpath = os.path.expanduser(path)\n self.ns = None\n\n self.last_load = 0\n self.last_mtime = 0\n if not os.path.isfile(self.fullpath):\n raise exceptions.OptionsError(\"No such script: %s\" % path)\n\n @property\n def addons(self):\n return [self.ns] if self.ns else []\n\n def tick(self):\n if time.time() - self.last_load > self.ReloadInterval:\n try:\n mtime = os.stat(self.fullpath).st_mtime\n except FileNotFoundError:\n scripts = list(ctx.options.scripts)\n scripts.remove(self.path)\n ctx.options.update(scripts=scripts)\n return\n\n if mtime > self.last_mtime:\n ctx.log.info(\"Loading script: %s\" % self.path)\n if self.ns:\n ctx.master.addons.remove(self.ns)\n self.ns = None\n with 
addonmanager.safecall():\n ns = load_script(self.fullpath)\n ctx.master.addons.register(ns)\n self.ns = ns\n if self.ns:\n # We're already running, so we have to explicitly register and\n # configure the addon\n ctx.master.addons.invoke_addon(self.ns, \"running\")\n ctx.master.addons.invoke_addon(\n self.ns,\n \"configure\",\n ctx.options.keys()\n )\n self.last_load = time.time()\n self.last_mtime = mtime\n\n\nclass ScriptLoader:\n \"\"\"\n An addon that manages loading scripts from options.\n \"\"\"\n def __init__(self):\n self.is_running = False\n self.addons = []\n\n def running(self):\n self.is_running = True\n\n @command.command(\"script.run\")\n def script_run(self, flows: typing.Sequence[flow.Flow], path: str) -> None:\n \"\"\"\n Run a script on the specified flows. The script is loaded with\n default options, and all lifecycle events for each flow are\n simulated.\n \"\"\"\n try:\n s = Script(path)\n l = addonmanager.Loader(ctx.master)\n ctx.master.addons.invoke_addon(s, \"load\", l)\n ctx.master.addons.invoke_addon(s, \"configure\", ctx.options.keys())\n # Script is loaded on the first tick\n ctx.master.addons.invoke_addon(s, \"tick\")\n for f in flows:\n for evt, arg in eventsequence.iterate(f):\n ctx.master.addons.invoke_addon(s, evt, arg)\n except exceptions.OptionsError as e:\n raise exceptions.CommandError(\"Error running script: %s\" % e) from e\n\n def configure(self, updated):\n if \"scripts\" in updated:\n for s in ctx.options.scripts:\n if ctx.options.scripts.count(s) > 1:\n raise exceptions.OptionsError(\"Duplicate script: %s\" % s)\n\n for a in self.addons[:]:\n if a.path not in ctx.options.scripts:\n ctx.log.info(\"Un-loading script: %s\" % a.name)\n ctx.master.addons.remove(a)\n self.addons.remove(a)\n\n # The machinations below are to ensure that:\n # - Scripts remain in the same order\n # - Scripts are not initialized un-necessarily. 
If only a\n # script's order in the script list has changed, it is just\n # moved.\n\n current = {}\n for a in self.addons:\n current[a.path] = a\n\n ordered = []\n newscripts = []\n for s in ctx.options.scripts:\n if s in current:\n ordered.append(current[s])\n else:\n sc = Script(s)\n ordered.append(sc)\n newscripts.append(sc)\n\n self.addons = ordered\n\n for s in newscripts:\n ctx.master.addons.register(s)\n if self.is_running:\n # If we're already running, we configure and tell the addon\n # we're up and running.\n ctx.master.addons.invoke_addon(s, \"running\")\n", "path": "mitmproxy/addons/script.py"}], "after_files": [{"content": "import os\nimport importlib.util\nimport importlib.machinery\nimport time\nimport sys\nimport types\nimport typing\n\nfrom mitmproxy import addonmanager\nfrom mitmproxy import exceptions\nfrom mitmproxy import flow\nfrom mitmproxy import command\nfrom mitmproxy import eventsequence\nfrom mitmproxy import ctx\n\n\ndef load_script(path: str) -> types.ModuleType:\n fullname = \"__mitmproxy_script__.{}\".format(\n os.path.splitext(os.path.basename(path))[0]\n )\n # the fullname is not unique among scripts, so if there already is an existing script with said\n # fullname, remove it.\n sys.modules.pop(fullname, None)\n oldpath = sys.path\n sys.path.insert(0, os.path.dirname(path))\n try:\n loader = importlib.machinery.SourceFileLoader(fullname, path)\n spec = importlib.util.spec_from_loader(fullname, loader=loader)\n m = importlib.util.module_from_spec(spec)\n loader.exec_module(m)\n if not getattr(m, \"name\", None):\n m.name = path # type: ignore\n return m\n finally:\n sys.path[:] = oldpath\n\n\nclass Script:\n \"\"\"\n An addon that manages a single script.\n \"\"\"\n ReloadInterval = 2\n\n def __init__(self, path):\n self.name = \"scriptmanager:\" + path\n self.path = path\n self.fullpath = os.path.expanduser(\n path.strip(\"'\\\" \")\n )\n self.ns = None\n\n self.last_load = 0\n self.last_mtime = 0\n if not os.path.isfile(self.fullpath):\n raise exceptions.OptionsError('No such script: \"%s\"' % self.fullpath)\n\n @property\n def addons(self):\n return [self.ns] if self.ns else []\n\n def tick(self):\n if time.time() - self.last_load > self.ReloadInterval:\n try:\n mtime = os.stat(self.fullpath).st_mtime\n except FileNotFoundError:\n scripts = list(ctx.options.scripts)\n scripts.remove(self.path)\n ctx.options.update(scripts=scripts)\n return\n\n if mtime > self.last_mtime:\n ctx.log.info(\"Loading script: %s\" % self.path)\n if self.ns:\n ctx.master.addons.remove(self.ns)\n self.ns = None\n with addonmanager.safecall():\n ns = load_script(self.fullpath)\n ctx.master.addons.register(ns)\n self.ns = ns\n if self.ns:\n # We're already running, so we have to explicitly register and\n # configure the addon\n ctx.master.addons.invoke_addon(self.ns, \"running\")\n ctx.master.addons.invoke_addon(\n self.ns,\n \"configure\",\n ctx.options.keys()\n )\n self.last_load = time.time()\n self.last_mtime = mtime\n\n\nclass ScriptLoader:\n \"\"\"\n An addon that manages loading scripts from options.\n \"\"\"\n def __init__(self):\n self.is_running = False\n self.addons = []\n\n def running(self):\n self.is_running = True\n\n @command.command(\"script.run\")\n def script_run(self, flows: typing.Sequence[flow.Flow], path: str) -> None:\n \"\"\"\n Run a script on the specified flows. 
The script is loaded with\n default options, and all lifecycle events for each flow are\n simulated.\n \"\"\"\n try:\n s = Script(path)\n l = addonmanager.Loader(ctx.master)\n ctx.master.addons.invoke_addon(s, \"load\", l)\n ctx.master.addons.invoke_addon(s, \"configure\", ctx.options.keys())\n # Script is loaded on the first tick\n ctx.master.addons.invoke_addon(s, \"tick\")\n for f in flows:\n for evt, arg in eventsequence.iterate(f):\n ctx.master.addons.invoke_addon(s, evt, arg)\n except exceptions.OptionsError as e:\n raise exceptions.CommandError(\"Error running script: %s\" % e) from e\n\n def configure(self, updated):\n if \"scripts\" in updated:\n for s in ctx.options.scripts:\n if ctx.options.scripts.count(s) > 1:\n raise exceptions.OptionsError(\"Duplicate script: %s\" % s)\n\n for a in self.addons[:]:\n if a.path not in ctx.options.scripts:\n ctx.log.info(\"Un-loading script: %s\" % a.name)\n ctx.master.addons.remove(a)\n self.addons.remove(a)\n\n # The machinations below are to ensure that:\n # - Scripts remain in the same order\n # - Scripts are not initialized un-necessarily. If only a\n # script's order in the script list has changed, it is just\n # moved.\n\n current = {}\n for a in self.addons:\n current[a.path] = a\n\n ordered = []\n newscripts = []\n for s in ctx.options.scripts:\n if s in current:\n ordered.append(current[s])\n else:\n sc = Script(s)\n ordered.append(sc)\n newscripts.append(sc)\n\n self.addons = ordered\n\n for s in newscripts:\n ctx.master.addons.register(s)\n if self.is_running:\n # If we're already running, we configure and tell the addon\n # we're up and running.\n ctx.master.addons.invoke_addon(s, \"running\")\n", "path": "mitmproxy/addons/script.py"}]}
2,013
191
gh_patches_debug_756
rasdani/github-patches
git_diff
vllm-project__vllm-1212
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[v0.2.0] Release Tracker
## Major changes

* Up to 60% performance improvement by optimizing de-tokenization and sampler
* Initial support for AWQ (performance not optimized)
* Support for RoPE scaling and LongChat
* Support for Mistral-7B

## PRs to be merged before the release

- [x] Vectorized sampler: #1048, #820
- [x] LongChat: #555
- [x] `TORCH_CUDA_ARCH_LIST` build option: #1074
- [x] Support for Mistral-7B: #1196
- [x] #1198
- ~~[ ] FP32 RoPE kernel: #1061~~ (deferred to the next PR)
--- END ISSUE ---

Below are some code segments, each from a relevant file. One or more of these files may contain bugs.

--- BEGIN FILES ---
Path: `vllm/__init__.py`
Content:
```
1 """vLLM: a high-throughput and memory-efficient inference engine for LLMs"""
2 
3 from vllm.engine.arg_utils import AsyncEngineArgs, EngineArgs
4 from vllm.engine.async_llm_engine import AsyncLLMEngine
5 from vllm.engine.llm_engine import LLMEngine
6 from vllm.engine.ray_utils import initialize_cluster
7 from vllm.entrypoints.llm import LLM
8 from vllm.outputs import CompletionOutput, RequestOutput
9 from vllm.sampling_params import SamplingParams
10 
11 __version__ = "2.1.33"
12 
13 __all__ = [
14     "LLM",
15     "SamplingParams",
16     "RequestOutput",
17     "CompletionOutput",
18     "LLMEngine",
19     "EngineArgs",
20     "AsyncLLMEngine",
21     "AsyncEngineArgs",
22     "initialize_cluster",
23 ]
24 
```

--- END FILES ---

Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:

```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)


diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
diff --git a/vllm/__init__.py b/vllm/__init__.py
--- a/vllm/__init__.py
+++ b/vllm/__init__.py
@@ -8,7 +8,7 @@
 from vllm.outputs import CompletionOutput, RequestOutput
 from vllm.sampling_params import SamplingParams
 
-__version__ = "0.1.7"
+__version__ = "0.2.0"
 
 __all__ = [
     "LLM",
{"golden_diff": "diff --git a/vllm/__init__.py b/vllm/__init__.py\n--- a/vllm/__init__.py\n+++ b/vllm/__init__.py\n@@ -8,7 +8,7 @@\n from vllm.outputs import CompletionOutput, RequestOutput\n from vllm.sampling_params import SamplingParams\n \n-__version__ = \"0.1.7\"\n+__version__ = \"0.2.0\"\n \n __all__ = [\n \"LLM\",\n", "issue": "[v0.2.0] Release Tracker\n## Major changes\r\n\r\n* Up to 60% performance improvement by optimizing de-tokenization and sampler\r\n* Initial support for AWQ (performance not optimized)\r\n* Support for RoPE scaling and LongChat\r\n* Support for Mistral-7B\r\n\r\n## PRs to be merged before the release\r\n\r\n- [x] Vectorized sampler: #1048, #820 \r\n- [x] LongChat: #555 \r\n- [x] `TORCH_CUDA_ARCH_LIST` build option: #1074 \r\n- [x] Support for Mistral-7B: #1196 \r\n- [x] #1198 \r\n- ~~[ ] FP32 RoPE kernel: #1061~~ (deferred to the next PR)\n", "before_files": [{"content": "\"\"\"vLLM: a high-throughput and memory-efficient inference engine for LLMs\"\"\"\n\nfrom vllm.engine.arg_utils import AsyncEngineArgs, EngineArgs\nfrom vllm.engine.async_llm_engine import AsyncLLMEngine\nfrom vllm.engine.llm_engine import LLMEngine\nfrom vllm.engine.ray_utils import initialize_cluster\nfrom vllm.entrypoints.llm import LLM\nfrom vllm.outputs import CompletionOutput, RequestOutput\nfrom vllm.sampling_params import SamplingParams\n\n__version__ = \"0.1.7\"\n\n__all__ = [\n \"LLM\",\n \"SamplingParams\",\n \"RequestOutput\",\n \"CompletionOutput\",\n \"LLMEngine\",\n \"EngineArgs\",\n \"AsyncLLMEngine\",\n \"AsyncEngineArgs\",\n \"initialize_cluster\",\n]\n", "path": "vllm/__init__.py"}], "after_files": [{"content": "\"\"\"vLLM: a high-throughput and memory-efficient inference engine for LLMs\"\"\"\n\nfrom vllm.engine.arg_utils import AsyncEngineArgs, EngineArgs\nfrom vllm.engine.async_llm_engine import AsyncLLMEngine\nfrom vllm.engine.llm_engine import LLMEngine\nfrom vllm.engine.ray_utils import initialize_cluster\nfrom vllm.entrypoints.llm import LLM\nfrom vllm.outputs import CompletionOutput, RequestOutput\nfrom vllm.sampling_params import SamplingParams\n\n__version__ = \"0.2.0\"\n\n__all__ = [\n \"LLM\",\n \"SamplingParams\",\n \"RequestOutput\",\n \"CompletionOutput\",\n \"LLMEngine\",\n \"EngineArgs\",\n \"AsyncLLMEngine\",\n \"AsyncEngineArgs\",\n \"initialize_cluster\",\n]\n", "path": "vllm/__init__.py"}]}
653
108
gh_patches_debug_30325
rasdani/github-patches
git_diff
mito-ds__mito-213
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Allow the installer to go pro after the user has already installed!
**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

Please include the relevant dataset if the bug you encountered is dataset specific. Make sure to anonymize the data properly.

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
 - OS: [e.g. Windows 11]
 - Browser [e.g. Chrome, Firefox]
 - Mito Version [e.g. 0.3.331] (you can find this with `pip list`)

**Additional context**
Add any other context about the problem here.
--- END ISSUE ---

Below are some code segments, each from a relevant file. One or more of these files may contain bugs.

--- BEGIN FILES ---
Path: `mitoinstaller/mitoinstaller/user_install.py`
Content:
```
1 import json
2 import os
3 from typing import Optional
4 import uuid
5 from copy import deepcopy
6 
7 from mitoinstaller import __version__
8 
9 # Where all global .mito files are stored
10 MITO_FOLDER = os.path.join(os.path.expanduser("~"), '.mito')
11 
12 # The path of the user.json file, which notably is the same
13 # path as the USER_JSON_PATH in mitosheet
14 USER_JSON_PATH = os.path.join(MITO_FOLDER, 'user.json')
15 
16 def get_random_id() -> str:
17     """
18     Creates a new random ID for the user, which for any given user,
19     should only happen once.
20     """
21     return str(uuid.uuid1())
22 
23 def is_running_test() -> bool:
24     """
25     A helper function that quickly returns if the current code is running inside
26     of a test, which is useful for making sure we don't generate tons of logs
27     """
28     running_pytests = "PYTEST_CURRENT_TEST" in os.environ
29     running_ci = 'CI' in os.environ and os.environ['CI'] is not None
30 
31     return running_pytests or running_ci
32 
33 
34 # NOTE: the installer only creates the static id for the user, and
35 # otherwise does nothing with the user_json file. This makes sure
36 # we keep the dependencies as simple as possible with this file.
37 # We also add the telemetry, which we turn off if the user has a
38 # pro subscription.
39 # NOTE: if you delete a field from this, you need to update the
40 # user_json_is_installer_default to handle this properly
41 USER_JSON_DEFAULT = {
42     'static_user_id': get_random_id() if not is_running_test() else 'github_action',
43     'mitosheet_telemetry': True,
44     'mitosheet_pro': False,
45 }
46 
47 def try_create_user_json_file(is_pro: bool=False) -> None:
48     # Create the mito folder if it does not exist
49     if not os.path.exists(MITO_FOLDER):
50         os.mkdir(MITO_FOLDER)
51 
52     # We only create a user.json file if it does not exist
53     if not os.path.exists(USER_JSON_PATH):
54         with open(USER_JSON_PATH, 'w+') as f:
55             # And write the default object
56             default_user_json = deepcopy(USER_JSON_DEFAULT)
57             default_user_json['mitosheet_telemetry'] = not is_pro
58             default_user_json['mitosheet_pro'] = is_pro
59 
60             f.write(json.dumps(default_user_json))
61     else:
62         # Otherwise, we make sure to update the mitosheet_telemetry variable
63         with open(USER_JSON_PATH, 'r') as f:
64             updated_user_json = json.loads(f.read())
65             updated_user_json['mitosheet_telemetry'] = not is_pro
66             updated_user_json['mitosheet_pro'] = is_pro
67         with open(USER_JSON_PATH, 'w') as f:
68             f.write(json.dumps(updated_user_json))
69 
70 
71 def get_static_user_id() -> Optional[str]:
72     try:
73         with open(USER_JSON_PATH) as f:
74             return json.load(f)['static_user_id']
75     except:
76         return None
77 
78 def get_mitosheet_telemetry() -> bool:
79     try:
80         with open(USER_JSON_PATH) as f:
81             return json.load(f)['mitosheet_telemetry']
82     except:
83         return True
84 
85 def user_json_is_installer_default() -> bool:
86     """
87     Returns True if the user.json file is the installer default,
88     and otherwise returns False.
89 
90     This allows us to not call identify if we have already done
91     so in the mitosheet package (which would overwrite things
92     we don't want to).
93     """
94     try:
95         with open(USER_JSON_PATH) as f:
96             user_json_object = json.load(f)
97             return len(user_json_object) <= len(USER_JSON_DEFAULT)
98     except:
99         return False
100 
```
Path: `mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py`
Content:
```
1 import os
2 import sys
3 
4 from mitoinstaller import __version__
5 from mitoinstaller.commands import upgrade_mito_installer
6 from mitoinstaller.installer_steps.installer_step import InstallerStep
7 from mitoinstaller.log_utils import identify, log
8 from mitoinstaller.user_install import (USER_JSON_PATH,
9                                         try_create_user_json_file)
10 
11 
12 def initial_install_step_create_user():
13 
14     if not os.path.exists(USER_JSON_PATH):
15         try_create_user_json_file(is_pro=('--pro' in sys.argv))
16 
17     # Only try and log if we're not pro
18     if not ('--pro' in sys.argv):
19         identify()
20         log('install_started', {
21             'mitoinstaller_version': __version__
22         })
23 
24 
25 INITIAL_INSTALLER_STEPS = [
26     InstallerStep(
27         'Create mito user',
28         initial_install_step_create_user
29     ),
30     InstallerStep(
31         'Upgrade mitoinstaller',
32         upgrade_mito_installer,
33         optional=True
34     ),
35 ]
36 
```

--- END FILES ---

Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:

```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)


diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
diff --git a/mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py b/mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py
--- a/mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py
+++ b/mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py
@@ -5,7 +5,7 @@
 from mitoinstaller.commands import upgrade_mito_installer
 from mitoinstaller.installer_steps.installer_step import InstallerStep
 from mitoinstaller.log_utils import identify, log
-from mitoinstaller.user_install import (USER_JSON_PATH,
+from mitoinstaller.user_install import (USER_JSON_PATH, go_pro,
                                         try_create_user_json_file)
 
 
@@ -14,13 +14,15 @@
     if not os.path.exists(USER_JSON_PATH):
         try_create_user_json_file(is_pro=('--pro' in sys.argv))
 
-    # Only try and log if we're not pro
     if not ('--pro' in sys.argv):
+        # Only try and log if we're not pro
         identify()
         log('install_started', {
             'mitoinstaller_version': __version__
         })
-
+    else:
+        # If the user is going pro, make sure they are set to pro
+        go_pro()
 
 INITIAL_INSTALLER_STEPS = [
     InstallerStep(
diff --git a/mitoinstaller/mitoinstaller/user_install.py b/mitoinstaller/mitoinstaller/user_install.py
--- a/mitoinstaller/mitoinstaller/user_install.py
+++ b/mitoinstaller/mitoinstaller/user_install.py
@@ -97,3 +97,12 @@
             return len(user_json_object) <= len(USER_JSON_DEFAULT)
     except:
         return False
+
+def go_pro() -> None:
+    with open(USER_JSON_PATH, 'r') as f:
+        updated_user_json = json.loads(f.read())
+        updated_user_json['mitosheet_telemetry'] = False
+        updated_user_json['mitosheet_pro'] = True
+
+    with open(USER_JSON_PATH, 'w') as f:
+        f.write(json.dumps(updated_user_json))
\ No newline at end of file
{"golden_diff": "diff --git a/mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py b/mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py\n--- a/mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py\n+++ b/mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py\n@@ -5,7 +5,7 @@\n from mitoinstaller.commands import upgrade_mito_installer\n from mitoinstaller.installer_steps.installer_step import InstallerStep\n from mitoinstaller.log_utils import identify, log\n-from mitoinstaller.user_install import (USER_JSON_PATH,\n+from mitoinstaller.user_install import (USER_JSON_PATH, go_pro,\n try_create_user_json_file)\n \n \n@@ -14,13 +14,15 @@\n if not os.path.exists(USER_JSON_PATH):\n try_create_user_json_file(is_pro=('--pro' in sys.argv))\n \n- # Only try and log if we're not pro\n if not ('--pro' in sys.argv):\n+ # Only try and log if we're not pro\n identify()\n log('install_started', {\n 'mitoinstaller_version': __version__\n })\n-\n+ else:\n+ # If the user is going pro, make sure they are set to pro\n+ go_pro()\n \n INITIAL_INSTALLER_STEPS = [\n InstallerStep(\ndiff --git a/mitoinstaller/mitoinstaller/user_install.py b/mitoinstaller/mitoinstaller/user_install.py\n--- a/mitoinstaller/mitoinstaller/user_install.py\n+++ b/mitoinstaller/mitoinstaller/user_install.py\n@@ -97,3 +97,12 @@\n return len(user_json_object) <= len(USER_JSON_DEFAULT)\n except:\n return False\n+\n+def go_pro() -> None:\n+ with open(USER_JSON_PATH, 'r') as f:\n+ updated_user_json = json.loads(f.read())\n+ updated_user_json['mitosheet_telemetry'] = False\n+ updated_user_json['mitosheet_pro'] = True\n+ \n+ with open(USER_JSON_PATH, 'w') as f:\n+ f.write(json.dumps(updated_user_json))\n\\ No newline at end of file\n", "issue": "Allow the installer to go pro after the user has already installed!\n**Describe the bug**\r\nA clear and concise description of what the bug is.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Go to '...'\r\n2. Click on '....'\r\n3. Scroll down to '....'\r\n4. See error\r\n\r\nPlease include the relevant dataset if the bug you encountered is dataset specific. Make sure to anonymize the data properly.\r\n\r\n**Expected behavior**\r\nA clear and concise description of what you expected to happen.\r\n\r\n**Screenshots**\r\nIf applicable, add screenshots to help explain your problem.\r\n\r\n**Desktop (please complete the following information):**\r\n - OS: [e.g. Windows 11]\r\n - Browser [e.g. Chrome, Firefox]\r\n - Mito Version [e.g. 
0.3.331] (you can find this with `pip list`)\r\n\r\n**Additional context**\r\nAdd any other context about the problem here.\r\n\n", "before_files": [{"content": "import json\nimport os\nfrom typing import Optional\nimport uuid\nfrom copy import deepcopy\n\nfrom mitoinstaller import __version__\n\n# Where all global .mito files are stored\nMITO_FOLDER = os.path.join(os.path.expanduser(\"~\"), '.mito')\n\n# The path of the user.json file, which notably is the same\n# path as the USER_JSON_PATH in mitosheet\nUSER_JSON_PATH = os.path.join(MITO_FOLDER, 'user.json')\n\ndef get_random_id() -> str:\n \"\"\"\n Creates a new random ID for the user, which for any given user,\n should only happen once.\n \"\"\"\n return str(uuid.uuid1())\n\ndef is_running_test() -> bool:\n \"\"\"\n A helper function that quickly returns if the current code is running inside \n of a test, which is useful for making sure we don't generate tons of logs \n \"\"\"\n running_pytests = \"PYTEST_CURRENT_TEST\" in os.environ\n running_ci = 'CI' in os.environ and os.environ['CI'] is not None\n\n return running_pytests or running_ci\n\n\n# NOTE: the installer only creates the static id for the user, and\n# otherwise does nothing with the user_json file. This makes sure\n# we keep the dependencies as simple as possible with this file. \n# We also add the telemetry, which we turn off if the user has a \n# pro subscription.\n# NOTE: if you delete a field from this, you need to update the \n# user_json_is_installer_default to handle this properly\nUSER_JSON_DEFAULT = {\n 'static_user_id': get_random_id() if not is_running_test() else 'github_action',\n 'mitosheet_telemetry': True,\n 'mitosheet_pro': False,\n}\n\ndef try_create_user_json_file(is_pro: bool=False) -> None:\n # Create the mito folder if it does not exist\n if not os.path.exists(MITO_FOLDER):\n os.mkdir(MITO_FOLDER)\n \n # We only create a user.json file if it does not exist\n if not os.path.exists(USER_JSON_PATH):\n with open(USER_JSON_PATH, 'w+') as f:\n # And write the default object\n default_user_json = deepcopy(USER_JSON_DEFAULT)\n default_user_json['mitosheet_telemetry'] = not is_pro\n default_user_json['mitosheet_pro'] = is_pro\n\n f.write(json.dumps(default_user_json))\n else:\n # Otherwise, we make sure to update the mitosheet_telemetry variable \n with open(USER_JSON_PATH, 'r') as f:\n updated_user_json = json.loads(f.read())\n updated_user_json['mitosheet_telemetry'] = not is_pro\n updated_user_json['mitosheet_pro'] = is_pro \n with open(USER_JSON_PATH, 'w') as f:\n f.write(json.dumps(updated_user_json))\n\n\ndef get_static_user_id() -> Optional[str]:\n try:\n with open(USER_JSON_PATH) as f:\n return json.load(f)['static_user_id']\n except: \n return None\n\ndef get_mitosheet_telemetry() -> bool:\n try:\n with open(USER_JSON_PATH) as f:\n return json.load(f)['mitosheet_telemetry']\n except: \n return True\n\ndef user_json_is_installer_default() -> bool:\n \"\"\"\n Returns True if the user.json file is the installer default, \n and otherwise returns False. 
\n\n This allows us to not call identify if we have already done\n so in the mitosheet package (which would overwrite things\n we don't want to).\n \"\"\"\n try:\n with open(USER_JSON_PATH) as f:\n user_json_object = json.load(f)\n return len(user_json_object) <= len(USER_JSON_DEFAULT)\n except:\n return False\n", "path": "mitoinstaller/mitoinstaller/user_install.py"}, {"content": "import os\nimport sys\n\nfrom mitoinstaller import __version__\nfrom mitoinstaller.commands import upgrade_mito_installer\nfrom mitoinstaller.installer_steps.installer_step import InstallerStep\nfrom mitoinstaller.log_utils import identify, log\nfrom mitoinstaller.user_install import (USER_JSON_PATH,\n try_create_user_json_file)\n\n\ndef initial_install_step_create_user():\n\n if not os.path.exists(USER_JSON_PATH):\n try_create_user_json_file(is_pro=('--pro' in sys.argv))\n\n # Only try and log if we're not pro\n if not ('--pro' in sys.argv):\n identify()\n log('install_started', {\n 'mitoinstaller_version': __version__\n })\n\n\nINITIAL_INSTALLER_STEPS = [\n InstallerStep(\n 'Create mito user',\n initial_install_step_create_user\n ),\n InstallerStep(\n 'Upgrade mitoinstaller',\n upgrade_mito_installer,\n optional=True\n ),\n]\n", "path": "mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py"}], "after_files": [{"content": "import json\nimport os\nfrom typing import Optional\nimport uuid\nfrom copy import deepcopy\n\nfrom mitoinstaller import __version__\n\n# Where all global .mito files are stored\nMITO_FOLDER = os.path.join(os.path.expanduser(\"~\"), '.mito')\n\n# The path of the user.json file, which notably is the same\n# path as the USER_JSON_PATH in mitosheet\nUSER_JSON_PATH = os.path.join(MITO_FOLDER, 'user.json')\n\ndef get_random_id() -> str:\n \"\"\"\n Creates a new random ID for the user, which for any given user,\n should only happen once.\n \"\"\"\n return str(uuid.uuid1())\n\ndef is_running_test() -> bool:\n \"\"\"\n A helper function that quickly returns if the current code is running inside \n of a test, which is useful for making sure we don't generate tons of logs \n \"\"\"\n running_pytests = \"PYTEST_CURRENT_TEST\" in os.environ\n running_ci = 'CI' in os.environ and os.environ['CI'] is not None\n\n return running_pytests or running_ci\n\n\n# NOTE: the installer only creates the static id for the user, and\n# otherwise does nothing with the user_json file. This makes sure\n# we keep the dependencies as simple as possible with this file. 
\n# We also add the telemetry, which we turn off if the user has a \n# pro subscription.\n# NOTE: if you delete a field from this, you need to update the \n# user_json_is_installer_default to handle this properly\nUSER_JSON_DEFAULT = {\n 'static_user_id': get_random_id() if not is_running_test() else 'github_action',\n 'mitosheet_telemetry': True,\n 'mitosheet_pro': False,\n}\n\ndef try_create_user_json_file(is_pro: bool=False) -> None:\n # Create the mito folder if it does not exist\n if not os.path.exists(MITO_FOLDER):\n os.mkdir(MITO_FOLDER)\n \n # We only create a user.json file if it does not exist\n if not os.path.exists(USER_JSON_PATH):\n with open(USER_JSON_PATH, 'w+') as f:\n # And write the default object\n default_user_json = deepcopy(USER_JSON_DEFAULT)\n default_user_json['mitosheet_telemetry'] = not is_pro\n default_user_json['mitosheet_pro'] = is_pro\n\n f.write(json.dumps(default_user_json))\n else:\n # Otherwise, we make sure to update the mitosheet_telemetry variable \n with open(USER_JSON_PATH, 'r') as f:\n updated_user_json = json.loads(f.read())\n updated_user_json['mitosheet_telemetry'] = not is_pro\n updated_user_json['mitosheet_pro'] = is_pro \n with open(USER_JSON_PATH, 'w') as f:\n f.write(json.dumps(updated_user_json))\n\n\ndef get_static_user_id() -> Optional[str]:\n try:\n with open(USER_JSON_PATH) as f:\n return json.load(f)['static_user_id']\n except: \n return None\n\ndef get_mitosheet_telemetry() -> bool:\n try:\n with open(USER_JSON_PATH) as f:\n return json.load(f)['mitosheet_telemetry']\n except: \n return True\n\ndef user_json_is_installer_default() -> bool:\n \"\"\"\n Returns True if the user.json file is the installer default, \n and otherwise returns False. \n\n This allows us to not call identify if we have already done\n so in the mitosheet package (which would overwrite things\n we don't want to).\n \"\"\"\n try:\n with open(USER_JSON_PATH) as f:\n user_json_object = json.load(f)\n return len(user_json_object) <= len(USER_JSON_DEFAULT)\n except:\n return False\n\ndef go_pro() -> None:\n with open(USER_JSON_PATH, 'r') as f:\n updated_user_json = json.loads(f.read())\n updated_user_json['mitosheet_telemetry'] = False\n updated_user_json['mitosheet_pro'] = True\n \n with open(USER_JSON_PATH, 'w') as f:\n f.write(json.dumps(updated_user_json))", "path": "mitoinstaller/mitoinstaller/user_install.py"}, {"content": "import os\nimport sys\n\nfrom mitoinstaller import __version__\nfrom mitoinstaller.commands import upgrade_mito_installer\nfrom mitoinstaller.installer_steps.installer_step import InstallerStep\nfrom mitoinstaller.log_utils import identify, log\nfrom mitoinstaller.user_install import (USER_JSON_PATH, go_pro,\n try_create_user_json_file)\n\n\ndef initial_install_step_create_user():\n\n if not os.path.exists(USER_JSON_PATH):\n try_create_user_json_file(is_pro=('--pro' in sys.argv))\n\n if not ('--pro' in sys.argv):\n # Only try and log if we're not pro\n identify()\n log('install_started', {\n 'mitoinstaller_version': __version__\n })\n else:\n # If the user is going pro, make sure they are set to pro\n go_pro()\n\nINITIAL_INSTALLER_STEPS = [\n InstallerStep(\n 'Create mito user',\n initial_install_step_create_user\n ),\n InstallerStep(\n 'Upgrade mitoinstaller',\n upgrade_mito_installer,\n optional=True\n ),\n]\n", "path": "mitoinstaller/mitoinstaller/installer_steps/initial_installer_steps.py"}]}
1,782
496
gh_patches_debug_4574
rasdani/github-patches
git_diff
qtile__qtile-2716
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
stack trace from Clipboard widget
```
2021-08-13 06:48:23,421 ERROR libqtile hook.py:fire():L381 Error in hook selection_change
Traceback (most recent call last):
  File "/home/tycho/.local/lib/python3.9/site-packages/libqtile/hook.py", line 379, in fire
    i(*args, **kwargs)
  File "/home/tycho/.local/lib/python3.9/site-packages/libqtile/widget/clipboard.py", line 82, in hook_change
    if self.is_blacklisted(selection["owner"]):
  File "/home/tycho/.local/lib/python3.9/site-packages/libqtile/widget/clipboard.py", line 69, in is_blacklisted
    owner = xcbq.Window(self.qtile.core.conn, owner_id)
AttributeError: module 'libqtile.backend.x11.xcbq' has no attribute 'Window'
```
--- END ISSUE ---

Below are some code segments, each from a relevant file. One or more of these files may contain bugs.

--- BEGIN FILES ---
Path: `libqtile/widget/clipboard.py`
Content:
```
1 # Copyright (c) 2014 Sean Vig
2 # Copyright (c) 2014 roger
3 # Copyright (c) 2014 Adi Sieker
4 # Copyright (c) 2014 Tycho Andersen
5 #
6 # Permission is hereby granted, free of charge, to any person obtaining a copy
7 # of this software and associated documentation files (the "Software"), to deal
8 # in the Software without restriction, including without limitation the rights
9 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 # copies of the Software, and to permit persons to whom the Software is
11 # furnished to do so, subject to the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be included in
14 # all copies or substantial portions of the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 # SOFTWARE.
23 
24 from libqtile import bar, hook
25 from libqtile.backend.x11 import xcbq
26 from libqtile.widget import base
27 
28 
29 class Clipboard(base._TextBox):
30     """Display current clipboard contents"""
31     orientations = base.ORIENTATION_HORIZONTAL
32     defaults = [
33         ("selection", "CLIPBOARD",
34             "the selection to display(CLIPBOARD or PRIMARY)"),
35         ("max_width", 10, "maximum number of characters to display "
36             "(None for all, useful when width is bar.STRETCH)"),
37         ("timeout", 10,
38             "Default timeout (seconds) for display text, None to keep forever"),
39         ("blacklist", ["keepassx"],
40             "list with blacklisted wm_class, sadly not every "
41             "clipboard window sets them, keepassx does."
42             "Clipboard contents from blacklisted wm_classes "
43             "will be replaced by the value of ``blacklist_text``."),
44         ("blacklist_text", "***********",
45             "text to display when the wm_class is blacklisted")
46     ]
47 
48     def __init__(self, width=bar.CALCULATED, **config):
49         base._TextBox.__init__(self, "", width, **config)
50         self.add_defaults(Clipboard.defaults)
51         self.timeout_id = None
52 
53     def _configure(self, qtile, bar):
54         base._TextBox._configure(self, qtile, bar)
55         self.text = ""
56         self.setup_hooks()
57 
58     def clear(self, *args):
59         self.text = ""
60         self.bar.draw()
61 
62     def is_blacklisted(self, owner_id):
63         if not self.blacklist:
64             return False
65 
66         if owner_id in self.qtile.windows_map:
67             owner = self.qtile.windows_map[owner_id].window
68         else:
69             owner = xcbq.Window(self.qtile.core.conn, owner_id)
70 
71         owner_class = owner.get_wm_class()
72         if owner_class:
73             for wm_class in self.blacklist:
74                 if wm_class in owner_class:
75                     return True
76 
77     def setup_hooks(self):
78         def hook_change(name, selection):
79             if name != self.selection:
80                 return
81 
82             if self.is_blacklisted(selection["owner"]):
83                 text = self.blacklist_text
84             else:
85                 text = selection["selection"].replace("\n", " ")
86 
87             text = text.strip()
88             if self.max_width is not None and len(text) > self.max_width:
89                 text = text[:self.max_width] + "..."
90 
91             self.text = text
92 
93             if self.timeout_id:
94                 self.timeout_id.cancel()
95                 self.timeout_id = None
96 
97             if self.timeout:
98                 self.timeout_id = self.timeout_add(self.timeout, self.clear)
99             self.bar.draw()
100 
101         def hook_notify(name, selection):
102             if name != self.selection:
103                 return
104 
105             if self.timeout_id:
106                 self.timeout_id.cancel()
107                 self.timeout_id = None
108 
109             # only clear if don't change don't apply in .5 seconds
110             if self.timeout:
111                 self.timeout_id = self.timeout_add(self.timeout, self.clear)
112             self.bar.draw()
113 
114         hook.subscribe.selection_notify(hook_notify)
115         hook.subscribe.selection_change(hook_change)
```

--- END FILES ---

Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:

```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)


diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
diff --git a/libqtile/widget/clipboard.py b/libqtile/widget/clipboard.py
--- a/libqtile/widget/clipboard.py
+++ b/libqtile/widget/clipboard.py
@@ -66,7 +66,7 @@
         if owner_id in self.qtile.windows_map:
             owner = self.qtile.windows_map[owner_id].window
         else:
-            owner = xcbq.Window(self.qtile.core.conn, owner_id)
+            owner = xcbq.window.XWindow(self.qtile.core.conn, owner_id)
 
         owner_class = owner.get_wm_class()
         if owner_class:
{"golden_diff": "diff --git a/libqtile/widget/clipboard.py b/libqtile/widget/clipboard.py\n--- a/libqtile/widget/clipboard.py\n+++ b/libqtile/widget/clipboard.py\n@@ -66,7 +66,7 @@\n if owner_id in self.qtile.windows_map:\n owner = self.qtile.windows_map[owner_id].window\n else:\n- owner = xcbq.Window(self.qtile.core.conn, owner_id)\n+ owner = xcbq.window.XWindow(self.qtile.core.conn, owner_id)\n \n owner_class = owner.get_wm_class()\n if owner_class:\n", "issue": "stack trace from Clipboard widget\n```\r\n2021-08-13 06:48:23,421 ERROR libqtile hook.py:fire():L381 Error in hook selection_change\r\nTraceback (most recent call last):\r\n File \"/home/tycho/.local/lib/python3.9/site-packages/libqtile/hook.py\", line 379, in fire\r\n i(*args, **kwargs)\r\n File \"/home/tycho/.local/lib/python3.9/site-packages/libqtile/widget/clipboard.py\", line 82, in hook_change\r\n if self.is_blacklisted(selection[\"owner\"]):\r\n File \"/home/tycho/.local/lib/python3.9/site-packages/libqtile/widget/clipboard.py\", line 69, in is_blacklisted\r\n owner = xcbq.Window(self.qtile.core.conn, owner_id)\r\nAttributeError: module 'libqtile.backend.x11.xcbq' has no attribute 'Window'\r\n```\n", "before_files": [{"content": "# Copyright (c) 2014 Sean Vig\n# Copyright (c) 2014 roger\n# Copyright (c) 2014 Adi Sieker\n# Copyright (c) 2014 Tycho Andersen\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nfrom libqtile import bar, hook\nfrom libqtile.backend.x11 import xcbq\nfrom libqtile.widget import base\n\n\nclass Clipboard(base._TextBox):\n \"\"\"Display current clipboard contents\"\"\"\n orientations = base.ORIENTATION_HORIZONTAL\n defaults = [\n (\"selection\", \"CLIPBOARD\",\n \"the selection to display(CLIPBOARD or PRIMARY)\"),\n (\"max_width\", 10, \"maximum number of characters to display \"\n \"(None for all, useful when width is bar.STRETCH)\"),\n (\"timeout\", 10,\n \"Default timeout (seconds) for display text, None to keep forever\"),\n (\"blacklist\", [\"keepassx\"],\n \"list with blacklisted wm_class, sadly not every \"\n \"clipboard window sets them, keepassx does.\"\n \"Clipboard contents from blacklisted wm_classes \"\n \"will be replaced by the value of ``blacklist_text``.\"),\n (\"blacklist_text\", \"***********\",\n \"text to display when the wm_class is blacklisted\")\n ]\n\n def __init__(self, width=bar.CALCULATED, **config):\n base._TextBox.__init__(self, \"\", width, **config)\n self.add_defaults(Clipboard.defaults)\n self.timeout_id = None\n\n def _configure(self, qtile, bar):\n base._TextBox._configure(self, qtile, bar)\n self.text = \"\"\n self.setup_hooks()\n\n def clear(self, *args):\n self.text = \"\"\n self.bar.draw()\n\n def is_blacklisted(self, owner_id):\n if not self.blacklist:\n return False\n\n if owner_id in self.qtile.windows_map:\n owner = self.qtile.windows_map[owner_id].window\n else:\n owner = xcbq.Window(self.qtile.core.conn, owner_id)\n\n owner_class = owner.get_wm_class()\n if owner_class:\n for wm_class in self.blacklist:\n if wm_class in owner_class:\n return True\n\n def setup_hooks(self):\n def hook_change(name, selection):\n if name != self.selection:\n return\n\n if self.is_blacklisted(selection[\"owner\"]):\n text = self.blacklist_text\n else:\n text = selection[\"selection\"].replace(\"\\n\", \" \")\n\n text = text.strip()\n if self.max_width is not None and len(text) > self.max_width:\n text = text[:self.max_width] + \"...\"\n\n self.text = text\n\n if self.timeout_id:\n self.timeout_id.cancel()\n self.timeout_id = None\n\n if self.timeout:\n self.timeout_id = self.timeout_add(self.timeout, self.clear)\n self.bar.draw()\n\n def hook_notify(name, selection):\n if name != self.selection:\n return\n\n if self.timeout_id:\n self.timeout_id.cancel()\n self.timeout_id = None\n\n # only clear if don't change don't apply in .5 seconds\n if self.timeout:\n self.timeout_id = self.timeout_add(self.timeout, self.clear)\n self.bar.draw()\n\n hook.subscribe.selection_notify(hook_notify)\n hook.subscribe.selection_change(hook_change)\n", "path": "libqtile/widget/clipboard.py"}], "after_files": [{"content": "# Copyright (c) 2014 Sean Vig\n# Copyright (c) 2014 roger\n# Copyright (c) 2014 Adi Sieker\n# Copyright (c) 2014 Tycho Andersen\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following 
conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nfrom libqtile import bar, hook\nfrom libqtile.backend.x11 import xcbq\nfrom libqtile.widget import base\n\n\nclass Clipboard(base._TextBox):\n \"\"\"Display current clipboard contents\"\"\"\n orientations = base.ORIENTATION_HORIZONTAL\n defaults = [\n (\"selection\", \"CLIPBOARD\",\n \"the selection to display(CLIPBOARD or PRIMARY)\"),\n (\"max_width\", 10, \"maximum number of characters to display \"\n \"(None for all, useful when width is bar.STRETCH)\"),\n (\"timeout\", 10,\n \"Default timeout (seconds) for display text, None to keep forever\"),\n (\"blacklist\", [\"keepassx\"],\n \"list with blacklisted wm_class, sadly not every \"\n \"clipboard window sets them, keepassx does.\"\n \"Clipboard contents from blacklisted wm_classes \"\n \"will be replaced by the value of ``blacklist_text``.\"),\n (\"blacklist_text\", \"***********\",\n \"text to display when the wm_class is blacklisted\")\n ]\n\n def __init__(self, width=bar.CALCULATED, **config):\n base._TextBox.__init__(self, \"\", width, **config)\n self.add_defaults(Clipboard.defaults)\n self.timeout_id = None\n\n def _configure(self, qtile, bar):\n base._TextBox._configure(self, qtile, bar)\n self.text = \"\"\n self.setup_hooks()\n\n def clear(self, *args):\n self.text = \"\"\n self.bar.draw()\n\n def is_blacklisted(self, owner_id):\n if not self.blacklist:\n return False\n\n if owner_id in self.qtile.windows_map:\n owner = self.qtile.windows_map[owner_id].window\n else:\n owner = xcbq.window.XWindow(self.qtile.core.conn, owner_id)\n\n owner_class = owner.get_wm_class()\n if owner_class:\n for wm_class in self.blacklist:\n if wm_class in owner_class:\n return True\n\n def setup_hooks(self):\n def hook_change(name, selection):\n if name != self.selection:\n return\n\n if self.is_blacklisted(selection[\"owner\"]):\n text = self.blacklist_text\n else:\n text = selection[\"selection\"].replace(\"\\n\", \" \")\n\n text = text.strip()\n if self.max_width is not None and len(text) > self.max_width:\n text = text[:self.max_width] + \"...\"\n\n self.text = text\n\n if self.timeout_id:\n self.timeout_id.cancel()\n self.timeout_id = None\n\n if self.timeout:\n self.timeout_id = self.timeout_add(self.timeout, self.clear)\n self.bar.draw()\n\n def hook_notify(name, selection):\n if name != self.selection:\n return\n\n if self.timeout_id:\n self.timeout_id.cancel()\n self.timeout_id = None\n\n # only clear if don't change don't apply in .5 seconds\n if self.timeout:\n self.timeout_id = self.timeout_add(self.timeout, self.clear)\n self.bar.draw()\n\n hook.subscribe.selection_notify(hook_notify)\n hook.subscribe.selection_change(hook_change)\n", "path": "libqtile/widget/clipboard.py"}]}
1,668
130
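Each record in this dump carries the same JSON fields visible above — `golden_diff`, `issue`, `before_files`, and `after_files`, where each file entry pairs a `content` string with a `path`. A minimal Python sketch for pulling one record apart; the filename `records.jsonl` and the one-record-per-line layout are assumptions for illustration, not something the dump itself guarantees:

```python
import json

# Hypothetical stand-in: one record like the JSON object above,
# read from wherever the dump is stored (e.g. a .jsonl file).
with open("records.jsonl", "r", encoding="utf-8") as fh:
    record = json.loads(fh.readline())

# Keys observed in the records: golden_diff, issue, before_files, after_files.
print(record["issue"].splitlines()[0])        # first line of the issue text
for entry in record["before_files"]:
    print("before:", entry["path"], len(entry["content"]), "chars")
for entry in record["after_files"]:
    print("after: ", entry["path"], len(entry["content"]), "chars")
print(record["golden_diff"].splitlines()[0])  # "diff --git a/... b/..."
```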
gh_patches_debug_586
rasdani/github-patches
git_diff
pex-tool__pex-1275
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Release 2.1.34 On the docket: + [x] Allow command-line arguments to be read from a file #1271 + [x] Issue when running a module inside pex file #1018 + [x] Guard against concurrent re-imports. #1270 + [x] Ensure Pip logs to stderr. #1268 --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `pex/version.py` Content: ``` 1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). 2 # Licensed under the Apache License, Version 2.0 (see LICENSE). 3 4 __version__ = "2.1.33" 5 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/pex/version.py b/pex/version.py --- a/pex/version.py +++ b/pex/version.py @@ -1,4 +1,4 @@ # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -__version__ = "2.1.33" +__version__ = "2.1.34"
{"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.33\"\n+__version__ = \"2.1.34\"\n", "issue": "Release 2.1.34\nOn the docket:\r\n+ [x] Allow command-line arguments to be read from a file #1271\r\n+ [x] Issue when running a module inside pex file #1018\r\n+ [x] Guard against concurrent re-imports. #1270\r\n+ [x] Ensure Pip logs to stderr. #1268\r\n\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.33\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.34\"\n", "path": "pex/version.py"}]}
394
96
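Because `after_files` ships the fully patched sources, a record can be sanity-checked without git: regenerate a diff from the before/after pair and compare it against `golden_diff`. A sketch assuming `record` is a dict parsed as above; note that `difflib` headers and hunk ranges will not match `git diff` output byte-for-byte, so only the `+`/`-` payload lines are worth comparing:

```python
import difflib

def rebuild_diff(record):
    """Regenerate a unified diff from before_files/after_files.

    Sketch only: difflib's file headers and hunk context differ from git's,
    so compare the added/removed lines rather than the whole text.
    """
    before = {f["path"]: f["content"] for f in record["before_files"]}
    after = {f["path"]: f["content"] for f in record["after_files"]}
    chunks = []
    for path in sorted(set(before) | set(after)):
        old = before.get(path, "").splitlines(keepends=True)
        new = after.get(path, "").splitlines(keepends=True)
        chunks.extend(difflib.unified_diff(old, new, "a/" + path, "b/" + path))
    return "".join(chunks)
```

Every line starting with `+` or `-` in the record's `golden_diff` should then also appear in the rebuilt diff.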
gh_patches_debug_6675
rasdani/github-patches
git_diff
fal-ai__dbt-fal-197
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- [Bug] Too many messages received before initialization > mmeasic: Hey, I get this log message on dbt version 0.21.0: ```Logged from file /Users/mmeasic/.virtualenvs/bi-etl-dbt/lib/python3.8/site-packages/dbt/parser/manifest.py, line 792 Traceback (most recent call last): File "/Users/mmeasic/.virtualenvs/bi-etl-dbt/lib/python3.8/site-packages/logbook/handlers.py", line 216, in handle self.emit(record) File "/Users/mmeasic/.virtualenvs/bi-etl-dbt/lib/python3.8/site-packages/dbt/logger.py", line 478, in emit assert len(self._msg_buffer) < self._bufmax, \ AssertionError: too many messages received before initilization! ``` ***** > jstrom40: did your job run after it gave you this error message? i have had this problem when i have had too many threads set up in dbt. i also had it when i tried to run the fal tool but my actual job still ran after it popped out this message ***** > mmeasic: It did run. > I actually have 4 threads set for the target [Thread link](https://discord.com/channels/908693336280432750/908693336280432755/930791100803850283) --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `src/fal/cli/cli.py` Content: ``` 1 from typing import List 2 import sys 3 from dbt.logger import log_manager, GLOBAL_LOGGER as logger 4 from fal.cli.flow_runner import fal_flow_run 5 from faldbt.lib import DBT_VCURRENT, DBT_V1 6 from .args import parse_args 7 from .fal_runner import fal_run 8 from fal.telemetry import telemetry 9 10 11 @telemetry.log_call("cli") 12 def cli(argv: List[str] = sys.argv): 13 parsed = parse_args(argv[1:]) 14 15 # TODO: remove `action="extend"` to match exactly what dbt does 16 selects_count = ( 17 argv.count("-s") 18 + argv.count("--select") 19 + argv.count("-m") 20 + argv.count("--model") 21 ) 22 exclude_count = argv.count("--exclude") 23 script_count = argv.count("--script") 24 25 if parsed.disable_logging: 26 logger.disable() 27 # Re-enable logging for 1.0.0 through old API of logger 28 elif DBT_VCURRENT.compare(DBT_V1) >= 0: 29 if logger.disabled: 30 logger.enable() 31 32 with log_manager.applicationbound(): 33 if parsed.debug: 34 log_manager.set_debug() 35 36 if parsed.command == "flow": 37 if parsed.flow_command == "run": 38 fal_flow_run(parsed) 39 40 elif parsed.command == "run": 41 fal_run( 42 parsed, 43 selects_count=selects_count, 44 exclude_count=exclude_count, 45 script_count=script_count, 46 ) 47 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/src/fal/cli/cli.py b/src/fal/cli/cli.py --- a/src/fal/cli/cli.py +++ b/src/fal/cli/cli.py @@ -20,6 +20,10 @@ exclude_count = argv.count("--exclude") script_count = argv.count("--script") + # Disabling the dbt.logger.DelayedFileHandler manually + # since we do not use the new dbt logging system + # This fixes issue https://github.com/fal-ai/fal/issues/97 + log_manager.set_path(None) if parsed.disable_logging: logger.disable() # Re-enable logging for 1.0.0 through old API of logger
{"golden_diff": "diff --git a/src/fal/cli/cli.py b/src/fal/cli/cli.py\n--- a/src/fal/cli/cli.py\n+++ b/src/fal/cli/cli.py\n@@ -20,6 +20,10 @@\n exclude_count = argv.count(\"--exclude\")\n script_count = argv.count(\"--script\")\n \n+ # Disabling the dbt.logger.DelayedFileHandler manually\n+ # since we do not use the new dbt logging system\n+ # This fixes issue https://github.com/fal-ai/fal/issues/97\n+ log_manager.set_path(None)\n if parsed.disable_logging:\n logger.disable()\n # Re-enable logging for 1.0.0 through old API of logger\n", "issue": "[Bug] Too many messages received before initialization\n> mmeasic: Hey, I get this log message on dbt version 0.21.0:\r\n\r\n```Logged from file /Users/mmeasic/.virtualenvs/bi-etl-dbt/lib/python3.8/site-packages/dbt/parser/manifest.py, line 792\r\nTraceback (most recent call last):\r\n File \"/Users/mmeasic/.virtualenvs/bi-etl-dbt/lib/python3.8/site-packages/logbook/handlers.py\", line 216, in handle\r\n self.emit(record)\r\n File \"/Users/mmeasic/.virtualenvs/bi-etl-dbt/lib/python3.8/site-packages/dbt/logger.py\", line 478, in emit\r\n assert len(self._msg_buffer) < self._bufmax, \\\r\nAssertionError: too many messages received before initilization!\r\n```\r\n\r\n*****\r\n\r\n> jstrom40: did your job run after it gave you this error message? i have had this problem when i have had too many threads set up in dbt. i also had it when i tried to run the fal tool but my actual job still ran after it popped out this message\r\n\r\n*****\r\n\r\n> mmeasic: It did run.\r\n> I actually have 4 threads set for the target\r\n\r\n[Thread link](https://discord.com/channels/908693336280432750/908693336280432755/930791100803850283)\n", "before_files": [{"content": "from typing import List\nimport sys\nfrom dbt.logger import log_manager, GLOBAL_LOGGER as logger\nfrom fal.cli.flow_runner import fal_flow_run\nfrom faldbt.lib import DBT_VCURRENT, DBT_V1\nfrom .args import parse_args\nfrom .fal_runner import fal_run\nfrom fal.telemetry import telemetry\n\n\[email protected]_call(\"cli\")\ndef cli(argv: List[str] = sys.argv):\n parsed = parse_args(argv[1:])\n\n # TODO: remove `action=\"extend\"` to match exactly what dbt does\n selects_count = (\n argv.count(\"-s\")\n + argv.count(\"--select\")\n + argv.count(\"-m\")\n + argv.count(\"--model\")\n )\n exclude_count = argv.count(\"--exclude\")\n script_count = argv.count(\"--script\")\n\n if parsed.disable_logging:\n logger.disable()\n # Re-enable logging for 1.0.0 through old API of logger\n elif DBT_VCURRENT.compare(DBT_V1) >= 0:\n if logger.disabled:\n logger.enable()\n\n with log_manager.applicationbound():\n if parsed.debug:\n log_manager.set_debug()\n\n if parsed.command == \"flow\":\n if parsed.flow_command == \"run\":\n fal_flow_run(parsed)\n\n elif parsed.command == \"run\":\n fal_run(\n parsed,\n selects_count=selects_count,\n exclude_count=exclude_count,\n script_count=script_count,\n )\n", "path": "src/fal/cli/cli.py"}], "after_files": [{"content": "from typing import List\nimport sys\nfrom dbt.logger import log_manager, GLOBAL_LOGGER as logger\nfrom fal.cli.flow_runner import fal_flow_run\nfrom faldbt.lib import DBT_VCURRENT, DBT_V1\nfrom .args import parse_args\nfrom .fal_runner import fal_run\n\n\ndef cli(argv: List[str] = sys.argv):\n parsed = parse_args(argv[1:])\n\n # TODO: remove `action=\"extend\"` to match exactly what dbt does\n selects_count = (\n argv.count(\"-s\")\n + argv.count(\"--select\")\n + argv.count(\"-m\")\n + argv.count(\"--model\")\n )\n exclude_count = argv.count(\"--exclude\")\n 
script_count = argv.count(\"--script\")\n\n # Disabling the dbt.logger.DelayedFileHandler manually\n # since we do not use the new dbt logging system\n # This fixes issue https://github.com/fal-ai/fal/issues/97\n log_manager.set_path(None)\n if parsed.disable_logging:\n logger.disable()\n # Re-enable logging for 1.0.0 through old API of logger\n elif DBT_VCURRENT.compare(DBT_V1) >= 0:\n if logger.disabled:\n logger.enable()\n\n with log_manager.applicationbound():\n if parsed.debug:\n log_manager.set_debug()\n\n if parsed.command == \"flow\":\n if parsed.flow_command == \"run\":\n fal_flow_run(parsed)\n\n elif parsed.command == \"run\":\n fal_run(\n parsed,\n selects_count=selects_count,\n exclude_count=exclude_count,\n script_count=script_count,\n )\n", "path": "src/fal/cli/cli.py"}]}
999
155
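The fal patch above works by calling `log_manager.set_path(None)` before any logging happens, so dbt's delayed file handler stops buffering. A toy model of the failure mode — this is not dbt's or logbook's real handler, just an illustration of why an unbounded stream of records before initialization trips the assertion quoted in the issue:

```python
class DelayedHandler:
    """Toy stand-in for a buffer-until-initialized log handler."""

    def __init__(self, bufmax=10):
        self._msg_buffer = []
        self._bufmax = bufmax
        self._initialized = False

    def set_path(self, path):
        # Giving the handler a destination (even None) ends the buffering phase.
        self._initialized = True
        self._msg_buffer.clear()

    def emit(self, record):
        if self._initialized:
            return  # a real handler would write to its destination here
        assert len(self._msg_buffer) < self._bufmax, \
            "too many messages received before initialization!"
        self._msg_buffer.append(record)


handler = DelayedHandler(bufmax=10)
handler.set_path(None)   # what the patch does up front
for i in range(100):     # would raise AssertionError without the call above
    handler.emit(f"message {i}")
```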
gh_patches_debug_23183
rasdani/github-patches
git_diff
facebookresearch__ParlAI-3067
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- 'PathManagerBase' object has no attribute 'makedirs' In attempting to create the tensorboard directory with PathManager we're calling a nonexistent function. To repro: ```bash $ python -m parlai.scripts.train_model -t personachat -m transformer/ranker -mf /tmp/model_tr6 --n-layers 1 --embedding-size 300 --ffn-size 600 --n-heads 4 --num-epochs 2 -veps 0.25 -bs 64 -lr 0.001 --dropout 0.1 --embedding-type fasttext_cc --candidates batch --tensorboard-log true ``` Exception hit: ``` File "/Users/spoff/ParlAI/parlai/core/logs.py", line 56, in __init__ PathManager.makedirs(tbpath) AttributeError: 'PathManagerBase' object has no attribute 'makedirs' ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 #!/usr/bin/env python3 2 3 # Copyright (c) Facebook, Inc. and its affiliates. 4 # This source code is licensed under the MIT license found in the 5 # LICENSE file in the root directory of this source tree. 6 7 8 import sys 9 10 from setuptools import setup, find_packages 11 12 VERSION = '0.9.1' # if you update, update parlai/__init__.py too! 13 14 if sys.version_info < (3, 6): 15 sys.exit('Sorry, Python >=3.6 is required for ParlAI.') 16 17 with open('README.md', encoding="utf8") as f: 18 # strip the header and badges etc 19 readme = f.read().split('--------------------')[-1] 20 21 with open('requirements.txt') as f: 22 reqs = [] 23 for line in f: 24 line = line.strip() 25 reqs.append(line.split('==')[0]) 26 27 28 if __name__ == '__main__': 29 setup( 30 name='parlai', 31 version=VERSION, 32 description='Unified platform for dialogue research.', 33 long_description=readme, 34 long_description_content_type='text/markdown', 35 url='http://parl.ai/', 36 python_requires='>=3.6', 37 packages=find_packages( 38 exclude=('data', 'docs', 'examples', 'tests', 'parlai_internal*') 39 ), 40 install_requires=reqs, 41 include_package_data=True, 42 package_data={'': ['*.txt', '*.md']}, 43 entry_points={ 44 "flake8.extension": ["PAI = parlai.utils.flake8:ParlAIChecker"], 45 "console_scripts": ["parlai=parlai.__main__:main"], 46 }, 47 classifiers=[ 48 "Programming Language :: Python :: 3", 49 "License :: OSI Approved :: MIT License", 50 "Topic :: Scientific/Engineering :: Artificial Intelligence", 51 "Natural Language :: English", 52 ], 53 ) 54 ``` Path: `parlai/core/logs.py` Content: ``` 1 #!/usr/bin/env python3 2 3 # Copyright (c) Facebook, Inc. and its affiliates. 4 # This source code is licensed under the MIT license found in the 5 # LICENSE file in the root directory of this source tree. 6 """ 7 Log metrics to tensorboard. 8 9 This file provides interface to log any metrics in tensorboard, could be 10 extended to any other tool like visdom. 11 12 .. code-block: none 13 14 tensorboard --logdir <PARLAI_DATA/tensorboard> --port 8888. 15 """ 16 17 import json 18 import numbers 19 from parlai.core.opt import Opt 20 from parlai.core.metrics import Metric 21 from parlai.utils.io import PathManager 22 import parlai.utils.logging as logging 23 24 25 class TensorboardLogger(object): 26 """ 27 Log objects to tensorboard. 28 """ 29 30 @staticmethod 31 def add_cmdline_args(argparser): 32 """ 33 Add tensorboard CLI args. 34 """ 35 logger = argparser.add_argument_group('Tensorboard Arguments') 36 logger.add_argument( 37 '-tblog', 38 '--tensorboard-log', 39 type='bool', 40 default=False, 41 help="Tensorboard logging of metrics, default is %(default)s", 42 hidden=False, 43 ) 44 45 def __init__(self, opt: Opt): 46 try: 47 # tensorboard is a very expensive thing to import. Wait until the 48 # last second to import it. 49 from tensorboardX import SummaryWriter 50 except ImportError: 51 raise ImportError('Please run `pip install tensorboard tensorboardX`.') 52 53 tbpath = opt['model_file'] + '.tensorboard' 54 logging.debug(f'Saving tensorboard logs to: {tbpath}') 55 if not PathManager.exists(tbpath): 56 PathManager.makedirs(tbpath) 57 self.writer = SummaryWriter(tbpath, comment=json.dumps(opt)) 58 59 def log_metrics(self, setting, step, report): 60 """ 61 Add all metrics from tensorboard_metrics opt key. 62 63 :param setting: 64 One of train/valid/test. Will be used as the title for the graph. 65 :param step: 66 Number of parleys 67 :param report: 68 The report to log 69 """ 70 for k, v in report.items(): 71 if isinstance(v, numbers.Number): 72 self.writer.add_scalar(f'{k}/{setting}', v, global_step=step) 73 elif isinstance(v, Metric): 74 self.writer.add_scalar(f'{k}/{setting}', v.value(), global_step=step) 75 else: 76 logging.error(f'k {k} v {v} is not a number') 77 78 def flush(self): 79 self.writer.flush() 80 ``` Path: `parlai/__init__.py` Content: ``` 1 #!/usr/bin/env python3 2 3 # Copyright (c) Facebook, Inc. and its affiliates. 4 # This source code is licensed under the MIT license found in the 5 # LICENSE file in the root directory of this source tree. 6 7 __version__ = '0.9.1' 8 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/parlai/__init__.py b/parlai/__init__.py --- a/parlai/__init__.py +++ b/parlai/__init__.py @@ -4,4 +4,4 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -__version__ = '0.9.1' +__version__ = '0.9.2' diff --git a/parlai/core/logs.py b/parlai/core/logs.py --- a/parlai/core/logs.py +++ b/parlai/core/logs.py @@ -53,7 +53,7 @@ tbpath = opt['model_file'] + '.tensorboard' logging.debug(f'Saving tensorboard logs to: {tbpath}') if not PathManager.exists(tbpath): - PathManager.makedirs(tbpath) + PathManager.mkdirs(tbpath) self.writer = SummaryWriter(tbpath, comment=json.dumps(opt)) def log_metrics(self, setting, step, report): diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ from setuptools import setup, find_packages -VERSION = '0.9.1' # if you update, update parlai/__init__.py too! +VERSION = '0.9.2' # if you update, update parlai/__init__.py too! if sys.version_info < (3, 6): sys.exit('Sorry, Python >=3.6 is required for ParlAI.')
{"golden_diff": "diff --git a/parlai/__init__.py b/parlai/__init__.py\n--- a/parlai/__init__.py\n+++ b/parlai/__init__.py\n@@ -4,4 +4,4 @@\n # This source code is licensed under the MIT license found in the\n # LICENSE file in the root directory of this source tree.\n \n-__version__ = '0.9.1'\n+__version__ = '0.9.2'\ndiff --git a/parlai/core/logs.py b/parlai/core/logs.py\n--- a/parlai/core/logs.py\n+++ b/parlai/core/logs.py\n@@ -53,7 +53,7 @@\n tbpath = opt['model_file'] + '.tensorboard'\n logging.debug(f'Saving tensorboard logs to: {tbpath}')\n if not PathManager.exists(tbpath):\n- PathManager.makedirs(tbpath)\n+ PathManager.mkdirs(tbpath)\n self.writer = SummaryWriter(tbpath, comment=json.dumps(opt))\n \n def log_metrics(self, setting, step, report):\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -9,7 +9,7 @@\n \n from setuptools import setup, find_packages\n \n-VERSION = '0.9.1' # if you update, update parlai/__init__.py too!\n+VERSION = '0.9.2' # if you update, update parlai/__init__.py too!\n \n if sys.version_info < (3, 6):\n sys.exit('Sorry, Python >=3.6 is required for ParlAI.')\n", "issue": "'PathManagerBase' object has no attribute 'makedirs'\nIn attempting to create the tensorboard directory with PathManager we're calling a nonexistent function.\r\n\r\nTo repro:\r\n```bash\r\n$ python -m parlai.scripts.train_model -t personachat -m transformer/ranker -mf /tmp/model_tr6 --n-layers 1 --embedding-size 300 --ffn-size 600 --n-heads 4 --num-epochs 2 -veps 0.25 -bs 64 -lr 0.001 --dropout 0.1 --embedding-type fasttext_cc --candidates batch --tensorboard-log true\r\n```\r\n\r\nException hit:\r\n```\r\nFile \"/Users/spoff/ParlAI/parlai/core/logs.py\", line 56, in __init__\r\n PathManager.makedirs(tbpath)\r\nAttributeError: 'PathManagerBase' object has no attribute 'makedirs'\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\n\nimport sys\n\nfrom setuptools import setup, find_packages\n\nVERSION = '0.9.1' # if you update, update parlai/__init__.py too!\n\nif sys.version_info < (3, 6):\n sys.exit('Sorry, Python >=3.6 is required for ParlAI.')\n\nwith open('README.md', encoding=\"utf8\") as f:\n # strip the header and badges etc\n readme = f.read().split('--------------------')[-1]\n\nwith open('requirements.txt') as f:\n reqs = []\n for line in f:\n line = line.strip()\n reqs.append(line.split('==')[0])\n\n\nif __name__ == '__main__':\n setup(\n name='parlai',\n version=VERSION,\n description='Unified platform for dialogue research.',\n long_description=readme,\n long_description_content_type='text/markdown',\n url='http://parl.ai/',\n python_requires='>=3.6',\n packages=find_packages(\n exclude=('data', 'docs', 'examples', 'tests', 'parlai_internal*')\n ),\n install_requires=reqs,\n include_package_data=True,\n package_data={'': ['*.txt', '*.md']},\n entry_points={\n \"flake8.extension\": [\"PAI = parlai.utils.flake8:ParlAIChecker\"],\n \"console_scripts\": [\"parlai=parlai.__main__:main\"],\n },\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: MIT License\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Natural Language :: English\",\n ],\n )\n", "path": "setup.py"}, {"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. 
and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\"\"\"\nLog metrics to tensorboard.\n\nThis file provides interface to log any metrics in tensorboard, could be\nextended to any other tool like visdom.\n\n.. code-block: none\n\n tensorboard --logdir <PARLAI_DATA/tensorboard> --port 8888.\n\"\"\"\n\nimport json\nimport numbers\nfrom parlai.core.opt import Opt\nfrom parlai.core.metrics import Metric\nfrom parlai.utils.io import PathManager\nimport parlai.utils.logging as logging\n\n\nclass TensorboardLogger(object):\n \"\"\"\n Log objects to tensorboard.\n \"\"\"\n\n @staticmethod\n def add_cmdline_args(argparser):\n \"\"\"\n Add tensorboard CLI args.\n \"\"\"\n logger = argparser.add_argument_group('Tensorboard Arguments')\n logger.add_argument(\n '-tblog',\n '--tensorboard-log',\n type='bool',\n default=False,\n help=\"Tensorboard logging of metrics, default is %(default)s\",\n hidden=False,\n )\n\n def __init__(self, opt: Opt):\n try:\n # tensorboard is a very expensive thing to import. Wait until the\n # last second to import it.\n from tensorboardX import SummaryWriter\n except ImportError:\n raise ImportError('Please run `pip install tensorboard tensorboardX`.')\n\n tbpath = opt['model_file'] + '.tensorboard'\n logging.debug(f'Saving tensorboard logs to: {tbpath}')\n if not PathManager.exists(tbpath):\n PathManager.makedirs(tbpath)\n self.writer = SummaryWriter(tbpath, comment=json.dumps(opt))\n\n def log_metrics(self, setting, step, report):\n \"\"\"\n Add all metrics from tensorboard_metrics opt key.\n\n :param setting:\n One of train/valid/test. Will be used as the title for the graph.\n :param step:\n Number of parleys\n :param report:\n The report to log\n \"\"\"\n for k, v in report.items():\n if isinstance(v, numbers.Number):\n self.writer.add_scalar(f'{k}/{setting}', v, global_step=step)\n elif isinstance(v, Metric):\n self.writer.add_scalar(f'{k}/{setting}', v.value(), global_step=step)\n else:\n logging.error(f'k {k} v {v} is not a number')\n\n def flush(self):\n self.writer.flush()\n", "path": "parlai/core/logs.py"}, {"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\n__version__ = '0.9.1'\n", "path": "parlai/__init__.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. 
and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\n\nimport sys\n\nfrom setuptools import setup, find_packages\n\nVERSION = '0.9.2' # if you update, update parlai/__init__.py too!\n\nif sys.version_info < (3, 6):\n sys.exit('Sorry, Python >=3.6 is required for ParlAI.')\n\nwith open('README.md', encoding=\"utf8\") as f:\n # strip the header and badges etc\n readme = f.read().split('--------------------')[-1]\n\nwith open('requirements.txt') as f:\n reqs = []\n for line in f:\n line = line.strip()\n reqs.append(line.split('==')[0])\n\n\nif __name__ == '__main__':\n setup(\n name='parlai',\n version=VERSION,\n description='Unified platform for dialogue research.',\n long_description=readme,\n long_description_content_type='text/markdown',\n url='http://parl.ai/',\n python_requires='>=3.6',\n packages=find_packages(\n exclude=('data', 'docs', 'examples', 'tests', 'parlai_internal*')\n ),\n install_requires=reqs,\n include_package_data=True,\n package_data={'': ['*.txt', '*.md']},\n entry_points={\n \"flake8.extension\": [\"PAI = parlai.utils.flake8:ParlAIChecker\"],\n \"console_scripts\": [\"parlai=parlai.__main__:main\"],\n },\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: MIT License\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Natural Language :: English\",\n ],\n )\n", "path": "setup.py"}, {"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\"\"\"\nLog metrics to tensorboard.\n\nThis file provides interface to log any metrics in tensorboard, could be\nextended to any other tool like visdom.\n\n.. code-block: none\n\n tensorboard --logdir <PARLAI_DATA/tensorboard> --port 8888.\n\"\"\"\n\nimport json\nimport numbers\nfrom parlai.core.opt import Opt\nfrom parlai.core.metrics import Metric\nfrom parlai.utils.io import PathManager\nimport parlai.utils.logging as logging\n\n\nclass TensorboardLogger(object):\n \"\"\"\n Log objects to tensorboard.\n \"\"\"\n\n @staticmethod\n def add_cmdline_args(argparser):\n \"\"\"\n Add tensorboard CLI args.\n \"\"\"\n logger = argparser.add_argument_group('Tensorboard Arguments')\n logger.add_argument(\n '-tblog',\n '--tensorboard-log',\n type='bool',\n default=False,\n help=\"Tensorboard logging of metrics, default is %(default)s\",\n hidden=False,\n )\n\n def __init__(self, opt: Opt):\n try:\n # tensorboard is a very expensive thing to import. Wait until the\n # last second to import it.\n from tensorboardX import SummaryWriter\n except ImportError:\n raise ImportError('Please run `pip install tensorboard tensorboardX`.')\n\n tbpath = opt['model_file'] + '.tensorboard'\n logging.debug(f'Saving tensorboard logs to: {tbpath}')\n if not PathManager.exists(tbpath):\n PathManager.mkdirs(tbpath)\n self.writer = SummaryWriter(tbpath, comment=json.dumps(opt))\n\n def log_metrics(self, setting, step, report):\n \"\"\"\n Add all metrics from tensorboard_metrics opt key.\n\n :param setting:\n One of train/valid/test. 
Will be used as the title for the graph.\n :param step:\n Number of parleys\n :param report:\n The report to log\n \"\"\"\n for k, v in report.items():\n if isinstance(v, numbers.Number):\n self.writer.add_scalar(f'{k}/{setting}', v, global_step=step)\n elif isinstance(v, Metric):\n self.writer.add_scalar(f'{k}/{setting}', v.value(), global_step=step)\n else:\n logging.error(f'k {k} v {v} is not a number')\n\n def flush(self):\n self.writer.flush()\n", "path": "parlai/core/logs.py"}, {"content": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\n__version__ = '0.9.2'\n", "path": "parlai/__init__.py"}]}
1,759
356
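The one-token fix above (`makedirs` → `mkdirs`) comes down to matching the name the installed `PathManager` actually exposes. For a plain local path the intended behaviour is idempotent directory creation, which `os.makedirs` can sketch — treat this as an illustration of the semantics, not as ParlAI's API:

```python
import os

def ensure_dir(path: str) -> None:
    # Idempotent: creates intermediate directories, no error if it exists.
    os.makedirs(path, exist_ok=True)

ensure_dir("/tmp/model_tr6.tensorboard")  # path taken from the repro above
```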
gh_patches_debug_17281
rasdani/github-patches
git_diff
kivy__kivy-3303
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Can't create package for windows with kivy 1.9 portable I'm looking to port an existing kivy 1.8 project to kivy 1.9. I've just downloaded the portable version and have the application working. However when packaging the app using pyinstaller and the instructions on http://kivy.org/docs/guide/packaging-windows.html the app packages, but on execution immediately fails with error: ``` Traceback (most recent call last): File "<string>", line 34, in <module> ImportError: No module named pygame.pkgdata ``` I've tried using my old .spec file and generating a new one with exactly the same results. I'm a bit mystified where this is coming from as pygame isn't imported anywhere in my application and I thought it had been replaced with sdl2 in kivy 1.9. I'm also confused that the application works when run directly. Anyone come across this issue or can point me in the right direction? --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py` Content: ``` 1 from os.path import join, dirname 2 from os import environ, chdir, putenv 3 import sys 4 5 root = 'kivy_install' 6 if hasattr(sys, '_MEIPASS'): 7 # PyInstaller >= 1.6 8 chdir(sys._MEIPASS) 9 root = join(sys._MEIPASS, root) 10 elif '_MEIPASS2' in environ: 11 # PyInstaller < 1.6 (tested on 1.5 only) 12 chdir(environ['_MEIPASS2']) 13 root = join(environ['_MEIPASS2'], root) 14 else: 15 chdir(dirname(sys.argv[0])) 16 root = join(dirname(sys.argv[0]), root) 17 18 19 sys.path += [join(root, '_libs')] 20 21 if sys.platform == 'darwin': 22 sitepackages = join(root, '..', 'sitepackages') 23 sys.path += [sitepackages, join(sitepackages, 'gst-0.10')] 24 putenv('GST_REGISTRY_FORK', 'no') 25 26 environ['GST_PLUGIN_PATH'] = join(root, '..', 'gst-plugins') 27 environ['KIVY_DATA_DIR'] = join(root, 'data') 28 environ['KIVY_EXTS_DIR'] = join(root, 'extensions') 29 environ['KIVY_MODULES_DIR'] = join(root, 'modules') 30 environ['KIVY_EMBED'] = '1' 31 32 # Monkey-patch pygame to get around an issue with Pygame window icon and 33 # PyInstaller 2.1. See kivy issue #1638 34 # Uncomment the following to package pygame 35 #import pygame.pkgdata 36 #_original_getResource = pygame.pkgdata.getResource 37 # 38 # 39 #def getResource(identifier, *args, **kwargs): 40 # if identifier == 'pygame_icon.tiff': 41 # raise IOError() 42 # return _original_getResource(identifier, *args, **kwargs) 43 #pygame.pkgdata.getResource = getResource 44 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py b/kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py --- a/kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py +++ b/kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py @@ -29,15 +29,17 @@ environ['KIVY_MODULES_DIR'] = join(root, 'modules') environ['KIVY_EMBED'] = '1' + # Monkey-patch pygame to get around an issue with Pygame window icon and # PyInstaller 2.1. See kivy issue #1638 -# Uncomment the following to package pygame -#import pygame.pkgdata -#_original_getResource = pygame.pkgdata.getResource -# -# -#def getResource(identifier, *args, **kwargs): -# if identifier == 'pygame_icon.tiff': -# raise IOError() -# return _original_getResource(identifier, *args, **kwargs) -#pygame.pkgdata.getResource = getResource +def getResource(identifier, *args, **kwargs): + if identifier == 'pygame_icon.tiff': + raise IOError() + return _original_getResource(identifier, *args, **kwargs) + +try: + import pygame.pkgdata + _original_getResource = pygame.pkgdata.getResource + pygame.pkgdata.getResource = getResource +except ImportError: + pass
{"golden_diff": "diff --git a/kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py b/kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py\n--- a/kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py\n+++ b/kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py\n@@ -29,15 +29,17 @@\n environ['KIVY_MODULES_DIR'] = join(root, 'modules')\n environ['KIVY_EMBED'] = '1'\n \n+\n # Monkey-patch pygame to get around an issue with Pygame window icon and\n # PyInstaller 2.1. See kivy issue #1638\n-# Uncomment the following to package pygame\n-#import pygame.pkgdata\n-#_original_getResource = pygame.pkgdata.getResource\n-#\n-#\n-#def getResource(identifier, *args, **kwargs):\n-# if identifier == 'pygame_icon.tiff':\n-# raise IOError()\n-# return _original_getResource(identifier, *args, **kwargs)\n-#pygame.pkgdata.getResource = getResource\n+def getResource(identifier, *args, **kwargs):\n+ if identifier == 'pygame_icon.tiff':\n+ raise IOError()\n+ return _original_getResource(identifier, *args, **kwargs)\n+\n+try:\n+ import pygame.pkgdata\n+ _original_getResource = pygame.pkgdata.getResource\n+ pygame.pkgdata.getResource = getResource\n+except ImportError:\n+ pass\n", "issue": "Can't create package for windows with kivy 1.9 portable\nI'm looking to port an existing kivy 1.8 project to kivy 1.9. I've just downloaded the portable version and have the application working.\n\nHowever when packaging the app using pyinstaller and the instructions on http://kivy.org/docs/guide/packaging-windows.html the app packages, but on execution immediately fails with error:\n\n```\nTraceback (most recent call last):\n File \"<string>\", line 34, in <module>\nImportError: No module named pygame.pkgdata\n```\n\nI've tried using my old .spec file and generating a new one with exactly the same results.\n\nI'm a bit mystified where this is coming from as pygame isn't imported anywhere in my application and I thought it had been replaced with sdl2 in kivy 1.9. I'm also confused that the application works when run directly.\n\nAnyone come across this issue or can point me in the right direction?\n\n", "before_files": [{"content": "from os.path import join, dirname\nfrom os import environ, chdir, putenv\nimport sys\n\nroot = 'kivy_install'\nif hasattr(sys, '_MEIPASS'):\n # PyInstaller >= 1.6\n chdir(sys._MEIPASS)\n root = join(sys._MEIPASS, root)\nelif '_MEIPASS2' in environ:\n # PyInstaller < 1.6 (tested on 1.5 only)\n chdir(environ['_MEIPASS2'])\n root = join(environ['_MEIPASS2'], root)\nelse:\n chdir(dirname(sys.argv[0]))\n root = join(dirname(sys.argv[0]), root)\n\n\nsys.path += [join(root, '_libs')]\n\nif sys.platform == 'darwin':\n sitepackages = join(root, '..', 'sitepackages')\n sys.path += [sitepackages, join(sitepackages, 'gst-0.10')]\n putenv('GST_REGISTRY_FORK', 'no')\n\nenviron['GST_PLUGIN_PATH'] = join(root, '..', 'gst-plugins')\nenviron['KIVY_DATA_DIR'] = join(root, 'data')\nenviron['KIVY_EXTS_DIR'] = join(root, 'extensions')\nenviron['KIVY_MODULES_DIR'] = join(root, 'modules')\nenviron['KIVY_EMBED'] = '1'\n\n# Monkey-patch pygame to get around an issue with Pygame window icon and\n# PyInstaller 2.1. 
See kivy issue #1638\n# Uncomment the following to package pygame\n#import pygame.pkgdata\n#_original_getResource = pygame.pkgdata.getResource\n#\n#\n#def getResource(identifier, *args, **kwargs):\n# if identifier == 'pygame_icon.tiff':\n# raise IOError()\n# return _original_getResource(identifier, *args, **kwargs)\n#pygame.pkgdata.getResource = getResource\n", "path": "kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py"}], "after_files": [{"content": "from os.path import join, dirname\nfrom os import environ, chdir, putenv\nimport sys\n\nroot = 'kivy_install'\nif hasattr(sys, '_MEIPASS'):\n # PyInstaller >= 1.6\n chdir(sys._MEIPASS)\n root = join(sys._MEIPASS, root)\nelif '_MEIPASS2' in environ:\n # PyInstaller < 1.6 (tested on 1.5 only)\n chdir(environ['_MEIPASS2'])\n root = join(environ['_MEIPASS2'], root)\nelse:\n chdir(dirname(sys.argv[0]))\n root = join(dirname(sys.argv[0]), root)\n\n\nsys.path += [join(root, '_libs')]\n\nif sys.platform == 'darwin':\n sitepackages = join(root, '..', 'sitepackages')\n sys.path += [sitepackages, join(sitepackages, 'gst-0.10')]\n putenv('GST_REGISTRY_FORK', 'no')\n\nenviron['GST_PLUGIN_PATH'] = join(root, '..', 'gst-plugins')\nenviron['KIVY_DATA_DIR'] = join(root, 'data')\nenviron['KIVY_EXTS_DIR'] = join(root, 'extensions')\nenviron['KIVY_MODULES_DIR'] = join(root, 'modules')\nenviron['KIVY_EMBED'] = '1'\n\n\n# Monkey-patch pygame to get around an issue with Pygame window icon and\n# PyInstaller 2.1. See kivy issue #1638\ndef getResource(identifier, *args, **kwargs):\n if identifier == 'pygame_icon.tiff':\n raise IOError()\n return _original_getResource(identifier, *args, **kwargs)\n\ntry:\n import pygame.pkgdata\n _original_getResource = pygame.pkgdata.getResource\n pygame.pkgdata.getResource = getResource\nexcept ImportError:\n pass\n", "path": "kivy/tools/packaging/pyinstaller_hooks/rt-hook-kivy.py"}]}
952
319
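The kivy patch above is an instance of a general pattern: guard a module-level monkey-patch on an optional dependency with `try`/`except ImportError`, so environments without the package skip the patch instead of crashing at import time. A generic restatement of the pattern — all names here are illustrative, not a real library's API:

```python
# Illustrative names throughout; only the shape of the guard matters,
# mirroring the rt-hook patch above.
def _patched_loader(name, *args, **kwargs):
    if name == "resource_we_must_not_load":
        raise IOError()
    return _original_loader(name, *args, **kwargs)

try:
    import optional_dependency  # absent in many target environments
    _original_loader = optional_dependency.load
    optional_dependency.load = _patched_loader
except ImportError:
    pass  # dependency not installed: nothing to patch, no hard failure
```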
gh_patches_debug_35684
rasdani/github-patches
git_diff
ManageIQ__integration_tests-296
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Better YAML overriding Now it does not take just the root element into the account, but it crawls throught the dictionary and only updates the values that are present in the new dictionary. It converts all dicts to Configs, other values than specified in override dict are not touched. It also improves the `__getattribute__` behaviour - now it propagates the interface to the child nodes by converting all `dict` to `Config` before returning the value, so the dot operator can be used everywhere. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `utils/conf_loader.py` Content: ``` 1 import os 2 from collections import OrderedDict 3 4 import py.path 5 import yaml 6 from yaml.loader import Loader 7 8 9 class OrderedYamlLoader(Loader): 10 def construct_yaml_map(self, node): 11 data = OrderedDict() 12 yield data 13 value = self.construct_mapping(node) 14 data.update(value) 15 16 17 class ConfigNotFoundException(Exception): 18 pass 19 20 21 class Config(dict): 22 """A dict subclass with knowledge of conf yamls and how to load them 23 24 Also supports descriptor access, e.g. conf.configfile 25 (compared to the normal dict access, conf['configfile']) 26 """ 27 # Stash the exception on the class for convenience, e.g. 28 # try: 29 # conf[does_not_exist] 30 # except conf.NotFoundException 31 # ... 32 NotFoundException = ConfigNotFoundException 33 34 # Support for descriptor access, e.g. instance.attrname 35 # Note that this is only on the get side, for support of nefarious things 36 # like setting and deleting, use the normal dict interface. 37 def __getattribute__(self, attr): 38 # Attempt normal object attr lookup; delegate to the dict interface if that fails 39 try: 40 return super(Config, self).__getattribute__(attr) 41 except AttributeError: 42 return self[attr] 43 44 def __getitem__(self, key): 45 # Attempt a normal dict lookup to pull a cached conf 46 try: 47 return super(Config, self).__getitem__(key) 48 except KeyError: 49 # Cache miss, load the requested yaml 50 yaml_dict = load_yaml(key) 51 52 # Graft in local yaml updates if they're available 53 try: 54 local_yaml = '%s.local' % key 55 local_yaml_dict = load_yaml(local_yaml) 56 yaml_dict.update(local_yaml_dict) 57 except ConfigNotFoundException: 58 pass 59 60 # Returning self[key] instead of yaml_dict as a small sanity check 61 self[key] = yaml_dict 62 return self[key] 63 64 65 def load_yaml(filename=None): 66 # Find the requested yaml in the config dir, relative to this file's location 67 # (aiming for cfme_tests/config) 68 this_file = os.path.abspath(__file__) 69 path = py.path.local(this_file).new(basename='../conf/%s.yaml' % filename) 70 71 if path.check(): 72 with path.open() as config_fh: 73 return yaml.load(config_fh, Loader=OrderedYamlLoader) 74 else: 75 msg = 'Unable to load configuration file at %s' % path 76 raise ConfigNotFoundException(msg) 77 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/utils/conf_loader.py b/utils/conf_loader.py --- a/utils/conf_loader.py +++ b/utils/conf_loader.py @@ -1,17 +1,19 @@ import os -from collections import OrderedDict import py.path import yaml from yaml.loader import Loader -class OrderedYamlLoader(Loader): +class YamlConfigLoader(Loader): + # Override the root yaml node to be a RecursiveUpdateDict def construct_yaml_map(self, node): - data = OrderedDict() + data = RecursiveUpdateDict() yield data value = self.construct_mapping(node) data.update(value) +# Do the same for child nodes of the yaml mapping type +YamlConfigLoader.add_constructor('tag:yaml.org,2002:map', YamlConfigLoader.construct_yaml_map) class ConfigNotFoundException(Exception): @@ -62,6 +64,43 @@ return self[key] +class RecursiveUpdateDict(dict): + def update(self, new_data): + """ More intelligent dictionary update. + + This method changes just data that have been changed. How does it work? + Imagine you want to change just VM name, other things should stay the same. + + Original config: + something: + somewhere: + VM: + a: 1 + b: 2 + name: qwer + c: 3 + + Instead of copying the whole part from original to the override with just 'name' changed, + you will write this: + + something: + somewhere: + VM: + name: tzui + + This digging deeper affects only dictionary values. Lists are unaffected! And so do other + types. + + Args: + new_data: Update data. + """ + for key, value in new_data.iteritems(): + if isinstance(value, type(self)) and key in self: + type(self).update(self[key], value) + else: + self[key] = new_data[key] + + def load_yaml(filename=None): # Find the requested yaml in the config dir, relative to this file's location # (aiming for cfme_tests/config) @@ -70,7 +109,7 @@ if path.check(): with path.open() as config_fh: - return yaml.load(config_fh, Loader=OrderedYamlLoader) + return yaml.load(config_fh, Loader=YamlConfigLoader) else: msg = 'Unable to load configuration file at %s' % path raise ConfigNotFoundException(msg)
{"golden_diff": "diff --git a/utils/conf_loader.py b/utils/conf_loader.py\n--- a/utils/conf_loader.py\n+++ b/utils/conf_loader.py\n@@ -1,17 +1,19 @@\n import os\n-from collections import OrderedDict\n \n import py.path\n import yaml\n from yaml.loader import Loader\n \n \n-class OrderedYamlLoader(Loader):\n+class YamlConfigLoader(Loader):\n+ # Override the root yaml node to be a RecursiveUpdateDict\n def construct_yaml_map(self, node):\n- data = OrderedDict()\n+ data = RecursiveUpdateDict()\n yield data\n value = self.construct_mapping(node)\n data.update(value)\n+# Do the same for child nodes of the yaml mapping type\n+YamlConfigLoader.add_constructor('tag:yaml.org,2002:map', YamlConfigLoader.construct_yaml_map)\n \n \n class ConfigNotFoundException(Exception):\n@@ -62,6 +64,43 @@\n return self[key]\n \n \n+class RecursiveUpdateDict(dict):\n+ def update(self, new_data):\n+ \"\"\" More intelligent dictionary update.\n+\n+ This method changes just data that have been changed. How does it work?\n+ Imagine you want to change just VM name, other things should stay the same.\n+\n+ Original config:\n+ something:\n+ somewhere:\n+ VM:\n+ a: 1\n+ b: 2\n+ name: qwer\n+ c: 3\n+\n+ Instead of copying the whole part from original to the override with just 'name' changed,\n+ you will write this:\n+\n+ something:\n+ somewhere:\n+ VM:\n+ name: tzui\n+\n+ This digging deeper affects only dictionary values. Lists are unaffected! And so do other\n+ types.\n+\n+ Args:\n+ new_data: Update data.\n+ \"\"\"\n+ for key, value in new_data.iteritems():\n+ if isinstance(value, type(self)) and key in self:\n+ type(self).update(self[key], value)\n+ else:\n+ self[key] = new_data[key]\n+\n+\n def load_yaml(filename=None):\n # Find the requested yaml in the config dir, relative to this file's location\n # (aiming for cfme_tests/config)\n@@ -70,7 +109,7 @@\n \n if path.check():\n with path.open() as config_fh:\n- return yaml.load(config_fh, Loader=OrderedYamlLoader)\n+ return yaml.load(config_fh, Loader=YamlConfigLoader)\n else:\n msg = 'Unable to load configuration file at %s' % path\n raise ConfigNotFoundException(msg)\n", "issue": "Better YAML overriding\nNow it does not take just the root element into the account, but it crawls throught the dictionary and only updates the values that are present in the new dictionary. It converts all dicts to Configs, other values than specified in override dict are not touched.\n\nIt also improves the `__getattribute__` behaviour - now it propagates the interface to the child nodes by converting all `dict` to `Config` before returning the value, so the dot operator can be used everywhere.\n\n", "before_files": [{"content": "import os\nfrom collections import OrderedDict\n\nimport py.path\nimport yaml\nfrom yaml.loader import Loader\n\n\nclass OrderedYamlLoader(Loader):\n def construct_yaml_map(self, node):\n data = OrderedDict()\n yield data\n value = self.construct_mapping(node)\n data.update(value)\n\n\nclass ConfigNotFoundException(Exception):\n pass\n\n\nclass Config(dict):\n \"\"\"A dict subclass with knowledge of conf yamls and how to load them\n\n Also supports descriptor access, e.g. conf.configfile\n (compared to the normal dict access, conf['configfile'])\n \"\"\"\n # Stash the exception on the class for convenience, e.g.\n # try:\n # conf[does_not_exist]\n # except conf.NotFoundException\n # ...\n NotFoundException = ConfigNotFoundException\n\n # Support for descriptor access, e.g. 
instance.attrname\n # Note that this is only on the get side, for support of nefarious things\n # like setting and deleting, use the normal dict interface.\n def __getattribute__(self, attr):\n # Attempt normal object attr lookup; delegate to the dict interface if that fails\n try:\n return super(Config, self).__getattribute__(attr)\n except AttributeError:\n return self[attr]\n\n def __getitem__(self, key):\n # Attempt a normal dict lookup to pull a cached conf\n try:\n return super(Config, self).__getitem__(key)\n except KeyError:\n # Cache miss, load the requested yaml\n yaml_dict = load_yaml(key)\n\n # Graft in local yaml updates if they're available\n try:\n local_yaml = '%s.local' % key\n local_yaml_dict = load_yaml(local_yaml)\n yaml_dict.update(local_yaml_dict)\n except ConfigNotFoundException:\n pass\n\n # Returning self[key] instead of yaml_dict as a small sanity check\n self[key] = yaml_dict\n return self[key]\n\n\ndef load_yaml(filename=None):\n # Find the requested yaml in the config dir, relative to this file's location\n # (aiming for cfme_tests/config)\n this_file = os.path.abspath(__file__)\n path = py.path.local(this_file).new(basename='../conf/%s.yaml' % filename)\n\n if path.check():\n with path.open() as config_fh:\n return yaml.load(config_fh, Loader=OrderedYamlLoader)\n else:\n msg = 'Unable to load configuration file at %s' % path\n raise ConfigNotFoundException(msg)\n", "path": "utils/conf_loader.py"}], "after_files": [{"content": "import os\n\nimport py.path\nimport yaml\nfrom yaml.loader import Loader\n\n\nclass YamlConfigLoader(Loader):\n # Override the root yaml node to be a RecursiveUpdateDict\n def construct_yaml_map(self, node):\n data = RecursiveUpdateDict()\n yield data\n value = self.construct_mapping(node)\n data.update(value)\n# Do the same for child nodes of the yaml mapping type\nYamlConfigLoader.add_constructor('tag:yaml.org,2002:map', YamlConfigLoader.construct_yaml_map)\n\n\nclass ConfigNotFoundException(Exception):\n pass\n\n\nclass Config(dict):\n \"\"\"A dict subclass with knowledge of conf yamls and how to load them\n\n Also supports descriptor access, e.g. conf.configfile\n (compared to the normal dict access, conf['configfile'])\n \"\"\"\n # Stash the exception on the class for convenience, e.g.\n # try:\n # conf[does_not_exist]\n # except conf.NotFoundException\n # ...\n NotFoundException = ConfigNotFoundException\n\n # Support for descriptor access, e.g. instance.attrname\n # Note that this is only on the get side, for support of nefarious things\n # like setting and deleting, use the normal dict interface.\n def __getattribute__(self, attr):\n # Attempt normal object attr lookup; delegate to the dict interface if that fails\n try:\n return super(Config, self).__getattribute__(attr)\n except AttributeError:\n return self[attr]\n\n def __getitem__(self, key):\n # Attempt a normal dict lookup to pull a cached conf\n try:\n return super(Config, self).__getitem__(key)\n except KeyError:\n # Cache miss, load the requested yaml\n yaml_dict = load_yaml(key)\n\n # Graft in local yaml updates if they're available\n try:\n local_yaml = '%s.local' % key\n local_yaml_dict = load_yaml(local_yaml)\n yaml_dict.update(local_yaml_dict)\n except ConfigNotFoundException:\n pass\n\n # Returning self[key] instead of yaml_dict as a small sanity check\n self[key] = yaml_dict\n return self[key]\n\n\nclass RecursiveUpdateDict(dict):\n def update(self, new_data):\n \"\"\" More intelligent dictionary update.\n\n This method changes just data that have been changed. 
How does it work?\n Imagine you want to change just VM name, other things should stay the same.\n\n Original config:\n something:\n somewhere:\n VM:\n a: 1\n b: 2\n name: qwer\n c: 3\n\n Instead of copying the whole part from original to the override with just 'name' changed,\n you will write this:\n\n something:\n somewhere:\n VM:\n name: tzui\n\n This digging deeper affects only dictionary values. Lists are unaffected! And so do other\n types.\n\n Args:\n new_data: Update data.\n \"\"\"\n for key, value in new_data.iteritems():\n if isinstance(value, type(self)) and key in self:\n type(self).update(self[key], value)\n else:\n self[key] = new_data[key]\n\n\ndef load_yaml(filename=None):\n # Find the requested yaml in the config dir, relative to this file's location\n # (aiming for cfme_tests/config)\n this_file = os.path.abspath(__file__)\n path = py.path.local(this_file).new(basename='../conf/%s.yaml' % filename)\n\n if path.check():\n with path.open() as config_fh:\n return yaml.load(config_fh, Loader=YamlConfigLoader)\n else:\n msg = 'Unable to load configuration file at %s' % path\n raise ConfigNotFoundException(msg)\n", "path": "utils/conf_loader.py"}]}
1,042
563
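The `RecursiveUpdateDict.update` docstring above describes the override semantics in prose; the same behaviour as a standalone function, runnable against the docstring's own example (`name: qwer` overridden to `tzui` while `a`, `b`, `c` survive):

```python
# Minimal standalone version of the recursive override described above;
# dictionary values are merged key-by-key, everything else is replaced.
def recursive_update(base, new_data):
    for key, value in new_data.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            recursive_update(base[key], value)
        else:
            base[key] = value

conf = {"something": {"somewhere": {"VM": {"a": 1, "b": 2, "name": "qwer", "c": 3}}}}
recursive_update(conf, {"something": {"somewhere": {"VM": {"name": "tzui"}}}})
assert conf["something"]["somewhere"]["VM"] == {"a": 1, "b": 2, "name": "tzui", "c": 3}
```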
gh_patches_debug_20213
rasdani/github-patches
git_diff
ray-project__ray-1523
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- [rllib] [docs] Document multi-agent support We should document the new multi-agent support in rllib and have some examples in readthedocs. It would be good to cover the supported cases and which ones are not yet supported (or provide workarounds). --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `python/ray/rllib/examples/multiagent_pendulum_env.py` Content: ``` 1 from gym.spaces import Box, Tuple 2 from gym.utils import seeding 3 from gym.envs.classic_control.pendulum import PendulumEnv 4 import numpy as np 5 6 """ 7 Multiagent pendulum that sums its torques to generate an action 8 """ 9 10 11 class MultiAgentPendulumEnv(PendulumEnv): 12 metadata = { 13 'render.modes': ['human', 'rgb_array'], 14 'video.frames_per_second': 30 15 } 16 17 def __init__(self): 18 self.max_speed = 8 19 self.max_torque = 2. 20 self.dt = .05 21 self.viewer = None 22 23 high = np.array([1., 1., self.max_speed]) 24 self.action_space = [Box(low=-self.max_torque / 2, 25 high=self.max_torque / 2, shape=(1,)) 26 for _ in range(2)] 27 self.observation_space = Tuple(tuple(Box(low=-high, high=high) 28 for _ in range(2))) 29 30 self._seed() 31 32 def _seed(self, seed=None): 33 self.np_random, seed = seeding.np_random(seed) 34 return [seed] 35 36 def _step(self, u): 37 th, thdot = self.state # th := theta 38 39 summed_u = np.sum(u) 40 g = 10. 41 m = 1. 42 length = 1. 43 dt = self.dt 44 45 summed_u = np.clip(summed_u, -self.max_torque, self.max_torque) 46 self.last_u = summed_u # for rendering 47 costs = self.angle_normalize(th) ** 2 + .1 * thdot ** 2 + \ 48 .001 * (summed_u ** 2) 49 50 newthdot = thdot + (-3 * g / (2 * length) * np.sin(th + np.pi) + 51 3. / (m * length ** 2) * summed_u) * dt 52 newth = th + newthdot * dt 53 newthdot = np.clip(newthdot, -self.max_speed, self.max_speed) 54 55 self.state = np.array([newth, newthdot]) 56 return self._get_obs(), -costs, False, {} 57 58 def _reset(self): 59 high = np.array([np.pi, 1]) 60 self.state = self.np_random.uniform(low=-high, high=high) 61 self.last_u = None 62 return self._get_obs() 63 64 def _get_obs(self): 65 theta, thetadot = self.state 66 return [np.array([np.cos(theta), np.sin(theta), thetadot]) 67 for _ in range(2)] 68 69 def angle_normalize(self, x): 70 return (((x + np.pi) % (2 * np.pi)) - np.pi) 71 ``` Path: `python/ray/rllib/examples/multiagent_mountaincar_env.py` Content: ``` 1 import math 2 from gym.spaces import Box, Tuple, Discrete 3 import numpy as np 4 from gym.envs.classic_control.mountain_car import MountainCarEnv 5 6 """ 7 Multiagent mountain car that sums and then 8 averages its actions to produce the velocity 9 """ 10 11 12 class MultiAgentMountainCarEnv(MountainCarEnv): 13 def __init__(self): 14 self.min_position = -1.2 15 self.max_position = 0.6 16 self.max_speed = 0.07 17 self.goal_position = 0.5 18 19 self.low = np.array([self.min_position, -self.max_speed]) 20 self.high = np.array([self.max_position, self.max_speed]) 21 22 self.viewer = None 23 24 self.action_space = [Discrete(3) for _ in range(2)] 25 self.observation_space = Tuple(tuple(Box(self.low, self.high) 26 for _ in range(2))) 27 28 self._seed() 29 self.reset() 30 31 def _step(self, action): 32 summed_act = 0.5 * np.sum(action) 33 34 position, velocity = self.state 35 velocity += (summed_act - 1) * 0.001 36 velocity += math.cos(3 * position) * (-0.0025) 37 velocity = np.clip(velocity, -self.max_speed, self.max_speed) 38 position += velocity 39 position = np.clip(position, self.min_position, self.max_position) 40 if (position == self.min_position and velocity < 0): 41 velocity = 0 42 43 done = bool(position >= self.goal_position) 44 45 reward = position 46 47 self.state = (position, velocity) 48 return [np.array(self.state) for _ in range(2)], reward, done, {} 49 50 def _reset(self): 51 self.state = np.array([self.np_random.uniform(low=-0.6, high=-0.4), 0]) 52 return [np.array(self.state) for _ in range(2)] 53 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/python/ray/rllib/examples/multiagent_mountaincar_env.py b/python/ray/rllib/examples/multiagent_mountaincar_env.py --- a/python/ray/rllib/examples/multiagent_mountaincar_env.py +++ b/python/ray/rllib/examples/multiagent_mountaincar_env.py @@ -22,8 +22,8 @@ self.viewer = None self.action_space = [Discrete(3) for _ in range(2)] - self.observation_space = Tuple(tuple(Box(self.low, self.high) - for _ in range(2))) + self.observation_space = Tuple([ + Box(self.low, self.high) for _ in range(2)]) self._seed() self.reset() diff --git a/python/ray/rllib/examples/multiagent_pendulum_env.py b/python/ray/rllib/examples/multiagent_pendulum_env.py --- a/python/ray/rllib/examples/multiagent_pendulum_env.py +++ b/python/ray/rllib/examples/multiagent_pendulum_env.py @@ -24,8 +24,8 @@ self.action_space = [Box(low=-self.max_torque / 2, high=self.max_torque / 2, shape=(1,)) for _ in range(2)] - self.observation_space = Tuple(tuple(Box(low=-high, high=high) - for _ in range(2))) + self.observation_space = Tuple([ + Box(low=-high, high=high) for _ in range(2)]) self._seed()
{"golden_diff": "diff --git a/python/ray/rllib/examples/multiagent_mountaincar_env.py b/python/ray/rllib/examples/multiagent_mountaincar_env.py\n--- a/python/ray/rllib/examples/multiagent_mountaincar_env.py\n+++ b/python/ray/rllib/examples/multiagent_mountaincar_env.py\n@@ -22,8 +22,8 @@\n self.viewer = None\n \n self.action_space = [Discrete(3) for _ in range(2)]\n- self.observation_space = Tuple(tuple(Box(self.low, self.high)\n- for _ in range(2)))\n+ self.observation_space = Tuple([\n+ Box(self.low, self.high) for _ in range(2)])\n \n self._seed()\n self.reset()\ndiff --git a/python/ray/rllib/examples/multiagent_pendulum_env.py b/python/ray/rllib/examples/multiagent_pendulum_env.py\n--- a/python/ray/rllib/examples/multiagent_pendulum_env.py\n+++ b/python/ray/rllib/examples/multiagent_pendulum_env.py\n@@ -24,8 +24,8 @@\n self.action_space = [Box(low=-self.max_torque / 2,\n high=self.max_torque / 2, shape=(1,))\n for _ in range(2)]\n- self.observation_space = Tuple(tuple(Box(low=-high, high=high)\n- for _ in range(2)))\n+ self.observation_space = Tuple([\n+ Box(low=-high, high=high) for _ in range(2)])\n \n self._seed()\n", "issue": "[rllib] [docs] Document multi-agent support\nWe should document the new multi-agent support in rllib and have some examples in readthedocs. It would be good to cover the supported cases and which ones are not yet supported (or provide workarounds).\n", "before_files": [{"content": "from gym.spaces import Box, Tuple\nfrom gym.utils import seeding\nfrom gym.envs.classic_control.pendulum import PendulumEnv\nimport numpy as np\n\n\"\"\"\n Multiagent pendulum that sums its torques to generate an action\n\"\"\"\n\n\nclass MultiAgentPendulumEnv(PendulumEnv):\n metadata = {\n 'render.modes': ['human', 'rgb_array'],\n 'video.frames_per_second': 30\n }\n\n def __init__(self):\n self.max_speed = 8\n self.max_torque = 2.\n self.dt = .05\n self.viewer = None\n\n high = np.array([1., 1., self.max_speed])\n self.action_space = [Box(low=-self.max_torque / 2,\n high=self.max_torque / 2, shape=(1,))\n for _ in range(2)]\n self.observation_space = Tuple(tuple(Box(low=-high, high=high)\n for _ in range(2)))\n\n self._seed()\n\n def _seed(self, seed=None):\n self.np_random, seed = seeding.np_random(seed)\n return [seed]\n\n def _step(self, u):\n th, thdot = self.state # th := theta\n\n summed_u = np.sum(u)\n g = 10.\n m = 1.\n length = 1.\n dt = self.dt\n\n summed_u = np.clip(summed_u, -self.max_torque, self.max_torque)\n self.last_u = summed_u # for rendering\n costs = self.angle_normalize(th) ** 2 + .1 * thdot ** 2 + \\\n .001 * (summed_u ** 2)\n\n newthdot = thdot + (-3 * g / (2 * length) * np.sin(th + np.pi) +\n 3. 
/ (m * length ** 2) * summed_u) * dt\n newth = th + newthdot * dt\n newthdot = np.clip(newthdot, -self.max_speed, self.max_speed)\n\n self.state = np.array([newth, newthdot])\n return self._get_obs(), -costs, False, {}\n\n def _reset(self):\n high = np.array([np.pi, 1])\n self.state = self.np_random.uniform(low=-high, high=high)\n self.last_u = None\n return self._get_obs()\n\n def _get_obs(self):\n theta, thetadot = self.state\n return [np.array([np.cos(theta), np.sin(theta), thetadot])\n for _ in range(2)]\n\n def angle_normalize(self, x):\n return (((x + np.pi) % (2 * np.pi)) - np.pi)\n", "path": "python/ray/rllib/examples/multiagent_pendulum_env.py"}, {"content": "import math\nfrom gym.spaces import Box, Tuple, Discrete\nimport numpy as np\nfrom gym.envs.classic_control.mountain_car import MountainCarEnv\n\n\"\"\"\nMultiagent mountain car that sums and then\naverages its actions to produce the velocity\n\"\"\"\n\n\nclass MultiAgentMountainCarEnv(MountainCarEnv):\n def __init__(self):\n self.min_position = -1.2\n self.max_position = 0.6\n self.max_speed = 0.07\n self.goal_position = 0.5\n\n self.low = np.array([self.min_position, -self.max_speed])\n self.high = np.array([self.max_position, self.max_speed])\n\n self.viewer = None\n\n self.action_space = [Discrete(3) for _ in range(2)]\n self.observation_space = Tuple(tuple(Box(self.low, self.high)\n for _ in range(2)))\n\n self._seed()\n self.reset()\n\n def _step(self, action):\n summed_act = 0.5 * np.sum(action)\n\n position, velocity = self.state\n velocity += (summed_act - 1) * 0.001\n velocity += math.cos(3 * position) * (-0.0025)\n velocity = np.clip(velocity, -self.max_speed, self.max_speed)\n position += velocity\n position = np.clip(position, self.min_position, self.max_position)\n if (position == self.min_position and velocity < 0):\n velocity = 0\n\n done = bool(position >= self.goal_position)\n\n reward = position\n\n self.state = (position, velocity)\n return [np.array(self.state) for _ in range(2)], reward, done, {}\n\n def _reset(self):\n self.state = np.array([self.np_random.uniform(low=-0.6, high=-0.4), 0])\n return [np.array(self.state) for _ in range(2)]\n", "path": "python/ray/rllib/examples/multiagent_mountaincar_env.py"}], "after_files": [{"content": "from gym.spaces import Box, Tuple\nfrom gym.utils import seeding\nfrom gym.envs.classic_control.pendulum import PendulumEnv\nimport numpy as np\n\n\"\"\"\n Multiagent pendulum that sums its torques to generate an action\n\"\"\"\n\n\nclass MultiAgentPendulumEnv(PendulumEnv):\n metadata = {\n 'render.modes': ['human', 'rgb_array'],\n 'video.frames_per_second': 30\n }\n\n def __init__(self):\n self.max_speed = 8\n self.max_torque = 2.\n self.dt = .05\n self.viewer = None\n\n high = np.array([1., 1., self.max_speed])\n self.action_space = [Box(low=-self.max_torque / 2,\n high=self.max_torque / 2, shape=(1,))\n for _ in range(2)]\n self.observation_space = Tuple([\n Box(low=-high, high=high) for _ in range(2)])\n\n self._seed()\n\n def _seed(self, seed=None):\n self.np_random, seed = seeding.np_random(seed)\n return [seed]\n\n def _step(self, u):\n th, thdot = self.state # th := theta\n\n summed_u = np.sum(u)\n g = 10.\n m = 1.\n length = 1.\n dt = self.dt\n\n summed_u = np.clip(summed_u, -self.max_torque, self.max_torque)\n self.last_u = summed_u # for rendering\n costs = self.angle_normalize(th) ** 2 + .1 * thdot ** 2 + \\\n .001 * (summed_u ** 2)\n\n newthdot = thdot + (-3 * g / (2 * length) * np.sin(th + np.pi) +\n 3. 
/ (m * length ** 2) * summed_u) * dt\n newth = th + newthdot * dt\n newthdot = np.clip(newthdot, -self.max_speed, self.max_speed)\n\n self.state = np.array([newth, newthdot])\n return self._get_obs(), -costs, False, {}\n\n def _reset(self):\n high = np.array([np.pi, 1])\n self.state = self.np_random.uniform(low=-high, high=high)\n self.last_u = None\n return self._get_obs()\n\n def _get_obs(self):\n theta, thetadot = self.state\n return [np.array([np.cos(theta), np.sin(theta), thetadot])\n for _ in range(2)]\n\n def angle_normalize(self, x):\n return (((x + np.pi) % (2 * np.pi)) - np.pi)\n", "path": "python/ray/rllib/examples/multiagent_pendulum_env.py"}, {"content": "import math\nfrom gym.spaces import Box, Tuple, Discrete\nimport numpy as np\nfrom gym.envs.classic_control.mountain_car import MountainCarEnv\n\n\"\"\"\nMultiagent mountain car that sums and then\naverages its actions to produce the velocity\n\"\"\"\n\n\nclass MultiAgentMountainCarEnv(MountainCarEnv):\n def __init__(self):\n self.min_position = -1.2\n self.max_position = 0.6\n self.max_speed = 0.07\n self.goal_position = 0.5\n\n self.low = np.array([self.min_position, -self.max_speed])\n self.high = np.array([self.max_position, self.max_speed])\n\n self.viewer = None\n\n self.action_space = [Discrete(3) for _ in range(2)]\n self.observation_space = Tuple([\n Box(self.low, self.high) for _ in range(2)])\n\n self._seed()\n self.reset()\n\n def _step(self, action):\n summed_act = 0.5 * np.sum(action)\n\n position, velocity = self.state\n velocity += (summed_act - 1) * 0.001\n velocity += math.cos(3 * position) * (-0.0025)\n velocity = np.clip(velocity, -self.max_speed, self.max_speed)\n position += velocity\n position = np.clip(position, self.min_position, self.max_position)\n if (position == self.min_position and velocity < 0):\n velocity = 0\n\n done = bool(position >= self.goal_position)\n\n reward = position\n\n self.state = (position, velocity)\n return [np.array(self.state) for _ in range(2)], reward, done, {}\n\n def _reset(self):\n self.state = np.array([self.np_random.uniform(low=-0.6, high=-0.4), 0])\n return [np.array(self.state) for _ in range(2)]\n", "path": "python/ray/rllib/examples/multiagent_mountaincar_env.py"}]}
1,617
343
gh_patches_debug_2086
rasdani/github-patches
git_diff
google__timesketch-90
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Importing of JSON timelines creates duplicate timelines with same name. Steps to reproduce 1) command line: echo '[ { "datetime": "2012-04-12T17:24:38-08:00", "timestamp_desc": "Test", "timestamp": 1334251478000000, "message": "Test message" } ]' > test_dupe.json tsctl json2ts --name test_dupe --file test_dupe.json tsctl json2ts --name test_dupe --file test_dupe.json 2) Create new sketch 3) Notice duplicate "test_dupe" timelines on list to select from. 4) Add both 5) Explore, using "*" as filter. 6) notice duplicate results. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `wsgi.py` Content: ``` 1 #!/usr/bin/env python 2 # Copyright 2015 Google Inc. All rights reserved. 3 # 4 # Licensed under the Apache License, Version 2.0 (the "License"); 5 # you may not use this file except in compliance with the License. 6 # You may obtain a copy of the License at 7 # 8 # http://www.apache.org/licenses/LICENSE-2.0 9 # 10 # Unless required by applicable law or agreed to in writing, software 11 # distributed under the License is distributed on an "AS IS" BASIS, 12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 # See the License for the specific language governing permissions and 14 # limitations under the License. 15 """This module is for creating the app for a WSGI server. 16 17 Example with Gunicorn: 18 $ gunicorn -b 127.0.0.1:4000 --log-file - wsgi:application 19 20 Example configuration for Apache with mod_wsgi (a2enmod mod_wsgi): 21 <VirtualHost *:443> 22 ServerAdmin root@localhost 23 SSLEngine On 24 SSLCertificateFile /etc/apache2/cert.crt 25 SSLCertificateKeyFile /etc/apache2/cert.key 26 WSGIScriptAlias / /path/to/this/file/wsgi.py 27 </VirtualHost> 28 """ 29 30 # If you installed Timesketch in a virtualenv you need to activate it. 31 # This needs to be before any imports in order to import from the virtualenv. 32 #activate_virtualenv = '/path/to/your/virtualenv/bin/activate_this.py' 33 #execfile(activate_virtualenv, dict(__file__=activate_virtualenv)) 34 35 from timesketch import create_app 36 from timesketch.models import db_session 37 38 application = create_app() 39 40 # Remove the session after every request or app shutdown. 41 @application.teardown_appcontext 42 def shutdown_session(exception=None): 43 db_session.remove() 44 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/wsgi.py b/wsgi.py --- a/wsgi.py +++ b/wsgi.py @@ -37,7 +37,8 @@ application = create_app() -# Remove the session after every request or app shutdown. +# pylint: disable=unused-argument @application.teardown_appcontext def shutdown_session(exception=None): + """Remove the database session after every request or app shutdown.""" db_session.remove()
{"golden_diff": "diff --git a/wsgi.py b/wsgi.py\n--- a/wsgi.py\n+++ b/wsgi.py\n@@ -37,7 +37,8 @@\n \n application = create_app()\n \n-# Remove the session after every request or app shutdown.\n+# pylint: disable=unused-argument\n @application.teardown_appcontext\n def shutdown_session(exception=None):\n+ \"\"\"Remove the database session after every request or app shutdown.\"\"\"\n db_session.remove()\n", "issue": "Importing of JSON timelines creates duplicate timelines with same name.\nSteps to reproduce\n1) command line:\necho '[\n {\n \"datetime\": \"2012-04-12T17:24:38-08:00\",\n \"timestamp_desc\": \"Test\",\n \"timestamp\": 1334251478000000,\n \"message\": \"Test message\"\n }\n]' > test_dupe.json \ntsctl json2ts --name test_dupe --file test_dupe.json\ntsctl json2ts --name test_dupe --file test_dupe.json\n\n2) Create new sketch\n3) Notice duplicate \"test_dupe\" timelines on list to select from.\n4) Add both\n5) Explore, using \"*\" as filter.\n6) notice duplicate results.\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# Copyright 2015 Google Inc. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"This module is for creating the app for a WSGI server.\n\nExample with Gunicorn:\n$ gunicorn -b 127.0.0.1:4000 --log-file - wsgi:application\n\nExample configuration for Apache with mod_wsgi (a2enmod mod_wsgi):\n<VirtualHost *:443>\n ServerAdmin root@localhost\n SSLEngine On\n SSLCertificateFile /etc/apache2/cert.crt\n SSLCertificateKeyFile /etc/apache2/cert.key\n WSGIScriptAlias / /path/to/this/file/wsgi.py\n</VirtualHost>\n\"\"\"\n\n# If you installed Timesketch in a virtualenv you need to activate it.\n# This needs to be before any imports in order to import from the virtualenv.\n#activate_virtualenv = '/path/to/your/virtualenv/bin/activate_this.py'\n#execfile(activate_virtualenv, dict(__file__=activate_virtualenv))\n\nfrom timesketch import create_app\nfrom timesketch.models import db_session\n\napplication = create_app()\n\n# Remove the session after every request or app shutdown.\[email protected]_appcontext\ndef shutdown_session(exception=None):\n db_session.remove()\n", "path": "wsgi.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# Copyright 2015 Google Inc. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"This module is for creating the app for a WSGI server.\n\nExample with Gunicorn:\n$ gunicorn -b 127.0.0.1:4000 --log-file - wsgi:application\n\nExample configuration for Apache with mod_wsgi (a2enmod mod_wsgi):\n<VirtualHost *:443>\n ServerAdmin root@localhost\n SSLEngine On\n SSLCertificateFile /etc/apache2/cert.crt\n SSLCertificateKeyFile /etc/apache2/cert.key\n WSGIScriptAlias / /path/to/this/file/wsgi.py\n</VirtualHost>\n\"\"\"\n\n# If you installed Timesketch in a virtualenv you need to activate it.\n# This needs to be before any imports in order to import from the virtualenv.\n#activate_virtualenv = '/path/to/your/virtualenv/bin/activate_this.py'\n#execfile(activate_virtualenv, dict(__file__=activate_virtualenv))\n\nfrom timesketch import create_app\nfrom timesketch.models import db_session\n\napplication = create_app()\n\n# pylint: disable=unused-argument\[email protected]_appcontext\ndef shutdown_session(exception=None):\n \"\"\"Remove the database session after every request or app shutdown.\"\"\"\n db_session.remove()\n", "path": "wsgi.py"}]}
920
96
gh_patches_debug_11147
rasdani/github-patches
git_diff
dask__dask-10113
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Removal of dask.compatibility.entry_points has broken dask-kubernetes It looks like `dask.compatibility.entry_points` was removed in #10070 without warning. This was being used in `dask-kubernetes` so CI is now failing. https://github.com/dask/dask-kubernetes/actions/runs/4499027159/jobs/7916366189?pr=683 cc @graingert @jrbourbeau --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `dask/compatibility.py` Content: ``` 1 import sys 2 3 from packaging.version import parse as parse_version 4 5 _PY_VERSION = parse_version(".".join(map(str, sys.version_info[:3]))) 6 7 _EMSCRIPTEN = sys.platform == "emscripten" 8 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/dask/compatibility.py b/dask/compatibility.py --- a/dask/compatibility.py +++ b/dask/compatibility.py @@ -1,7 +1,19 @@ import sys +import warnings +from importlib_metadata import entry_points as _entry_points from packaging.version import parse as parse_version _PY_VERSION = parse_version(".".join(map(str, sys.version_info[:3]))) _EMSCRIPTEN = sys.platform == "emscripten" + + +def entry_points(group=None): + warnings.warn( + "`dask.compatibility.entry_points` has been replaced by `importlib_metadata.entry_points` and will be removed " + "in a future version. Please use `importlib_metadata.entry_points` instead.", + DeprecationWarning, + stacklevel=2, + ) + return _entry_points(group=group)
{"golden_diff": "diff --git a/dask/compatibility.py b/dask/compatibility.py\n--- a/dask/compatibility.py\n+++ b/dask/compatibility.py\n@@ -1,7 +1,19 @@\n import sys\n+import warnings\n \n+from importlib_metadata import entry_points as _entry_points\n from packaging.version import parse as parse_version\n \n _PY_VERSION = parse_version(\".\".join(map(str, sys.version_info[:3])))\n \n _EMSCRIPTEN = sys.platform == \"emscripten\"\n+\n+\n+def entry_points(group=None):\n+ warnings.warn(\n+ \"`dask.compatibility.entry_points` has been replaced by `importlib_metadata.entry_points` and will be removed \"\n+ \"in a future version. Please use `importlib_metadata.entry_points` instead.\",\n+ DeprecationWarning,\n+ stacklevel=2,\n+ )\n+ return _entry_points(group=group)\n", "issue": "Removal of dask.compatibility.entry_points has broken dask-kubernetes\nIt looks like `dask.compatibility.entry_points` was removed in #10070 without warning. This was being used in `dask-kubernetes` so CI is now failing.\r\n\r\nhttps://github.com/dask/dask-kubernetes/actions/runs/4499027159/jobs/7916366189?pr=683\r\n\r\ncc @graingert @jrbourbeau \n", "before_files": [{"content": "import sys\n\nfrom packaging.version import parse as parse_version\n\n_PY_VERSION = parse_version(\".\".join(map(str, sys.version_info[:3])))\n\n_EMSCRIPTEN = sys.platform == \"emscripten\"\n", "path": "dask/compatibility.py"}], "after_files": [{"content": "import sys\nimport warnings\n\nfrom importlib_metadata import entry_points as _entry_points\nfrom packaging.version import parse as parse_version\n\n_PY_VERSION = parse_version(\".\".join(map(str, sys.version_info[:3])))\n\n_EMSCRIPTEN = sys.platform == \"emscripten\"\n\n\ndef entry_points(group=None):\n warnings.warn(\n \"`dask.compatibility.entry_points` has been replaced by `importlib_metadata.entry_points` and will be removed \"\n \"in a future version. Please use `importlib_metadata.entry_points` instead.\",\n DeprecationWarning,\n stacklevel=2,\n )\n return _entry_points(group=group)\n", "path": "dask/compatibility.py"}]}
426
194
gh_patches_debug_2563
rasdani/github-patches
git_diff
microsoft__ptvsd-297
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Unable to launch the debugger Getting the following error in master when debugging in VSC: ``` Could not connect to None: 60857 Traceback (most recent call last): File "/Users/donjayamanne/Desktop/Development/vscode/ptvsd/ptvsd/pydevd/pydevd.py", line 1620, in main debugger.connect(host, port) File "/Users/donjayamanne/Desktop/Development/vscode/ptvsd/ptvsd/pydevd/pydevd.py", line 326, in connect s = start_server(port) File "/Users/donjayamanne/Desktop/Development/vscode/ptvsd/ptvsd/wrapper.py", line 1766, in start_server server = _create_server(port) File "/Users/donjayamanne/Desktop/Development/vscode/ptvsd/ptvsd/wrapper.py", line 1701, in _create_server server.bind(('127.0.0.1', port)) OSError: [Errno 48] Address already in u ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `ptvsd/debugger.py` Content: ``` 1 # Copyright (c) Microsoft Corporation. All rights reserved. 2 # Licensed under the MIT License. See LICENSE in the project root 3 # for license information. 4 5 from ptvsd.__main__ import run_module, run_file 6 7 8 __author__ = "Microsoft Corporation <[email protected]>" 9 __version__ = "4.0.0a5" 10 11 # TODO: not needed? 12 DONT_DEBUG = [] 13 14 15 def debug(filename, port_num, debug_id, debug_options, run_as, **kwargs): 16 # TODO: docstring 17 address = (None, port_num) 18 if run_as == 'module': 19 run_module(address, filename, **kwargs) 20 else: 21 run_file(address, filename, **kwargs) 22 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/ptvsd/debugger.py b/ptvsd/debugger.py --- a/ptvsd/debugger.py +++ b/ptvsd/debugger.py @@ -14,7 +14,7 @@ def debug(filename, port_num, debug_id, debug_options, run_as, **kwargs): # TODO: docstring - address = (None, port_num) + address = ('localhost', port_num) if run_as == 'module': run_module(address, filename, **kwargs) else:
{"golden_diff": "diff --git a/ptvsd/debugger.py b/ptvsd/debugger.py\n--- a/ptvsd/debugger.py\n+++ b/ptvsd/debugger.py\n@@ -14,7 +14,7 @@\n \n def debug(filename, port_num, debug_id, debug_options, run_as, **kwargs):\n # TODO: docstring\n- address = (None, port_num)\n+ address = ('localhost', port_num)\n if run_as == 'module':\n run_module(address, filename, **kwargs)\n else:\n", "issue": "Unable to launch the debugger\nGetting the following error in master when debugging in VSC:\r\n```\r\nCould not connect to None: 60857\r\nTraceback (most recent call last):\r\n File \"/Users/donjayamanne/Desktop/Development/vscode/ptvsd/ptvsd/pydevd/pydevd.py\", line 1620, in main\r\n debugger.connect(host, port)\r\n File \"/Users/donjayamanne/Desktop/Development/vscode/ptvsd/ptvsd/pydevd/pydevd.py\", line 326, in connect\r\n s = start_server(port)\r\n File \"/Users/donjayamanne/Desktop/Development/vscode/ptvsd/ptvsd/wrapper.py\", line 1766, in start_server\r\n server = _create_server(port)\r\n File \"/Users/donjayamanne/Desktop/Development/vscode/ptvsd/ptvsd/wrapper.py\", line 1701, in _create_server\r\n server.bind(('127.0.0.1', port))\r\nOSError: [Errno 48] Address already in u\r\n```\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom ptvsd.__main__ import run_module, run_file\n\n\n__author__ = \"Microsoft Corporation <[email protected]>\"\n__version__ = \"4.0.0a5\"\n\n# TODO: not needed?\nDONT_DEBUG = []\n\n\ndef debug(filename, port_num, debug_id, debug_options, run_as, **kwargs):\n # TODO: docstring\n address = (None, port_num)\n if run_as == 'module':\n run_module(address, filename, **kwargs)\n else:\n run_file(address, filename, **kwargs)\n", "path": "ptvsd/debugger.py"}], "after_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom ptvsd.__main__ import run_module, run_file\n\n\n__author__ = \"Microsoft Corporation <[email protected]>\"\n__version__ = \"4.0.0a5\"\n\n# TODO: not needed?\nDONT_DEBUG = []\n\n\ndef debug(filename, port_num, debug_id, debug_options, run_as, **kwargs):\n # TODO: docstring\n address = ('localhost', port_num)\n if run_as == 'module':\n run_module(address, filename, **kwargs)\n else:\n run_file(address, filename, **kwargs)\n", "path": "ptvsd/debugger.py"}]}
706
120
gh_patches_debug_31566
rasdani/github-patches
git_diff
getsentry__sentry-python-141
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Log more extra data for Celery The old integration in celery used to log arguments to the task and more. Add that to our celery integration --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `sentry_sdk/integrations/celery.py` Content: ``` 1 from __future__ import absolute_import 2 3 import sys 4 5 from celery.signals import task_failure, task_prerun, task_postrun 6 from celery.exceptions import SoftTimeLimitExceeded 7 8 from sentry_sdk.hub import Hub 9 from sentry_sdk.utils import capture_internal_exceptions, event_from_exception 10 from sentry_sdk.integrations import Integration 11 from sentry_sdk.integrations.logging import ignore_logger 12 13 14 class CeleryIntegration(Integration): 15 identifier = "celery" 16 17 @staticmethod 18 def setup_once(): 19 task_prerun.connect(_handle_task_prerun, weak=False) 20 task_postrun.connect(_handle_task_postrun, weak=False) 21 task_failure.connect(_process_failure_signal, weak=False) 22 23 # This logger logs every status of every task that ran on the worker. 24 # Meaning that every task's breadcrumbs are full of stuff like "Task 25 # <foo> raised unexpected <bar>". 26 ignore_logger("celery.worker.job") 27 28 29 def _process_failure_signal(sender, task_id, einfo, **kw): 30 # einfo from celery is not reliable 31 exc_info = sys.exc_info() 32 33 hub = Hub.current 34 integration = hub.get_integration(CeleryIntegration) 35 if integration is None: 36 return 37 38 if hasattr(sender, "throws") and isinstance(einfo.exception, sender.throws): 39 return 40 41 if isinstance(einfo.exception, SoftTimeLimitExceeded): 42 # TODO: Move this into event processor 43 with hub.push_scope() as scope: 44 scope.fingerprint = [ 45 "celery", 46 "SoftTimeLimitExceeded", 47 getattr(sender, "name", sender), 48 ] 49 _capture_event(hub, exc_info) 50 else: 51 _capture_event(hub, exc_info) 52 53 54 def _handle_task_prerun(sender, task, **kw): 55 hub = Hub.current 56 if hub.get_integration(CeleryIntegration) is not None: 57 scope = hub.push_scope().__enter__() 58 with capture_internal_exceptions(): 59 scope.transaction = task.name 60 61 62 def _handle_task_postrun(sender, task_id, task, **kw): 63 hub = Hub.current 64 if hub.get_integration(CeleryIntegration) is not None: 65 hub.pop_scope_unsafe() 66 67 68 def _capture_event(hub, exc_info): 69 event, hint = event_from_exception( 70 exc_info, 71 client_options=hub.client.options, 72 mechanism={"type": "celery", "handled": False}, 73 ) 74 hub.capture_event(event, hint=hint) 75 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -35,28 +35,48 @@ if integration is None: return - if hasattr(sender, "throws") and isinstance(einfo.exception, sender.throws): - return - - if isinstance(einfo.exception, SoftTimeLimitExceeded): - # TODO: Move this into event processor - with hub.push_scope() as scope: - scope.fingerprint = [ - "celery", - "SoftTimeLimitExceeded", - getattr(sender, "name", sender), - ] - _capture_event(hub, exc_info) - else: - _capture_event(hub, exc_info) + _capture_event(hub, exc_info) -def _handle_task_prerun(sender, task, **kw): +def _handle_task_prerun(sender, task, args, kwargs, **_): hub = Hub.current if hub.get_integration(CeleryIntegration) is not None: scope = hub.push_scope().__enter__() + scope.add_event_processor(_make_event_processor(args, kwargs, task)) + + +def _make_event_processor(args, kwargs, task): + def event_processor(event, hint): + with capture_internal_exceptions(): + if "transaction" not in event: + event["transaction"] = task.name + with capture_internal_exceptions(): - scope.transaction = task.name + extra = event.setdefault("extra", {}) + extra["celery-job"] = { + "task_name": task.name, + "args": args, + "kwargs": kwargs, + } + + if "exc_info" in hint: + with capture_internal_exceptions(): + if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded): + event["fingerprint"] = [ + "celery", + "SoftTimeLimitExceeded", + getattr(task, "name", task), + ] + + with capture_internal_exceptions(): + if hasattr(task, "throws") and isinstance( + hint["exc_info"][1], task.throws + ): + return None + + return event + + return event_processor def _handle_task_postrun(sender, task_id, task, **kw):
{"golden_diff": "diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py\n--- a/sentry_sdk/integrations/celery.py\n+++ b/sentry_sdk/integrations/celery.py\n@@ -35,28 +35,48 @@\n if integration is None:\n return\n \n- if hasattr(sender, \"throws\") and isinstance(einfo.exception, sender.throws):\n- return\n-\n- if isinstance(einfo.exception, SoftTimeLimitExceeded):\n- # TODO: Move this into event processor\n- with hub.push_scope() as scope:\n- scope.fingerprint = [\n- \"celery\",\n- \"SoftTimeLimitExceeded\",\n- getattr(sender, \"name\", sender),\n- ]\n- _capture_event(hub, exc_info)\n- else:\n- _capture_event(hub, exc_info)\n+ _capture_event(hub, exc_info)\n \n \n-def _handle_task_prerun(sender, task, **kw):\n+def _handle_task_prerun(sender, task, args, kwargs, **_):\n hub = Hub.current\n if hub.get_integration(CeleryIntegration) is not None:\n scope = hub.push_scope().__enter__()\n+ scope.add_event_processor(_make_event_processor(args, kwargs, task))\n+\n+\n+def _make_event_processor(args, kwargs, task):\n+ def event_processor(event, hint):\n+ with capture_internal_exceptions():\n+ if \"transaction\" not in event:\n+ event[\"transaction\"] = task.name\n+\n with capture_internal_exceptions():\n- scope.transaction = task.name\n+ extra = event.setdefault(\"extra\", {})\n+ extra[\"celery-job\"] = {\n+ \"task_name\": task.name,\n+ \"args\": args,\n+ \"kwargs\": kwargs,\n+ }\n+\n+ if \"exc_info\" in hint:\n+ with capture_internal_exceptions():\n+ if issubclass(hint[\"exc_info\"][0], SoftTimeLimitExceeded):\n+ event[\"fingerprint\"] = [\n+ \"celery\",\n+ \"SoftTimeLimitExceeded\",\n+ getattr(task, \"name\", task),\n+ ]\n+\n+ with capture_internal_exceptions():\n+ if hasattr(task, \"throws\") and isinstance(\n+ hint[\"exc_info\"][1], task.throws\n+ ):\n+ return None\n+\n+ return event\n+\n+ return event_processor\n \n \n def _handle_task_postrun(sender, task_id, task, **kw):\n", "issue": "Log more extra data for Celery\nThe old integration in celery used to log arguments to the task and more. 
Add that to our celery integration\n", "before_files": [{"content": "from __future__ import absolute_import\n\nimport sys\n\nfrom celery.signals import task_failure, task_prerun, task_postrun\nfrom celery.exceptions import SoftTimeLimitExceeded\n\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.utils import capture_internal_exceptions, event_from_exception\nfrom sentry_sdk.integrations import Integration\nfrom sentry_sdk.integrations.logging import ignore_logger\n\n\nclass CeleryIntegration(Integration):\n identifier = \"celery\"\n\n @staticmethod\n def setup_once():\n task_prerun.connect(_handle_task_prerun, weak=False)\n task_postrun.connect(_handle_task_postrun, weak=False)\n task_failure.connect(_process_failure_signal, weak=False)\n\n # This logger logs every status of every task that ran on the worker.\n # Meaning that every task's breadcrumbs are full of stuff like \"Task\n # <foo> raised unexpected <bar>\".\n ignore_logger(\"celery.worker.job\")\n\n\ndef _process_failure_signal(sender, task_id, einfo, **kw):\n # einfo from celery is not reliable\n exc_info = sys.exc_info()\n\n hub = Hub.current\n integration = hub.get_integration(CeleryIntegration)\n if integration is None:\n return\n\n if hasattr(sender, \"throws\") and isinstance(einfo.exception, sender.throws):\n return\n\n if isinstance(einfo.exception, SoftTimeLimitExceeded):\n # TODO: Move this into event processor\n with hub.push_scope() as scope:\n scope.fingerprint = [\n \"celery\",\n \"SoftTimeLimitExceeded\",\n getattr(sender, \"name\", sender),\n ]\n _capture_event(hub, exc_info)\n else:\n _capture_event(hub, exc_info)\n\n\ndef _handle_task_prerun(sender, task, **kw):\n hub = Hub.current\n if hub.get_integration(CeleryIntegration) is not None:\n scope = hub.push_scope().__enter__()\n with capture_internal_exceptions():\n scope.transaction = task.name\n\n\ndef _handle_task_postrun(sender, task_id, task, **kw):\n hub = Hub.current\n if hub.get_integration(CeleryIntegration) is not None:\n hub.pop_scope_unsafe()\n\n\ndef _capture_event(hub, exc_info):\n event, hint = event_from_exception(\n exc_info,\n client_options=hub.client.options,\n mechanism={\"type\": \"celery\", \"handled\": False},\n )\n hub.capture_event(event, hint=hint)\n", "path": "sentry_sdk/integrations/celery.py"}], "after_files": [{"content": "from __future__ import absolute_import\n\nimport sys\n\nfrom celery.signals import task_failure, task_prerun, task_postrun\nfrom celery.exceptions import SoftTimeLimitExceeded\n\nfrom sentry_sdk.hub import Hub\nfrom sentry_sdk.utils import capture_internal_exceptions, event_from_exception\nfrom sentry_sdk.integrations import Integration\nfrom sentry_sdk.integrations.logging import ignore_logger\n\n\nclass CeleryIntegration(Integration):\n identifier = \"celery\"\n\n @staticmethod\n def setup_once():\n task_prerun.connect(_handle_task_prerun, weak=False)\n task_postrun.connect(_handle_task_postrun, weak=False)\n task_failure.connect(_process_failure_signal, weak=False)\n\n # This logger logs every status of every task that ran on the worker.\n # Meaning that every task's breadcrumbs are full of stuff like \"Task\n # <foo> raised unexpected <bar>\".\n ignore_logger(\"celery.worker.job\")\n\n\ndef _process_failure_signal(sender, task_id, einfo, **kw):\n # einfo from celery is not reliable\n exc_info = sys.exc_info()\n\n hub = Hub.current\n integration = hub.get_integration(CeleryIntegration)\n if integration is None:\n return\n\n _capture_event(hub, exc_info)\n\n\ndef _handle_task_prerun(sender, task, args, kwargs, 
**_):\n hub = Hub.current\n if hub.get_integration(CeleryIntegration) is not None:\n scope = hub.push_scope().__enter__()\n scope.add_event_processor(_make_event_processor(args, kwargs, task))\n\n\ndef _make_event_processor(args, kwargs, task):\n def event_processor(event, hint):\n with capture_internal_exceptions():\n if \"transaction\" not in event:\n event[\"transaction\"] = task.name\n\n with capture_internal_exceptions():\n extra = event.setdefault(\"extra\", {})\n extra[\"celery-job\"] = {\n \"task_name\": task.name,\n \"args\": args,\n \"kwargs\": kwargs,\n }\n\n if \"exc_info\" in hint:\n with capture_internal_exceptions():\n if issubclass(hint[\"exc_info\"][0], SoftTimeLimitExceeded):\n event[\"fingerprint\"] = [\n \"celery\",\n \"SoftTimeLimitExceeded\",\n getattr(task, \"name\", task),\n ]\n\n with capture_internal_exceptions():\n if hasattr(task, \"throws\") and isinstance(\n hint[\"exc_info\"][1], task.throws\n ):\n return None\n\n return event\n\n return event_processor\n\n\ndef _handle_task_postrun(sender, task_id, task, **kw):\n hub = Hub.current\n if hub.get_integration(CeleryIntegration) is not None:\n hub.pop_scope_unsafe()\n\n\ndef _capture_event(hub, exc_info):\n event, hint = event_from_exception(\n exc_info,\n client_options=hub.client.options,\n mechanism={\"type\": \"celery\", \"handled\": False},\n )\n hub.capture_event(event, hint=hint)\n", "path": "sentry_sdk/integrations/celery.py"}]}
978
542
gh_patches_debug_34657
rasdani/github-patches
git_diff
pantsbuild__pants-14125
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- ResolveError: Directory '{mydir}' does not contain any BUILD files (when Dockerizing packages) **Describe the bug** Created a repo at https://github.com/sureshjoshi/pantsbuild-14031 to help illustrate this problem. Essentially, I use custom output paths for my .pex files, and while testing out the `docker_image` target, I noticed some of my components fail with the error > ResolveError: Directory 'backend' does not contain any BUILD files After a lot of debugging, I only ran into this problem when my output folders were common to multiple `pex_binary` targets. For example, in the repo above, I have 3 identical projects (A, B, C) - where they only differ by the `pex_binary` `output_path` (and this location updated in the associated Dockerfile), and one of the projects refuses to compile. As per the README in the repo: ```bash # Should create a pex at dist/backend/projecta/projecta.pex # Docker image created successfully as projecta-container:latest ./pants package backend/projecta:: # Should create a pex at dist/backend.projectc/projectc.pex # Docker image created successfully as projectc-container:latest ./pants package backend/projectc:: ``` ```bash # Should create a pex at dist/backend/projectb.pex ./pants package backend/projectb:projectb # FAILS: With ResolveError ./pants package backend/projectb:projectb-container ``` So, the difference above is that Project C uses no `output_path` and uses the dot-syntax for the dist folder. ProjectA places the pex file under a `backend/projecta` directory. The failing ProjectB places the pex file directly under `backend`. This isn't a big issue, and easily worked around, and I'm guessing it has to do with namespacing or module/package semantics, but it's just a weird problem that is difficult to debug based on the error message. **Pants version** - 2.8.0 - 2.9.0rc1 **OS** macOS 12.1 Untested on Linux --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `src/python/pants/backend/docker/util_rules/dependencies.py` Content: ``` 1 # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). 2 # Licensed under the Apache License, Version 2.0 (see LICENSE). 3 4 from pants.backend.docker.subsystems.dockerfile_parser import DockerfileInfo, DockerfileInfoRequest 5 from pants.backend.docker.target_types import DockerDependenciesField 6 from pants.core.goals.package import PackageFieldSet 7 from pants.engine.addresses import Addresses, UnparsedAddressInputs 8 from pants.engine.rules import Get, collect_rules, rule 9 from pants.engine.target import ( 10 FieldSetsPerTarget, 11 FieldSetsPerTargetRequest, 12 InjectDependenciesRequest, 13 InjectedDependencies, 14 Targets, 15 ) 16 from pants.engine.unions import UnionRule 17 18 19 class InjectDockerDependencies(InjectDependenciesRequest): 20 inject_for = DockerDependenciesField 21 22 23 @rule 24 async def inject_docker_dependencies(request: InjectDockerDependencies) -> InjectedDependencies: 25 """Inspects COPY instructions in the Dockerfile for references to known targets.""" 26 dockerfile_info = await Get( 27 DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address) 28 ) 29 30 targets = await Get( 31 Targets, 32 UnparsedAddressInputs( 33 dockerfile_info.putative_target_addresses, 34 owning_address=dockerfile_info.address, 35 ), 36 ) 37 package = await Get(FieldSetsPerTarget, FieldSetsPerTargetRequest(PackageFieldSet, targets)) 38 referenced_targets = ( 39 field_sets[0].address for field_sets in package.collection if len(field_sets) > 0 40 ) 41 return InjectedDependencies(Addresses(referenced_targets)) 42 43 44 def rules(): 45 return [ 46 *collect_rules(), 47 UnionRule(InjectDependenciesRequest, InjectDockerDependencies), 48 ] 49 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/src/python/pants/backend/docker/util_rules/dependencies.py b/src/python/pants/backend/docker/util_rules/dependencies.py --- a/src/python/pants/backend/docker/util_rules/dependencies.py +++ b/src/python/pants/backend/docker/util_rules/dependencies.py @@ -3,6 +3,7 @@ from pants.backend.docker.subsystems.dockerfile_parser import DockerfileInfo, DockerfileInfoRequest from pants.backend.docker.target_types import DockerDependenciesField +from pants.base.specs import AddressSpecs, MaybeEmptySiblingAddresses from pants.core.goals.package import PackageFieldSet from pants.engine.addresses import Addresses, UnparsedAddressInputs from pants.engine.rules import Get, collect_rules, rule @@ -22,18 +23,28 @@ @rule async def inject_docker_dependencies(request: InjectDockerDependencies) -> InjectedDependencies: - """Inspects COPY instructions in the Dockerfile for references to known targets.""" + """Inspects COPY instructions in the Dockerfile for references to known packagable targets.""" dockerfile_info = await Get( DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address) ) - targets = await Get( - Targets, + # Parse all putative target addresses. + putative_addresses = await Get( + Addresses, UnparsedAddressInputs( dockerfile_info.putative_target_addresses, owning_address=dockerfile_info.address, ), ) + + # Get the target for those addresses that are known. + directories = {address.spec_path for address in putative_addresses} + all_addresses = await Get(Addresses, AddressSpecs(map(MaybeEmptySiblingAddresses, directories))) + targets = await Get( + Targets, Addresses((address for address in putative_addresses if address in all_addresses)) + ) + + # Only keep those targets that we can "package". package = await Get(FieldSetsPerTarget, FieldSetsPerTargetRequest(PackageFieldSet, targets)) referenced_targets = ( field_sets[0].address for field_sets in package.collection if len(field_sets) > 0
{"golden_diff": "diff --git a/src/python/pants/backend/docker/util_rules/dependencies.py b/src/python/pants/backend/docker/util_rules/dependencies.py\n--- a/src/python/pants/backend/docker/util_rules/dependencies.py\n+++ b/src/python/pants/backend/docker/util_rules/dependencies.py\n@@ -3,6 +3,7 @@\n \n from pants.backend.docker.subsystems.dockerfile_parser import DockerfileInfo, DockerfileInfoRequest\n from pants.backend.docker.target_types import DockerDependenciesField\n+from pants.base.specs import AddressSpecs, MaybeEmptySiblingAddresses\n from pants.core.goals.package import PackageFieldSet\n from pants.engine.addresses import Addresses, UnparsedAddressInputs\n from pants.engine.rules import Get, collect_rules, rule\n@@ -22,18 +23,28 @@\n \n @rule\n async def inject_docker_dependencies(request: InjectDockerDependencies) -> InjectedDependencies:\n- \"\"\"Inspects COPY instructions in the Dockerfile for references to known targets.\"\"\"\n+ \"\"\"Inspects COPY instructions in the Dockerfile for references to known packagable targets.\"\"\"\n dockerfile_info = await Get(\n DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address)\n )\n \n- targets = await Get(\n- Targets,\n+ # Parse all putative target addresses.\n+ putative_addresses = await Get(\n+ Addresses,\n UnparsedAddressInputs(\n dockerfile_info.putative_target_addresses,\n owning_address=dockerfile_info.address,\n ),\n )\n+\n+ # Get the target for those addresses that are known.\n+ directories = {address.spec_path for address in putative_addresses}\n+ all_addresses = await Get(Addresses, AddressSpecs(map(MaybeEmptySiblingAddresses, directories)))\n+ targets = await Get(\n+ Targets, Addresses((address for address in putative_addresses if address in all_addresses))\n+ )\n+\n+ # Only keep those targets that we can \"package\".\n package = await Get(FieldSetsPerTarget, FieldSetsPerTargetRequest(PackageFieldSet, targets))\n referenced_targets = (\n field_sets[0].address for field_sets in package.collection if len(field_sets) > 0\n", "issue": "ResolveError: Directory '{mydir}' does not contain any BUILD files (when Dockerizing packages)\n**Describe the bug**\r\n\r\nCreated a repo at https://github.com/sureshjoshi/pantsbuild-14031 to help illustrate this problem. \r\n\r\nEssentially, I use custom output paths for my .pex files, and while testing out the `docker_image` target, I noticed some of my components fail with the error \r\n\r\n> ResolveError: Directory 'backend' does not contain any BUILD files\r\n\r\nAfter a lot of debugging, I only ran into this problem when my output folders were common to multiple `pex_binary` targets. 
\r\n\r\nFor example, in the repo above, I have 3 identical projects (A, B, C) - where they only differ by the `pex_binary` `output_path` (and this location updated in the associated Dockerfile), and one of the projects refuses to compile.\r\n\r\nAs per the README in the repo:\r\n\r\n```bash\r\n# Should create a pex at dist/backend/projecta/projecta.pex\r\n# Docker image created successfully as projecta-container:latest\r\n./pants package backend/projecta::\r\n\r\n# Should create a pex at dist/backend.projectc/projectc.pex\r\n# Docker image created successfully as projectc-container:latest\r\n./pants package backend/projectc::\r\n```\r\n\r\n```bash\r\n# Should create a pex at dist/backend/projectb.pex\r\n./pants package backend/projectb:projectb\r\n\r\n# FAILS: With ResolveError\r\n./pants package backend/projectb:projectb-container \r\n```\r\n\r\nSo, the difference above is that Project C uses no `output_path` and uses the dot-syntax for the dist folder. ProjectA places the pex file under a `backend/projecta` directory. The failing ProjectB places the pex file directly under `backend`.\r\n\r\nThis isn't a big issue, and easily worked around, and I'm guessing it has to do with namespacing or module/package semantics, but it's just a weird problem that is difficult to debug based on the error message.\r\n\r\n**Pants version**\r\n\r\n- 2.8.0\r\n- 2.9.0rc1\r\n\r\n**OS**\r\n\r\nmacOS 12.1\r\nUntested on Linux\r\n\n", "before_files": [{"content": "# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\nfrom pants.backend.docker.subsystems.dockerfile_parser import DockerfileInfo, DockerfileInfoRequest\nfrom pants.backend.docker.target_types import DockerDependenciesField\nfrom pants.core.goals.package import PackageFieldSet\nfrom pants.engine.addresses import Addresses, UnparsedAddressInputs\nfrom pants.engine.rules import Get, collect_rules, rule\nfrom pants.engine.target import (\n FieldSetsPerTarget,\n FieldSetsPerTargetRequest,\n InjectDependenciesRequest,\n InjectedDependencies,\n Targets,\n)\nfrom pants.engine.unions import UnionRule\n\n\nclass InjectDockerDependencies(InjectDependenciesRequest):\n inject_for = DockerDependenciesField\n\n\n@rule\nasync def inject_docker_dependencies(request: InjectDockerDependencies) -> InjectedDependencies:\n \"\"\"Inspects COPY instructions in the Dockerfile for references to known targets.\"\"\"\n dockerfile_info = await Get(\n DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address)\n )\n\n targets = await Get(\n Targets,\n UnparsedAddressInputs(\n dockerfile_info.putative_target_addresses,\n owning_address=dockerfile_info.address,\n ),\n )\n package = await Get(FieldSetsPerTarget, FieldSetsPerTargetRequest(PackageFieldSet, targets))\n referenced_targets = (\n field_sets[0].address for field_sets in package.collection if len(field_sets) > 0\n )\n return InjectedDependencies(Addresses(referenced_targets))\n\n\ndef rules():\n return [\n *collect_rules(),\n UnionRule(InjectDependenciesRequest, InjectDockerDependencies),\n ]\n", "path": "src/python/pants/backend/docker/util_rules/dependencies.py"}], "after_files": [{"content": "# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\nfrom pants.backend.docker.subsystems.dockerfile_parser import DockerfileInfo, DockerfileInfoRequest\nfrom pants.backend.docker.target_types import DockerDependenciesField\nfrom pants.base.specs import 
AddressSpecs, MaybeEmptySiblingAddresses\nfrom pants.core.goals.package import PackageFieldSet\nfrom pants.engine.addresses import Addresses, UnparsedAddressInputs\nfrom pants.engine.rules import Get, collect_rules, rule\nfrom pants.engine.target import (\n FieldSetsPerTarget,\n FieldSetsPerTargetRequest,\n InjectDependenciesRequest,\n InjectedDependencies,\n Targets,\n)\nfrom pants.engine.unions import UnionRule\n\n\nclass InjectDockerDependencies(InjectDependenciesRequest):\n inject_for = DockerDependenciesField\n\n\n@rule\nasync def inject_docker_dependencies(request: InjectDockerDependencies) -> InjectedDependencies:\n \"\"\"Inspects COPY instructions in the Dockerfile for references to known packagable targets.\"\"\"\n dockerfile_info = await Get(\n DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address)\n )\n\n # Parse all putative target addresses.\n putative_addresses = await Get(\n Addresses,\n UnparsedAddressInputs(\n dockerfile_info.putative_target_addresses,\n owning_address=dockerfile_info.address,\n ),\n )\n\n # Get the target for those addresses that are known.\n directories = {address.spec_path for address in putative_addresses}\n all_addresses = await Get(Addresses, AddressSpecs(map(MaybeEmptySiblingAddresses, directories)))\n targets = await Get(\n Targets, Addresses((address for address in putative_addresses if address in all_addresses))\n )\n\n # Only keep those targets that we can \"package\".\n package = await Get(FieldSetsPerTarget, FieldSetsPerTargetRequest(PackageFieldSet, targets))\n referenced_targets = (\n field_sets[0].address for field_sets in package.collection if len(field_sets) > 0\n )\n return InjectedDependencies(Addresses(referenced_targets))\n\n\ndef rules():\n return [\n *collect_rules(),\n UnionRule(InjectDependenciesRequest, InjectDockerDependencies),\n ]\n", "path": "src/python/pants/backend/docker/util_rules/dependencies.py"}]}
1,176
452
gh_patches_debug_1116
rasdani/github-patches
git_diff
scikit-hep__pyhf-895
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Docs build broken with Sphinx v3.1.0 # Description Today (2020-06-08) [Sphinx `v3.1.0`](https://github.com/sphinx-doc/sphinx/releases/tag/v3.1.0) was released which now classifies pyhf's particular usages of the "autoclass" directive as an Error in the docs generated for [`interpolators/code0.py`](https://github.com/scikit-hep/pyhf/blob/62becc2e469f89babf75534a2decfb3ace6ff179/src/pyhf/interpolators/code0.py) ``` Warning, treated as error: /home/runner/work/pyhf/pyhf/docs/_generated/pyhf.interpolators.code0.rst:8:Error in "autoclass" directive: 1 argument(s) required, 0 supplied. .. autoclass:: :show-inheritance: .. rubric:: Methods .. automethod:: .__init__ ##[error]Process completed with exit code 1. ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 from setuptools import setup 2 3 extras_require = { 4 'tensorflow': ['tensorflow~=2.0', 'tensorflow-probability~=0.8'], 5 'torch': ['torch~=1.2'], 6 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'], 7 'xmlio': ['uproot'], 8 'minuit': ['iminuit'], 9 } 10 extras_require['backends'] = sorted( 11 set( 12 extras_require['tensorflow'] 13 + extras_require['torch'] 14 + extras_require['jax'] 15 + extras_require['minuit'] 16 ) 17 ) 18 extras_require['contrib'] = sorted(set(['matplotlib'])) 19 20 extras_require['test'] = sorted( 21 set( 22 extras_require['backends'] 23 + extras_require['xmlio'] 24 + extras_require['contrib'] 25 + [ 26 'pyflakes', 27 'pytest~=3.5', 28 'pytest-cov>=2.5.1', 29 'pytest-mock', 30 'pytest-benchmark[histogram]', 31 'pytest-console-scripts', 32 'pytest-mpl', 33 'pydocstyle', 34 'coverage>=4.0', # coveralls 35 'papermill~=2.0', 36 'nteract-scrapbook~=0.2', 37 'check-manifest', 38 'jupyter', 39 'uproot~=3.3', 40 'graphviz', 41 'jsonpatch', 42 'black', 43 ] 44 ) 45 ) 46 extras_require['docs'] = sorted( 47 set( 48 [ 49 'sphinx', 50 'sphinxcontrib-bibtex', 51 'sphinx-click', 52 'sphinx_rtd_theme', 53 'nbsphinx', 54 'ipywidgets', 55 'sphinx-issues', 56 'sphinx-copybutton>0.2.9', 57 ] 58 ) 59 ) 60 extras_require['develop'] = sorted( 61 set( 62 extras_require['docs'] 63 + extras_require['test'] 64 + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'twine'] 65 ) 66 ) 67 extras_require['complete'] = sorted(set(sum(extras_require.values(), []))) 68 69 70 setup( 71 extras_require=extras_require, 72 use_scm_version=lambda: {'local_scheme': lambda version: ''}, 73 ) 74 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -46,7 +46,7 @@
 extras_require['docs'] = sorted(
     set(
         [
-            'sphinx',
+            'sphinx!=3.1.0',
            'sphinxcontrib-bibtex',
            'sphinx-click',
            'sphinx_rtd_theme',
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -46,7 +46,7 @@\n extras_require['docs'] = sorted(\n set(\n [\n- 'sphinx',\n+ 'sphinx!=3.1.0',\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n", "issue": "Docs build broken with Sphinx v3.1.0\n# Description\r\n\r\nToday (2020-06-08) [Sphinx `v3.1.0`](https://github.com/sphinx-doc/sphinx/releases/tag/v3.1.0) was released which now classifies pyhf's particular usages of the \"autoclass\" directive as an Error in the docs generated for [`interpolators/code0.py`](https://github.com/scikit-hep/pyhf/blob/62becc2e469f89babf75534a2decfb3ace6ff179/src/pyhf/interpolators/code0.py)\r\n\r\n```\r\nWarning, treated as error:\r\n/home/runner/work/pyhf/pyhf/docs/_generated/pyhf.interpolators.code0.rst:8:Error in \"autoclass\" directive:\r\n1 argument(s) required, 0 supplied.\r\n\r\n.. autoclass::\r\n :show-inheritance:\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n .. rubric:: Methods\r\n\r\n\r\n\r\n .. automethod:: .__init__\r\n##[error]Process completed with exit code 1.\r\n```\n", "before_files": [{"content": "from setuptools import setup\n\nextras_require = {\n 'tensorflow': ['tensorflow~=2.0', 'tensorflow-probability~=0.8'],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n 'xmlio': ['uproot'],\n 'minuit': ['iminuit'],\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted(set(['matplotlib']))\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + [\n 'pyflakes',\n 'pytest~=3.5',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'check-manifest',\n 'jupyter',\n 'uproot~=3.3',\n 'graphviz',\n 'jsonpatch',\n 'black',\n ]\n )\n)\nextras_require['docs'] = sorted(\n set(\n [\n 'sphinx',\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n ]\n )\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['test']\n + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'twine']\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup\n\nextras_require = {\n 'tensorflow': ['tensorflow~=2.0', 'tensorflow-probability~=0.8'],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n 'xmlio': ['uproot'],\n 'minuit': ['iminuit'],\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted(set(['matplotlib']))\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + [\n 'pyflakes',\n 'pytest~=3.5',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'check-manifest',\n 
'jupyter',\n 'uproot~=3.3',\n 'graphviz',\n 'jsonpatch',\n 'black',\n ]\n )\n)\nextras_require['docs'] = sorted(\n set(\n [\n 'sphinx!=3.1.0',\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n ]\n )\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['test']\n + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'twine']\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py"}]}
1126
86
gh_patches_debug_11412
rasdani/github-patches
git_diff
RedHatInsights__insights-core-3108
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- The modprobe combiner is raising AttributeError exceptions in production. The AllModProbe combiner is throwing a number of the exception AttributeError("'bool' object has no attribute 'append'",) in production. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `insights/combiners/modprobe.py` Content: ``` 1 """ 2 Modprobe configuration 3 ====================== 4 5 The modprobe configuration files are normally available to rules as a list of 6 ModProbe objects. This combiner turns those into one set of data, preserving 7 the original file name that defined modprobe configuration line using a tuple. 8 9 """ 10 11 from insights.core.plugins import combiner 12 from insights.parsers.modprobe import ModProbe 13 from .. import LegacyItemAccess 14 15 from collections import namedtuple 16 17 18 ModProbeValue = namedtuple("ModProbeValue", ['value', 'source']) 19 """ 20 A value from a ModProbe source 21 """ 22 23 24 @combiner(ModProbe) 25 class AllModProbe(LegacyItemAccess): 26 """ 27 Combiner for accessing all the modprobe configuration files in one 28 structure. 29 30 It's important for our reporting and information purposes to know not 31 only what the configuration was but where it was defined. Therefore, the 32 format of the data in this combiner is slightly different compared to the 33 ModProbe parser. Here, each 'value' is actually a 2-tuple, with the 34 actual data first and the file name from whence the value came second. 35 This does mean that you need to pull the value out of each item - e.g. 36 using a list comprehension - but it means that every item is associated 37 with the file it was defined in. 38 39 In line with the ModProbe configuration parser, the actual value is 40 usually a list of the space-separated parts on the line, and the 41 definitions for each module are similarly kept in a list, which makes 42 43 Thanks to the LegacyItemAccess class, this can also be treated as a 44 dictionary for look-ups of data in the `data` attribute. 45 46 Attributes: 47 data (dict): The combined data structures, with each item as a 48 2-tuple, as described above. 49 bad_lines(list): The list of unparseable lines from all files, with 50 each line as a 2-tuple as described above. 51 52 Sample data files:: 53 54 /etc/modprobe.conf: 55 # watchdog drivers 56 blacklist i8xx_tco 57 58 # Don't install the Firewire ethernet driver 59 install eth1394 /bin/true 60 61 /etc/modprobe.conf.d/no_ipv6.conf: 62 options ipv6 disable=1 63 install ipv6 /bin/true 64 65 Examples: 66 >>> all_modprobe = shared[AllModProbe] 67 >>> all_modprobe['alias'] 68 [] 69 >>> all_modprobe['blacklist'] 70 {'i8xx_tco': ModProbeValue(True, '/etc/modprobe.conf')} 71 >>> all_modprobe['install'] 72 {'eth1394': ModProbeValue(['/bin/true'], '/etc/modprobe.conf'), 73 'ipv6': ModProbeValue(['/bin/true'], '/etc/modprobe.conf.d/no_ipv6.conf')} 74 """ 75 def __init__(self, modprobe): 76 self.data = {} 77 self.bad_lines = [] 78 for mod in modprobe: 79 filename = mod.file_path # relative path inside archive 80 # Copy data section 81 for section, sectdict in mod.data.items(): 82 if section not in self.data: 83 self.data[section] = {} 84 for name, value in sectdict.items(): 85 if name in self.data[section]: 86 # append to this module's value - should only 87 # happen for aliases. 
88 self.data[section][name][0].append(value) 89 else: 90 # create new tuple 91 self.data[section][name] = ModProbeValue(value=value, source=filename) 92 # Copy bad lines, if any 93 if mod.bad_lines: 94 self.bad_lines.extend( 95 [ModProbeValue(value=line, source=filename) for line in mod.bad_lines] 96 ) 97 super(AllModProbe, self).__init__() 98 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/insights/combiners/modprobe.py b/insights/combiners/modprobe.py
--- a/insights/combiners/modprobe.py
+++ b/insights/combiners/modprobe.py
@@ -82,7 +82,7 @@
                 if section not in self.data:
                     self.data[section] = {}
                 for name, value in sectdict.items():
-                    if name in self.data[section]:
+                    if name in self.data[section] and type(self.data[section][name][0]) == list:
                         # append to this module's value - should only
                         # happen for aliases.
                         self.data[section][name][0].append(value)
{"golden_diff": "diff --git a/insights/combiners/modprobe.py b/insights/combiners/modprobe.py\n--- a/insights/combiners/modprobe.py\n+++ b/insights/combiners/modprobe.py\n@@ -82,7 +82,7 @@\n if section not in self.data:\n self.data[section] = {}\n for name, value in sectdict.items():\n- if name in self.data[section]:\n+ if name in self.data[section] and type(self.data[section][name][0]) == list:\n # append to this module's value - should only\n # happen for aliases.\n self.data[section][name][0].append(value)\n", "issue": "The modprobe combiner is raising AttributeError exceptions in production.\nThe AllModProbe combiner is throwing a number of the exception AttributeError(\"'bool' object has no attribute 'append'\",) in production.\n", "before_files": [{"content": "\"\"\"\nModprobe configuration\n======================\n\nThe modprobe configuration files are normally available to rules as a list of\nModProbe objects. This combiner turns those into one set of data, preserving\nthe original file name that defined modprobe configuration line using a tuple.\n\n\"\"\"\n\nfrom insights.core.plugins import combiner\nfrom insights.parsers.modprobe import ModProbe\nfrom .. import LegacyItemAccess\n\nfrom collections import namedtuple\n\n\nModProbeValue = namedtuple(\"ModProbeValue\", ['value', 'source'])\n\"\"\"\nA value from a ModProbe source\n\"\"\"\n\n\n@combiner(ModProbe)\nclass AllModProbe(LegacyItemAccess):\n \"\"\"\n Combiner for accessing all the modprobe configuration files in one\n structure.\n\n It's important for our reporting and information purposes to know not\n only what the configuration was but where it was defined. Therefore, the\n format of the data in this combiner is slightly different compared to the\n ModProbe parser. Here, each 'value' is actually a 2-tuple, with the\n actual data first and the file name from whence the value came second.\n This does mean that you need to pull the value out of each item - e.g.\n using a list comprehension - but it means that every item is associated\n with the file it was defined in.\n\n In line with the ModProbe configuration parser, the actual value is\n usually a list of the space-separated parts on the line, and the\n definitions for each module are similarly kept in a list, which makes\n\n Thanks to the LegacyItemAccess class, this can also be treated as a\n dictionary for look-ups of data in the `data` attribute.\n\n Attributes:\n data (dict): The combined data structures, with each item as a\n 2-tuple, as described above.\n bad_lines(list): The list of unparseable lines from all files, with\n each line as a 2-tuple as described above.\n\n Sample data files::\n\n /etc/modprobe.conf:\n # watchdog drivers\n blacklist i8xx_tco\n\n # Don't install the Firewire ethernet driver\n install eth1394 /bin/true\n\n /etc/modprobe.conf.d/no_ipv6.conf:\n options ipv6 disable=1\n install ipv6 /bin/true\n\n Examples:\n >>> all_modprobe = shared[AllModProbe]\n >>> all_modprobe['alias']\n []\n >>> all_modprobe['blacklist']\n {'i8xx_tco': ModProbeValue(True, '/etc/modprobe.conf')}\n >>> all_modprobe['install']\n {'eth1394': ModProbeValue(['/bin/true'], '/etc/modprobe.conf'),\n 'ipv6': ModProbeValue(['/bin/true'], '/etc/modprobe.conf.d/no_ipv6.conf')}\n \"\"\"\n def __init__(self, modprobe):\n self.data = {}\n self.bad_lines = []\n for mod in modprobe:\n filename = mod.file_path # relative path inside archive\n # Copy data section\n for section, sectdict in mod.data.items():\n if section not in self.data:\n self.data[section] = {}\n for name, value in 
sectdict.items():\n if name in self.data[section]:\n # append to this module's value - should only\n # happen for aliases.\n self.data[section][name][0].append(value)\n else:\n # create new tuple\n self.data[section][name] = ModProbeValue(value=value, source=filename)\n # Copy bad lines, if any\n if mod.bad_lines:\n self.bad_lines.extend(\n [ModProbeValue(value=line, source=filename) for line in mod.bad_lines]\n )\n super(AllModProbe, self).__init__()\n", "path": "insights/combiners/modprobe.py"}], "after_files": [{"content": "\"\"\"\nModprobe configuration\n======================\n\nThe modprobe configuration files are normally available to rules as a list of\nModProbe objects. This combiner turns those into one set of data, preserving\nthe original file name that defined modprobe configuration line using a tuple.\n\n\"\"\"\n\nfrom insights.core.plugins import combiner\nfrom insights.parsers.modprobe import ModProbe\nfrom .. import LegacyItemAccess\n\nfrom collections import namedtuple\n\n\nModProbeValue = namedtuple(\"ModProbeValue\", ['value', 'source'])\n\"\"\"\nA value from a ModProbe source\n\"\"\"\n\n\n@combiner(ModProbe)\nclass AllModProbe(LegacyItemAccess):\n \"\"\"\n Combiner for accessing all the modprobe configuration files in one\n structure.\n\n It's important for our reporting and information purposes to know not\n only what the configuration was but where it was defined. Therefore, the\n format of the data in this combiner is slightly different compared to the\n ModProbe parser. Here, each 'value' is actually a 2-tuple, with the\n actual data first and the file name from whence the value came second.\n This does mean that you need to pull the value out of each item - e.g.\n using a list comprehension - but it means that every item is associated\n with the file it was defined in.\n\n In line with the ModProbe configuration parser, the actual value is\n usually a list of the space-separated parts on the line, and the\n definitions for each module are similarly kept in a list, which makes\n\n Thanks to the LegacyItemAccess class, this can also be treated as a\n dictionary for look-ups of data in the `data` attribute.\n\n Attributes:\n data (dict): The combined data structures, with each item as a\n 2-tuple, as described above.\n bad_lines(list): The list of unparseable lines from all files, with\n each line as a 2-tuple as described above.\n\n Sample data files::\n\n /etc/modprobe.conf:\n # watchdog drivers\n blacklist i8xx_tco\n\n # Don't install the Firewire ethernet driver\n install eth1394 /bin/true\n\n /etc/modprobe.conf.d/no_ipv6.conf:\n options ipv6 disable=1\n install ipv6 /bin/true\n\n Examples:\n >>> all_modprobe = shared[AllModProbe]\n >>> all_modprobe['alias']\n []\n >>> all_modprobe['blacklist']\n {'i8xx_tco': ModProbeValue(True, '/etc/modprobe.conf')}\n >>> all_modprobe['install']\n {'eth1394': ModProbeValue(['/bin/true'], '/etc/modprobe.conf'),\n 'ipv6': ModProbeValue(['/bin/true'], '/etc/modprobe.conf.d/no_ipv6.conf')}\n \"\"\"\n def __init__(self, modprobe):\n self.data = {}\n self.bad_lines = []\n for mod in modprobe:\n filename = mod.file_path # relative path inside archive\n # Copy data section\n for section, sectdict in mod.data.items():\n if section not in self.data:\n self.data[section] = {}\n for name, value in sectdict.items():\n if name in self.data[section] and type(self.data[section][name][0]) == list:\n # append to this module's value - should only\n # happen for aliases.\n self.data[section][name][0].append(value)\n else:\n # create new tuple\n 
self.data[section][name] = ModProbeValue(value=value, source=filename)\n # Copy bad lines, if any\n if mod.bad_lines:\n self.bad_lines.extend(\n [ModProbeValue(value=line, source=filename) for line in mod.bad_lines]\n )\n super(AllModProbe, self).__init__()\n", "path": "insights/combiners/modprobe.py"}]}
1305
146
gh_patches_debug_33260
rasdani/github-patches
git_diff
apache__airflow-1056
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- UnicodeDecodeError in bash_operator.py Hi, I see a lot of these errors when running `airflow backfill` : ``` Traceback (most recent call last): File "/usr/lib/python2.7/logging/__init__.py", line 851, in emit msg = self.format(record) File "/usr/lib/python2.7/logging/__init__.py", line 724, in format return fmt.format(record) File "/usr/lib/python2.7/logging/__init__.py", line 467, in format s = self._fmt % record.__dict__ UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 13: ordinal not in range(128) Logged from file bash_operator.py, line 72 ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `airflow/operators/bash_operator.py` Content: ``` 1 2 from builtins import bytes 3 import logging 4 import sys 5 from subprocess import Popen, STDOUT, PIPE 6 from tempfile import gettempdir, NamedTemporaryFile 7 8 from airflow.utils import AirflowException 9 from airflow.models import BaseOperator 10 from airflow.utils import apply_defaults, TemporaryDirectory 11 12 13 class BashOperator(BaseOperator): 14 """ 15 Execute a Bash script, command or set of commands. 16 17 :param bash_command: The command, set of commands or reference to a 18 bash script (must be '.sh') to be executed. 19 :type bash_command: string 20 :param env: If env is not None, it must be a mapping that defines the 21 environment variables for the new process; these are used instead 22 of inheriting the current process environment, which is the default 23 behavior. 24 :type env: dict 25 """ 26 template_fields = ('bash_command', 'env') 27 template_ext = ('.sh', '.bash',) 28 ui_color = '#f0ede4' 29 30 @apply_defaults 31 def __init__( 32 self, 33 bash_command, 34 xcom_push=False, 35 env=None, 36 *args, **kwargs): 37 """ 38 If xcom_push is True, the last line written to stdout will also 39 be pushed to an XCom when the bash command completes. 
40 """ 41 super(BashOperator, self).__init__(*args, **kwargs) 42 self.bash_command = bash_command 43 self.env = env 44 self.xcom_push_flag = xcom_push 45 46 def execute(self, context): 47 """ 48 Execute the bash command in a temporary directory 49 which will be cleaned afterwards 50 """ 51 bash_command = self.bash_command 52 logging.info("tmp dir root location: \n" + gettempdir()) 53 with TemporaryDirectory(prefix='airflowtmp') as tmp_dir: 54 with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f: 55 56 f.write(bytes(bash_command, 'utf_8')) 57 f.flush() 58 fname = f.name 59 script_location = tmp_dir + "/" + fname 60 logging.info("Temporary script " 61 "location :{0}".format(script_location)) 62 logging.info("Running command: " + bash_command) 63 sp = Popen( 64 ['bash', fname], 65 stdout=PIPE, stderr=STDOUT, 66 cwd=tmp_dir, env=self.env) 67 68 self.sp = sp 69 70 logging.info("Output:") 71 line = '' 72 for line in iter(sp.stdout.readline, b''): 73 line = line.decode().strip() 74 logging.info(line) 75 sp.wait() 76 logging.info("Command exited with " 77 "return code {0}".format(sp.returncode)) 78 79 if sp.returncode: 80 raise AirflowException("Bash command failed") 81 82 if self.xcom_push_flag: 83 return line 84 85 def on_kill(self): 86 logging.info('Sending SIGTERM signal to bash subprocess') 87 self.sp.terminate() 88 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/airflow/operators/bash_operator.py b/airflow/operators/bash_operator.py
--- a/airflow/operators/bash_operator.py
+++ b/airflow/operators/bash_operator.py
@@ -1,7 +1,6 @@
 
 from builtins import bytes
 import logging
-import sys
 from subprocess import Popen, STDOUT, PIPE
 from tempfile import gettempdir, NamedTemporaryFile
 
@@ -22,6 +21,7 @@
     of inheriting the current process environment, which is the default
     behavior.
     :type env: dict
+    :type output_encoding: output encoding of bash command
     """
     template_fields = ('bash_command', 'env')
     template_ext = ('.sh', '.bash',)
@@ -33,6 +33,7 @@
             bash_command,
             xcom_push=False,
             env=None,
+            output_encoding='utf-8',
             *args, **kwargs):
         """
         If xcom_push is True, the last line written to stdout will also
@@ -42,6 +43,7 @@
         self.bash_command = bash_command
         self.env = env
         self.xcom_push_flag = xcom_push
+        self.output_encoding = output_encoding
 
     def execute(self, context):
         """
@@ -70,7 +72,7 @@
                 logging.info("Output:")
                 line = ''
                 for line in iter(sp.stdout.readline, b''):
-                    line = line.decode().strip()
+                    line = line.decode(self.output_encoding).strip()
                     logging.info(line)
                 sp.wait()
                 logging.info("Command exited with "
{"golden_diff": "diff --git a/airflow/operators/bash_operator.py b/airflow/operators/bash_operator.py\n--- a/airflow/operators/bash_operator.py\n+++ b/airflow/operators/bash_operator.py\n@@ -1,7 +1,6 @@\n \n from builtins import bytes\n import logging\n-import sys\n from subprocess import Popen, STDOUT, PIPE\n from tempfile import gettempdir, NamedTemporaryFile\n \n@@ -22,6 +21,7 @@\n of inheriting the current process environment, which is the default\n behavior.\n :type env: dict\n+ :type output_encoding: output encoding of bash command\n \"\"\"\n template_fields = ('bash_command', 'env')\n template_ext = ('.sh', '.bash',)\n@@ -33,6 +33,7 @@\n bash_command,\n xcom_push=False,\n env=None,\n+ output_encoding='utf-8',\n *args, **kwargs):\n \"\"\"\n If xcom_push is True, the last line written to stdout will also\n@@ -42,6 +43,7 @@\n self.bash_command = bash_command\n self.env = env\n self.xcom_push_flag = xcom_push\n+ self.output_encoding = output_encoding\n \n def execute(self, context):\n \"\"\"\n@@ -70,7 +72,7 @@\n logging.info(\"Output:\")\n line = ''\n for line in iter(sp.stdout.readline, b''):\n- line = line.decode().strip()\n+ line = line.decode(self.output_encoding).strip()\n logging.info(line)\n sp.wait()\n logging.info(\"Command exited with \"\n", "issue": "UnicodeDecodeError in bash_operator.py\nHi,\n\nI see a lot of these errors when running `airflow backfill` : \n\n```\nTraceback (most recent call last):\n File \"/usr/lib/python2.7/logging/__init__.py\", line 851, in emit\n msg = self.format(record)\n File \"/usr/lib/python2.7/logging/__init__.py\", line 724, in format\n return fmt.format(record)\n File \"/usr/lib/python2.7/logging/__init__.py\", line 467, in format\n s = self._fmt % record.__dict__\nUnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 13: ordinal not in range(128)\nLogged from file bash_operator.py, line 72\n```\n\n", "before_files": [{"content": "\nfrom builtins import bytes\nimport logging\nimport sys\nfrom subprocess import Popen, STDOUT, PIPE\nfrom tempfile import gettempdir, NamedTemporaryFile\n\nfrom airflow.utils import AirflowException\nfrom airflow.models import BaseOperator\nfrom airflow.utils import apply_defaults, TemporaryDirectory\n\n\nclass BashOperator(BaseOperator):\n \"\"\"\n Execute a Bash script, command or set of commands.\n\n :param bash_command: The command, set of commands or reference to a\n bash script (must be '.sh') to be executed.\n :type bash_command: string\n :param env: If env is not None, it must be a mapping that defines the\n environment variables for the new process; these are used instead\n of inheriting the current process environment, which is the default\n behavior.\n :type env: dict\n \"\"\"\n template_fields = ('bash_command', 'env')\n template_ext = ('.sh', '.bash',)\n ui_color = '#f0ede4'\n\n @apply_defaults\n def __init__(\n self,\n bash_command,\n xcom_push=False,\n env=None,\n *args, **kwargs):\n \"\"\"\n If xcom_push is True, the last line written to stdout will also\n be pushed to an XCom when the bash command completes.\n \"\"\"\n super(BashOperator, self).__init__(*args, **kwargs)\n self.bash_command = bash_command\n self.env = env\n self.xcom_push_flag = xcom_push\n\n def execute(self, context):\n \"\"\"\n Execute the bash command in a temporary directory\n which will be cleaned afterwards\n \"\"\"\n bash_command = self.bash_command\n logging.info(\"tmp dir root location: \\n\" + gettempdir())\n with TemporaryDirectory(prefix='airflowtmp') as tmp_dir:\n with NamedTemporaryFile(dir=tmp_dir, 
prefix=self.task_id) as f:\n\n f.write(bytes(bash_command, 'utf_8'))\n f.flush()\n fname = f.name\n script_location = tmp_dir + \"/\" + fname\n logging.info(\"Temporary script \"\n \"location :{0}\".format(script_location))\n logging.info(\"Running command: \" + bash_command)\n sp = Popen(\n ['bash', fname],\n stdout=PIPE, stderr=STDOUT,\n cwd=tmp_dir, env=self.env)\n\n self.sp = sp\n\n logging.info(\"Output:\")\n line = ''\n for line in iter(sp.stdout.readline, b''):\n line = line.decode().strip()\n logging.info(line)\n sp.wait()\n logging.info(\"Command exited with \"\n \"return code {0}\".format(sp.returncode))\n\n if sp.returncode:\n raise AirflowException(\"Bash command failed\")\n\n if self.xcom_push_flag:\n return line\n\n def on_kill(self):\n logging.info('Sending SIGTERM signal to bash subprocess')\n self.sp.terminate()\n", "path": "airflow/operators/bash_operator.py"}], "after_files": [{"content": "\nfrom builtins import bytes\nimport logging\nfrom subprocess import Popen, STDOUT, PIPE\nfrom tempfile import gettempdir, NamedTemporaryFile\n\nfrom airflow.utils import AirflowException\nfrom airflow.models import BaseOperator\nfrom airflow.utils import apply_defaults, TemporaryDirectory\n\n\nclass BashOperator(BaseOperator):\n \"\"\"\n Execute a Bash script, command or set of commands.\n\n :param bash_command: The command, set of commands or reference to a\n bash script (must be '.sh') to be executed.\n :type bash_command: string\n :param env: If env is not None, it must be a mapping that defines the\n environment variables for the new process; these are used instead\n of inheriting the current process environment, which is the default\n behavior.\n :type env: dict\n :type output_encoding: output encoding of bash command\n \"\"\"\n template_fields = ('bash_command', 'env')\n template_ext = ('.sh', '.bash',)\n ui_color = '#f0ede4'\n\n @apply_defaults\n def __init__(\n self,\n bash_command,\n xcom_push=False,\n env=None,\n output_encoding='utf-8',\n *args, **kwargs):\n \"\"\"\n If xcom_push is True, the last line written to stdout will also\n be pushed to an XCom when the bash command completes.\n \"\"\"\n super(BashOperator, self).__init__(*args, **kwargs)\n self.bash_command = bash_command\n self.env = env\n self.xcom_push_flag = xcom_push\n self.output_encoding = output_encoding\n\n def execute(self, context):\n \"\"\"\n Execute the bash command in a temporary directory\n which will be cleaned afterwards\n \"\"\"\n bash_command = self.bash_command\n logging.info(\"tmp dir root location: \\n\" + gettempdir())\n with TemporaryDirectory(prefix='airflowtmp') as tmp_dir:\n with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f:\n\n f.write(bytes(bash_command, 'utf_8'))\n f.flush()\n fname = f.name\n script_location = tmp_dir + \"/\" + fname\n logging.info(\"Temporary script \"\n \"location :{0}\".format(script_location))\n logging.info(\"Running command: \" + bash_command)\n sp = Popen(\n ['bash', fname],\n stdout=PIPE, stderr=STDOUT,\n cwd=tmp_dir, env=self.env)\n\n self.sp = sp\n\n logging.info(\"Output:\")\n line = ''\n for line in iter(sp.stdout.readline, b''):\n line = line.decode(self.output_encoding).strip()\n logging.info(line)\n sp.wait()\n logging.info(\"Command exited with \"\n \"return code {0}\".format(sp.returncode))\n\n if sp.returncode:\n raise AirflowException(\"Bash command failed\")\n\n if self.xcom_push_flag:\n return line\n\n def on_kill(self):\n logging.info('Sending SIGTERM signal to bash subprocess')\n self.sp.terminate()\n", "path": 
"airflow/operators/bash_operator.py"}]}
1230
346
gh_patches_debug_9103
rasdani/github-patches
git_diff
opsdroid__opsdroid-30
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Copy message on respond When a message responds it updates it's `text` value and passes itself to the connector. Due to pointers in Python the next rule to parse the message goes on to parse the response text. The message respond method should create a shallow copy of itself to pass to the connector, instead of updating itself directly. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `opsdroid/message.py` Content: ``` 1 """Class to encapsulate a message.""" 2 3 4 class Message: 5 # pylint: disable=too-few-public-methods 6 """A message object.""" 7 8 def __init__(self, text, user, room, connector): 9 """Create object with minimum properties.""" 10 self.text = text 11 self.user = user 12 self.room = room 13 self.connector = connector 14 self.regex = None 15 16 def respond(self, text): 17 """Respond to this message using the connector it was created by.""" 18 self.text = text 19 self.connector.respond(self) 20 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/opsdroid/message.py b/opsdroid/message.py
--- a/opsdroid/message.py
+++ b/opsdroid/message.py
@@ -1,5 +1,7 @@
 """Class to encapsulate a message."""
 
+from copy import copy
+
 
 class Message:
     # pylint: disable=too-few-public-methods
@@ -15,5 +17,6 @@
 
     def respond(self, text):
         """Respond to this message using the connector it was created by."""
-        self.text = text
-        self.connector.respond(self)
+        response = copy(self)
+        response.text = text
+        self.connector.respond(response)
{"golden_diff": "diff --git a/opsdroid/message.py b/opsdroid/message.py\n--- a/opsdroid/message.py\n+++ b/opsdroid/message.py\n@@ -1,5 +1,7 @@\n \"\"\"Class to encapsulate a message.\"\"\"\n \n+from copy import copy\n+\n \n class Message:\n # pylint: disable=too-few-public-methods\n@@ -15,5 +17,6 @@\n \n def respond(self, text):\n \"\"\"Respond to this message using the connector it was created by.\"\"\"\n- self.text = text\n- self.connector.respond(self)\n+ response = copy(self)\n+ response.text = text\n+ self.connector.respond(response)\n", "issue": "Copy message on respond\nWhen a message responds it updates it's `text` value and passes itself to the connector. Due to pointers in Python the next rule to parse the message goes on to parse the response text.\n\nThe message respond method should create a shallow copy of itself to pass to the connector, instead of updating itself directly.\n\n", "before_files": [{"content": "\"\"\"Class to encapsulate a message.\"\"\"\n\n\nclass Message:\n # pylint: disable=too-few-public-methods\n \"\"\"A message object.\"\"\"\n\n def __init__(self, text, user, room, connector):\n \"\"\"Create object with minimum properties.\"\"\"\n self.text = text\n self.user = user\n self.room = room\n self.connector = connector\n self.regex = None\n\n def respond(self, text):\n \"\"\"Respond to this message using the connector it was created by.\"\"\"\n self.text = text\n self.connector.respond(self)\n", "path": "opsdroid/message.py"}], "after_files": [{"content": "\"\"\"Class to encapsulate a message.\"\"\"\n\nfrom copy import copy\n\n\nclass Message:\n # pylint: disable=too-few-public-methods\n \"\"\"A message object.\"\"\"\n\n def __init__(self, text, user, room, connector):\n \"\"\"Create object with minimum properties.\"\"\"\n self.text = text\n self.user = user\n self.room = room\n self.connector = connector\n self.regex = None\n\n def respond(self, text):\n \"\"\"Respond to this message using the connector it was created by.\"\"\"\n response = copy(self)\n response.text = text\n self.connector.respond(response)\n", "path": "opsdroid/message.py"}]}
478
148
gh_patches_debug_25188
rasdani/github-patches
git_diff
helmholtz-analytics__heat-115
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Add unit tests for stride_tricks/broadcast_shape --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `heat/core/stride_tricks.py` Content: ``` 1 import itertools 2 3 4 def broadcast_shape(shape_a, shape_b): 5 """ 6 Infers, if possible, the broadcast output shape of two operands a and b. Inspired by stackoverflow post: 7 https://stackoverflow.com/questions/24743753/test-if-an-array-is-broadcastable-to-a-shape 8 9 Parameters 10 ---------- 11 shape_a : tuple of ints 12 shape of operand a 13 shape_b : tuple of ints 14 shape of operand b 15 16 Returns 17 ------- 18 broadcast_shape : tuple of ints 19 the broadcast shape 20 21 Raises 22 ------- 23 ValueError 24 If the two shapes cannot be broadcast. 25 """ 26 #TODO: test me 27 it = itertools.zip_longest(shape_a[::-1], shape_b[::-1], fillvalue=1) 28 resulting_shape = max(len(shape_a), len(shape_b)) * [None] 29 for i, (a, b) in enumerate(it): 30 if a == 1 or b == 1 or a == b: 31 resulting_shape[i] = max(a, b) 32 else: 33 raise ValueError('operands could not be broadcast, input shapes {} {}'.format(shape_a, shape_b)) 34 35 return tuple(resulting_shape[::-1]) 36 37 38 def sanitize_axis(shape, axis): 39 """ 40 Checks conformity of an axis with respect to a given shape. The axis will be converted to its positive equivalent 41 and is checked to be within bounds 42 43 Parameters 44 ---------- 45 shape : tuple of ints 46 shape of an array 47 axis : ints 48 the axis to be sanitized 49 50 Returns 51 ------- 52 sane_axis : int 53 the sane axis 54 55 Raises 56 ------- 57 ValueError 58 if the axis cannot be sanitized, i.e. out of bounds. 59 TypeError 60 if the the axis is not integral. 61 """ 62 #TODO: test me 63 64 if axis is not None: 65 if isinstance(axis, tuple): 66 raise NotImplementedError('Not implemented for axis: tuple of ints') 67 if not isinstance(axis, int): 68 raise TypeError('axis must be None or int, but was {}'.format(type(axis))) 69 70 if axis is None or 0 <= axis < len(shape): 71 return axis 72 elif axis < 0: 73 axis += len(shape) 74 75 if axis < 0 or axis >= len(shape): 76 raise ValueError('axis axis {} is out of bounds for shape {}'.format(axis, shape)) 77 78 return axis 79 80 81 def sanitize_shape(shape): 82 """ 83 Verifies and normalizes the given shape. 84 85 Parameters 86 ---------- 87 shape : int or sequence of ints 88 Shape of an array. 89 90 Returns 91 ------- 92 sane_shape : tuple of ints 93 The sanitized shape. 94 95 Raises 96 ------- 97 ValueError 98 If the shape contains illegal values, e.g. negative numbers. 99 TypeError 100 If the given shape is neither and int or a sequence of ints. 101 102 Examples 103 -------- 104 >>> sanitize_shape(3) 105 (3,) 106 107 >>> sanitize_shape([1, 2, 3]) 108 (1, 2, 3,) 109 110 >>> sanitize_shape(1.0) 111 TypeError 112 """ 113 shape = (shape,) if not hasattr(shape, '__iter__') else tuple(shape) 114 115 for dimension in shape: 116 if not isinstance(dimension, int): 117 raise TypeError('expected sequence object with length >= 0 or a single integer') 118 if dimension <= 0: 119 raise ValueError('negative dimensions are not allowed') 120 121 return shape 122 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/heat/core/stride_tricks.py b/heat/core/stride_tricks.py
--- a/heat/core/stride_tricks.py
+++ b/heat/core/stride_tricks.py
@@ -22,8 +22,22 @@
     -------
     ValueError
         If the two shapes cannot be broadcast.
+
+    Examples
+    -------
+    >>> broadcast_shape((5,4),(4,))
+    (5,4)
+
+    >>> broadcast_shape((1,100,1),(10,1,5))
+    (10,100,5)
+
+    >>> broadcast_shape((8,1,6,1),(7,1,5,))
+    (8,7,6,5))
+
+    >>> broadcast_shape((2,1),(8,4,3))
+    ValueError
     """
-    #TODO: test me
+
     it = itertools.zip_longest(shape_a[::-1], shape_b[::-1], fillvalue=1)
     resulting_shape = max(len(shape_a), len(shape_b)) * [None]
     for i, (a, b) in enumerate(it):
@@ -58,9 +72,23 @@
         if the axis cannot be sanitized, i.e. out of bounds.
     TypeError
         if the the axis is not integral.
+
+    Examples
+    -------
+    >>> sanitize_axis((5,4,4),1)
+    1
+
+    >>> sanitize_axis((5,4,4),-1)
+    2
+
+    >>> sanitize_axis((5, 4), (1,))
+    NotImplementedError
+
+    >>> sanitize_axis((5, 4), 1.0)
+    TypeError
+
     """
-    #TODO: test me
-    
+
    if axis is not None:
        if isinstance(axis, tuple):
            raise NotImplementedError('Not implemented for axis: tuple of ints')
{"golden_diff": "diff --git a/heat/core/stride_tricks.py b/heat/core/stride_tricks.py\n--- a/heat/core/stride_tricks.py\n+++ b/heat/core/stride_tricks.py\n@@ -22,8 +22,22 @@\n -------\n ValueError\n If the two shapes cannot be broadcast.\n+\n+ Examples\n+ -------\n+ >>> broadcast_shape((5,4),(4,))\n+ (5,4)\n+\n+ >>> broadcast_shape((1,100,1),(10,1,5))\n+ (10,100,5)\n+\n+ >>> broadcast_shape((8,1,6,1),(7,1,5,))\n+ (8,7,6,5))\n+\n+ >>> broadcast_shape((2,1),(8,4,3))\n+ ValueError\n \"\"\"\n- #TODO: test me\n+\n it = itertools.zip_longest(shape_a[::-1], shape_b[::-1], fillvalue=1)\n resulting_shape = max(len(shape_a), len(shape_b)) * [None]\n for i, (a, b) in enumerate(it):\n@@ -58,9 +72,23 @@\n if the axis cannot be sanitized, i.e. out of bounds.\n TypeError\n if the the axis is not integral.\n+\n+ Examples\n+ -------\n+ >>> sanitize_axis((5,4,4),1)\n+ 1\n+\n+ >>> sanitize_axis((5,4,4),-1)\n+ 2\n+\n+ >>> sanitize_axis((5, 4), (1,))\n+ NotImplementedError\n+\n+ >>> sanitize_axis((5, 4), 1.0)\n+ TypeError\n+\n \"\"\"\n- #TODO: test me\n- \n+\n if axis is not None:\n if isinstance(axis, tuple):\n raise NotImplementedError('Not implemented for axis: tuple of ints')\n", "issue": "Add unit tests for stride_tricks/broadcast_shape\n\n", "before_files": [{"content": "import itertools\n\n\ndef broadcast_shape(shape_a, shape_b):\n \"\"\"\n Infers, if possible, the broadcast output shape of two operands a and b. Inspired by stackoverflow post:\n https://stackoverflow.com/questions/24743753/test-if-an-array-is-broadcastable-to-a-shape\n\n Parameters\n ----------\n shape_a : tuple of ints\n shape of operand a\n shape_b : tuple of ints\n shape of operand b\n\n Returns\n -------\n broadcast_shape : tuple of ints\n the broadcast shape\n\n Raises\n -------\n ValueError\n If the two shapes cannot be broadcast.\n \"\"\"\n #TODO: test me\n it = itertools.zip_longest(shape_a[::-1], shape_b[::-1], fillvalue=1)\n resulting_shape = max(len(shape_a), len(shape_b)) * [None]\n for i, (a, b) in enumerate(it):\n if a == 1 or b == 1 or a == b:\n resulting_shape[i] = max(a, b)\n else:\n raise ValueError('operands could not be broadcast, input shapes {} {}'.format(shape_a, shape_b))\n\n return tuple(resulting_shape[::-1])\n\n\ndef sanitize_axis(shape, axis):\n \"\"\"\n Checks conformity of an axis with respect to a given shape. The axis will be converted to its positive equivalent\n and is checked to be within bounds\n\n Parameters\n ----------\n shape : tuple of ints\n shape of an array\n axis : ints\n the axis to be sanitized\n\n Returns\n -------\n sane_axis : int\n the sane axis\n\n Raises\n -------\n ValueError\n if the axis cannot be sanitized, i.e. out of bounds.\n TypeError\n if the the axis is not integral.\n \"\"\"\n #TODO: test me\n \n if axis is not None:\n if isinstance(axis, tuple):\n raise NotImplementedError('Not implemented for axis: tuple of ints')\n if not isinstance(axis, int):\n raise TypeError('axis must be None or int, but was {}'.format(type(axis)))\n\n if axis is None or 0 <= axis < len(shape):\n return axis\n elif axis < 0:\n axis += len(shape)\n\n if axis < 0 or axis >= len(shape):\n raise ValueError('axis axis {} is out of bounds for shape {}'.format(axis, shape))\n\n return axis\n\n\ndef sanitize_shape(shape):\n \"\"\"\n Verifies and normalizes the given shape.\n\n Parameters\n ----------\n shape : int or sequence of ints\n Shape of an array.\n\n Returns\n -------\n sane_shape : tuple of ints\n The sanitized shape.\n\n Raises\n -------\n ValueError\n If the shape contains illegal values, e.g. 
negative numbers.\n TypeError\n If the given shape is neither and int or a sequence of ints.\n\n Examples\n --------\n >>> sanitize_shape(3)\n (3,)\n\n >>> sanitize_shape([1, 2, 3])\n (1, 2, 3,)\n\n >>> sanitize_shape(1.0)\n TypeError\n \"\"\"\n shape = (shape,) if not hasattr(shape, '__iter__') else tuple(shape)\n\n for dimension in shape:\n if not isinstance(dimension, int):\n raise TypeError('expected sequence object with length >= 0 or a single integer')\n if dimension <= 0:\n raise ValueError('negative dimensions are not allowed')\n\n return shape\n", "path": "heat/core/stride_tricks.py"}], "after_files": [{"content": "import itertools\n\n\ndef broadcast_shape(shape_a, shape_b):\n \"\"\"\n Infers, if possible, the broadcast output shape of two operands a and b. Inspired by stackoverflow post:\n https://stackoverflow.com/questions/24743753/test-if-an-array-is-broadcastable-to-a-shape\n\n Parameters\n ----------\n shape_a : tuple of ints\n shape of operand a\n shape_b : tuple of ints\n shape of operand b\n\n Returns\n -------\n broadcast_shape : tuple of ints\n the broadcast shape\n\n Raises\n -------\n ValueError\n If the two shapes cannot be broadcast.\n\n Examples\n -------\n >>> broadcast_shape((5,4),(4,))\n (5,4)\n\n >>> broadcast_shape((1,100,1),(10,1,5))\n (10,100,5)\n\n >>> broadcast_shape((8,1,6,1),(7,1,5,))\n (8,7,6,5))\n\n >>> broadcast_shape((2,1),(8,4,3))\n ValueError\n \"\"\"\n\n it = itertools.zip_longest(shape_a[::-1], shape_b[::-1], fillvalue=1)\n resulting_shape = max(len(shape_a), len(shape_b)) * [None]\n for i, (a, b) in enumerate(it):\n if a == 1 or b == 1 or a == b:\n resulting_shape[i] = max(a, b)\n else:\n raise ValueError('operands could not be broadcast, input shapes {} {}'.format(shape_a, shape_b))\n\n return tuple(resulting_shape[::-1])\n\n\ndef sanitize_axis(shape, axis):\n \"\"\"\n Checks conformity of an axis with respect to a given shape. The axis will be converted to its positive equivalent\n and is checked to be within bounds\n\n Parameters\n ----------\n shape : tuple of ints\n shape of an array\n axis : ints\n the axis to be sanitized\n\n Returns\n -------\n sane_axis : int\n the sane axis\n\n Raises\n -------\n ValueError\n if the axis cannot be sanitized, i.e. out of bounds.\n TypeError\n if the the axis is not integral.\n\n Examples\n -------\n >>> sanitize_axis((5,4,4),1)\n 1\n\n >>> sanitize_axis((5,4,4),-1)\n 2\n\n >>> sanitize_axis((5, 4), (1,))\n NotImplementedError\n\n >>> sanitize_axis((5, 4), 1.0)\n TypeError\n\n \"\"\"\n\n if axis is not None:\n if isinstance(axis, tuple):\n raise NotImplementedError('Not implemented for axis: tuple of ints')\n if not isinstance(axis, int):\n raise TypeError('axis must be None or int, but was {}'.format(type(axis)))\n\n if axis is None or 0 <= axis < len(shape):\n return axis\n elif axis < 0:\n axis += len(shape)\n\n if axis < 0 or axis >= len(shape):\n raise ValueError('axis axis {} is out of bounds for shape {}'.format(axis, shape))\n\n return axis\n\n\ndef sanitize_shape(shape):\n \"\"\"\n Verifies and normalizes the given shape.\n\n Parameters\n ----------\n shape : int or sequence of ints\n Shape of an array.\n\n Returns\n -------\n sane_shape : tuple of ints\n The sanitized shape.\n\n Raises\n -------\n ValueError\n If the shape contains illegal values, e.g. 
negative numbers.\n TypeError\n If the given shape is neither and int or a sequence of ints.\n\n Examples\n --------\n >>> sanitize_shape(3)\n (3,)\n\n >>> sanitize_shape([1, 2, 3])\n (1, 2, 3,)\n\n >>> sanitize_shape(1.0)\n TypeError\n \"\"\"\n shape = (shape,) if not hasattr(shape, '__iter__') else tuple(shape)\n\n for dimension in shape:\n if not isinstance(dimension, int):\n raise TypeError('expected sequence object with length >= 0 or a single integer')\n if dimension <= 0:\n raise ValueError('negative dimensions are not allowed')\n\n return shape\n", "path": "heat/core/stride_tricks.py"}]}
1280
417
gh_patches_debug_34376
rasdani/github-patches
git_diff
ethereum__consensus-specs-1065
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- add linter to CI Add a new job to CI that uses `flake8` or something similar to lint the the pyspec after the python code is dumped to `spec.py`. This will likely need a bit of configuration (`maxline=120`, etc) and some cleaning up. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `scripts/phase0/function_puller.py` Content: ``` 1 import sys 2 from typing import List 3 4 5 def get_spec(file_name: str) -> List[str]: 6 code_lines = [] 7 pulling_from = None 8 current_name = None 9 current_typedef = None 10 type_defs = [] 11 for linenum, line in enumerate(open(sys.argv[1]).readlines()): 12 line = line.rstrip() 13 if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`': 14 current_name = line[line[:-1].rfind('`') + 1: -1] 15 if line[:9] == '```python': 16 assert pulling_from is None 17 pulling_from = linenum + 1 18 elif line[:3] == '```': 19 if pulling_from is None: 20 pulling_from = linenum 21 else: 22 if current_typedef is not None: 23 assert code_lines[-1] == '}' 24 code_lines[-1] = '})' 25 current_typedef[-1] = '})' 26 type_defs.append((current_name, current_typedef)) 27 pulling_from = None 28 current_typedef = None 29 else: 30 if pulling_from == linenum and line == '{': 31 code_lines.append('%s = SSZType({' % current_name) 32 current_typedef = ['global_vars["%s"] = SSZType({' % current_name] 33 elif pulling_from is not None: 34 # Add some whitespace between functions 35 if line[:3] == 'def': 36 code_lines.append('') 37 code_lines.append('') 38 code_lines.append(line) 39 # Remember type def lines 40 if current_typedef is not None: 41 current_typedef.append(line) 42 elif pulling_from is None and len(line) > 0 and line[0] == '|': 43 row = line[1:].split('|') 44 if len(row) >= 2: 45 for i in range(2): 46 row[i] = row[i].strip().strip('`') 47 if '`' in row[i]: 48 row[i] = row[i][:row[i].find('`')] 49 eligible = True 50 if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_': 51 eligible = False 52 for c in row[0]: 53 if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789': 54 eligible = False 55 if eligible: 56 code_lines.append(row[0] + ' = ' + (row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890'))) 57 # Build type-def re-initialization 58 code_lines.append('') 59 code_lines.append('def init_SSZ_types():') 60 code_lines.append(' global_vars = globals()') 61 for ssz_type_name, ssz_type in type_defs: 62 code_lines.append('') 63 for type_line in ssz_type: 64 code_lines.append(' ' + type_line) 65 code_lines.append('\n') 66 code_lines.append('ssz_types = [' + ', '.join([f'\'{ssz_type_name}\'' for (ssz_type_name, _) in type_defs]) + ']') 67 code_lines.append('\n') 68 code_lines.append('def get_ssz_type_by_name(name: str) -> SSZType:') 69 code_lines.append(' return globals()[name]') 70 code_lines.append('') 71 return code_lines 72 ``` Path: `scripts/phase0/build_spec.py` Content: ``` 1 import sys 2 import function_puller 3 4 5 def build_phase0_spec(sourcefile, outfile): 6 code_lines = [] 7 code_lines.append(""" 8 from typing import ( 9 Any, 10 Dict, 11 List, 12 NewType, 13 Tuple, 14 ) 15 from eth2spec.utils.minimal_ssz import * 16 from eth2spec.utils.bls_stub import * 17 18 # stub, will get overwritten by real var 19 SLOTS_PER_EPOCH = 64 20 21 Slot = NewType('Slot', int) # uint64 22 Epoch = NewType('Epoch', int) # uint64 23 Shard = NewType('Shard', int) # 
uint64 24 ValidatorIndex = NewType('ValidatorIndex', int) # uint64 25 Gwei = NewType('Gwei', int) # uint64 26 Bytes32 = NewType('Bytes32', bytes) # bytes32 27 BLSPubkey = NewType('BLSPubkey', bytes) # bytes48 28 BLSSignature = NewType('BLSSignature', bytes) # bytes96 29 Store = None 30 """) 31 32 code_lines += function_puller.get_spec(sourcefile) 33 34 code_lines.append(""" 35 # Monkey patch validator compute committee code 36 _compute_committee = compute_committee 37 committee_cache = {} 38 39 40 def compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]: 41 param_hash = (hash_tree_root(indices), seed, index, count) 42 43 if param_hash in committee_cache: 44 return committee_cache[param_hash] 45 else: 46 ret = _compute_committee(indices, seed, index, count) 47 committee_cache[param_hash] = ret 48 return ret 49 50 51 # Monkey patch hash cache 52 _hash = hash 53 hash_cache = {} 54 55 56 def hash(x): 57 if x in hash_cache: 58 return hash_cache[x] 59 else: 60 ret = _hash(x) 61 hash_cache[x] = ret 62 return ret 63 64 # Access to overwrite spec constants based on configuration 65 def apply_constants_preset(preset: Dict[str, Any]): 66 global_vars = globals() 67 for k, v in preset.items(): 68 global_vars[k] = v 69 70 # Deal with derived constants 71 global_vars['GENESIS_EPOCH'] = slot_to_epoch(GENESIS_SLOT) 72 73 # Initialize SSZ types again, to account for changed lengths 74 init_SSZ_types() 75 """) 76 77 with open(outfile, 'w') as out: 78 out.write("\n".join(code_lines)) 79 80 81 if __name__ == '__main__': 82 if len(sys.argv) < 3: 83 print("Usage: <source phase0> <output phase0 pyspec>") 84 build_phase0_spec(sys.argv[1], sys.argv[2]) 85 86 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/scripts/phase0/build_spec.py b/scripts/phase0/build_spec.py
--- a/scripts/phase0/build_spec.py
+++ b/scripts/phase0/build_spec.py
@@ -12,8 +12,18 @@
     NewType,
     Tuple,
 )
-from eth2spec.utils.minimal_ssz import *
-from eth2spec.utils.bls_stub import *
+from eth2spec.utils.minimal_ssz import (
+    SSZType,
+    hash_tree_root,
+    signing_root,
+)
+from eth2spec.utils.bls_stub import (
+    bls_aggregate_pubkeys,
+    bls_verify,
+    bls_verify_multiple,
+)
+from eth2spec.utils.hash_function import hash
+
 
 # stub, will get overwritten by real var
 SLOTS_PER_EPOCH = 64
@@ -61,6 +71,7 @@
         hash_cache[x] = ret
         return ret
 
+
 # Access to overwrite spec constants based on configuration
 def apply_constants_preset(preset: Dict[str, Any]):
     global_vars = globals()
diff --git a/scripts/phase0/function_puller.py b/scripts/phase0/function_puller.py
--- a/scripts/phase0/function_puller.py
+++ b/scripts/phase0/function_puller.py
@@ -55,15 +55,19 @@
                 if eligible:
                     code_lines.append(row[0] + ' = ' + (row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890')))
     # Build type-def re-initialization
-    code_lines.append('')
+    code_lines.append('\n')
    code_lines.append('def init_SSZ_types():')
    code_lines.append('    global_vars = globals()')
    for ssz_type_name, ssz_type in type_defs:
        code_lines.append('')
        for type_line in ssz_type:
-            code_lines.append('    ' + type_line)
+            if len(type_line) > 0:
+                code_lines.append('    ' + type_line)
    code_lines.append('\n')
-    code_lines.append('ssz_types = [' + ', '.join([f'\'{ssz_type_name}\'' for (ssz_type_name, _) in type_defs]) + ']')
+    code_lines.append('ssz_types = [\n')
+    for (ssz_type_name, _) in type_defs:
+        code_lines.append(f'    {ssz_type_name},\n')
+    code_lines.append(']')
    code_lines.append('\n')
    code_lines.append('def get_ssz_type_by_name(name: str) -> SSZType:')
    code_lines.append('    return globals()[name]')
{"golden_diff": "diff --git a/scripts/phase0/build_spec.py b/scripts/phase0/build_spec.py\n--- a/scripts/phase0/build_spec.py\n+++ b/scripts/phase0/build_spec.py\n@@ -12,8 +12,18 @@\n NewType,\n Tuple,\n )\n-from eth2spec.utils.minimal_ssz import *\n-from eth2spec.utils.bls_stub import *\n+from eth2spec.utils.minimal_ssz import (\n+ SSZType,\n+ hash_tree_root,\n+ signing_root,\n+)\n+from eth2spec.utils.bls_stub import (\n+ bls_aggregate_pubkeys,\n+ bls_verify,\n+ bls_verify_multiple,\n+)\n+from eth2spec.utils.hash_function import hash\n+\n \n # stub, will get overwritten by real var\n SLOTS_PER_EPOCH = 64\n@@ -61,6 +71,7 @@\n hash_cache[x] = ret\n return ret\n \n+\n # Access to overwrite spec constants based on configuration\n def apply_constants_preset(preset: Dict[str, Any]):\n global_vars = globals()\ndiff --git a/scripts/phase0/function_puller.py b/scripts/phase0/function_puller.py\n--- a/scripts/phase0/function_puller.py\n+++ b/scripts/phase0/function_puller.py\n@@ -55,15 +55,19 @@\n if eligible:\n code_lines.append(row[0] + ' = ' + (row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890')))\n # Build type-def re-initialization\n- code_lines.append('')\n+ code_lines.append('\\n')\n code_lines.append('def init_SSZ_types():')\n code_lines.append(' global_vars = globals()')\n for ssz_type_name, ssz_type in type_defs:\n code_lines.append('')\n for type_line in ssz_type:\n- code_lines.append(' ' + type_line)\n+ if len(type_line) > 0:\n+ code_lines.append(' ' + type_line)\n code_lines.append('\\n')\n- code_lines.append('ssz_types = [' + ', '.join([f'\\'{ssz_type_name}\\'' for (ssz_type_name, _) in type_defs]) + ']')\n+ code_lines.append('ssz_types = [\\n')\n+ for (ssz_type_name, _) in type_defs:\n+ code_lines.append(f' {ssz_type_name},\\n')\n+ code_lines.append(']')\n code_lines.append('\\n')\n code_lines.append('def get_ssz_type_by_name(name: str) -> SSZType:')\n code_lines.append(' return globals()[name]')\n", "issue": "add linter to CI\nAdd a new job to CI that uses `flake8` or something similar to lint the the pyspec after the python code is dumped to `spec.py`.\r\n\r\nThis will likely need a bit of configuration (`maxline=120`, etc) and some cleaning up.\n", "before_files": [{"content": "import sys\nfrom typing import List\n\n\ndef get_spec(file_name: str) -> List[str]:\n code_lines = []\n pulling_from = None\n current_name = None\n current_typedef = None\n type_defs = []\n for linenum, line in enumerate(open(sys.argv[1]).readlines()):\n line = line.rstrip()\n if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`':\n current_name = line[line[:-1].rfind('`') + 1: -1]\n if line[:9] == '```python':\n assert pulling_from is None\n pulling_from = linenum + 1\n elif line[:3] == '```':\n if pulling_from is None:\n pulling_from = linenum\n else:\n if current_typedef is not None:\n assert code_lines[-1] == '}'\n code_lines[-1] = '})'\n current_typedef[-1] = '})'\n type_defs.append((current_name, current_typedef))\n pulling_from = None\n current_typedef = None\n else:\n if pulling_from == linenum and line == '{':\n code_lines.append('%s = SSZType({' % current_name)\n current_typedef = ['global_vars[\"%s\"] = SSZType({' % current_name]\n elif pulling_from is not None:\n # Add some whitespace between functions\n if line[:3] == 'def':\n code_lines.append('')\n code_lines.append('')\n code_lines.append(line)\n # Remember type def lines\n if current_typedef is not None:\n current_typedef.append(line)\n elif pulling_from is None and len(line) > 0 and line[0] == 
'|':\n row = line[1:].split('|')\n if len(row) >= 2:\n for i in range(2):\n row[i] = row[i].strip().strip('`')\n if '`' in row[i]:\n row[i] = row[i][:row[i].find('`')]\n eligible = True\n if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_':\n eligible = False\n for c in row[0]:\n if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':\n eligible = False\n if eligible:\n code_lines.append(row[0] + ' = ' + (row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890')))\n # Build type-def re-initialization\n code_lines.append('')\n code_lines.append('def init_SSZ_types():')\n code_lines.append(' global_vars = globals()')\n for ssz_type_name, ssz_type in type_defs:\n code_lines.append('')\n for type_line in ssz_type:\n code_lines.append(' ' + type_line)\n code_lines.append('\\n')\n code_lines.append('ssz_types = [' + ', '.join([f'\\'{ssz_type_name}\\'' for (ssz_type_name, _) in type_defs]) + ']')\n code_lines.append('\\n')\n code_lines.append('def get_ssz_type_by_name(name: str) -> SSZType:')\n code_lines.append(' return globals()[name]')\n code_lines.append('')\n return code_lines\n", "path": "scripts/phase0/function_puller.py"}, {"content": "import sys\nimport function_puller\n\n\ndef build_phase0_spec(sourcefile, outfile):\n code_lines = []\n code_lines.append(\"\"\"\nfrom typing import (\n Any,\n Dict,\n List,\n NewType,\n Tuple,\n)\nfrom eth2spec.utils.minimal_ssz import *\nfrom eth2spec.utils.bls_stub import *\n\n# stub, will get overwritten by real var\nSLOTS_PER_EPOCH = 64\n\nSlot = NewType('Slot', int) # uint64\nEpoch = NewType('Epoch', int) # uint64\nShard = NewType('Shard', int) # uint64\nValidatorIndex = NewType('ValidatorIndex', int) # uint64\nGwei = NewType('Gwei', int) # uint64\nBytes32 = NewType('Bytes32', bytes) # bytes32\nBLSPubkey = NewType('BLSPubkey', bytes) # bytes48\nBLSSignature = NewType('BLSSignature', bytes) # bytes96\nStore = None\n\"\"\")\n\n code_lines += function_puller.get_spec(sourcefile)\n\n code_lines.append(\"\"\"\n# Monkey patch validator compute committee code\n_compute_committee = compute_committee\ncommittee_cache = {}\n\n\ndef compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]:\n param_hash = (hash_tree_root(indices), seed, index, count)\n\n if param_hash in committee_cache:\n return committee_cache[param_hash]\n else:\n ret = _compute_committee(indices, seed, index, count)\n committee_cache[param_hash] = ret\n return ret\n\n\n# Monkey patch hash cache\n_hash = hash\nhash_cache = {}\n\n\ndef hash(x):\n if x in hash_cache:\n return hash_cache[x]\n else:\n ret = _hash(x)\n hash_cache[x] = ret\n return ret\n\n# Access to overwrite spec constants based on configuration\ndef apply_constants_preset(preset: Dict[str, Any]):\n global_vars = globals()\n for k, v in preset.items():\n global_vars[k] = v\n\n # Deal with derived constants\n global_vars['GENESIS_EPOCH'] = slot_to_epoch(GENESIS_SLOT)\n\n # Initialize SSZ types again, to account for changed lengths\n init_SSZ_types()\n\"\"\")\n\n with open(outfile, 'w') as out:\n out.write(\"\\n\".join(code_lines))\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 3:\n print(\"Usage: <source phase0> <output phase0 pyspec>\")\n build_phase0_spec(sys.argv[1], sys.argv[2])\n\n", "path": "scripts/phase0/build_spec.py"}], "after_files": [{"content": "import sys\nfrom typing import List\n\n\ndef get_spec(file_name: str) -> List[str]:\n code_lines = []\n pulling_from = None\n current_name = None\n current_typedef = None\n type_defs = []\n for linenum, line 
in enumerate(open(sys.argv[1]).readlines()):\n line = line.rstrip()\n if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`':\n current_name = line[line[:-1].rfind('`') + 1: -1]\n if line[:9] == '```python':\n assert pulling_from is None\n pulling_from = linenum + 1\n elif line[:3] == '```':\n if pulling_from is None:\n pulling_from = linenum\n else:\n if current_typedef is not None:\n assert code_lines[-1] == '}'\n code_lines[-1] = '})'\n current_typedef[-1] = '})'\n type_defs.append((current_name, current_typedef))\n pulling_from = None\n current_typedef = None\n else:\n if pulling_from == linenum and line == '{':\n code_lines.append('%s = SSZType({' % current_name)\n current_typedef = ['global_vars[\"%s\"] = SSZType({' % current_name]\n elif pulling_from is not None:\n # Add some whitespace between functions\n if line[:3] == 'def':\n code_lines.append('')\n code_lines.append('')\n code_lines.append(line)\n # Remember type def lines\n if current_typedef is not None:\n current_typedef.append(line)\n elif pulling_from is None and len(line) > 0 and line[0] == '|':\n row = line[1:].split('|')\n if len(row) >= 2:\n for i in range(2):\n row[i] = row[i].strip().strip('`')\n if '`' in row[i]:\n row[i] = row[i][:row[i].find('`')]\n eligible = True\n if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_':\n eligible = False\n for c in row[0]:\n if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':\n eligible = False\n if eligible:\n code_lines.append(row[0] + ' = ' + (row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890')))\n # Build type-def re-initialization\n code_lines.append('\\n')\n code_lines.append('def init_SSZ_types():')\n code_lines.append(' global_vars = globals()')\n for ssz_type_name, ssz_type in type_defs:\n code_lines.append('')\n for type_line in ssz_type:\n if len(type_line) > 0:\n code_lines.append(' ' + type_line)\n code_lines.append('\\n')\n code_lines.append('ssz_types = [\\n')\n for (ssz_type_name, _) in type_defs:\n code_lines.append(f' {ssz_type_name},\\n')\n code_lines.append(']')\n code_lines.append('\\n')\n code_lines.append('def get_ssz_type_by_name(name: str) -> SSZType:')\n code_lines.append(' return globals()[name]')\n code_lines.append('')\n return code_lines\n", "path": "scripts/phase0/function_puller.py"}, {"content": "import sys\nimport function_puller\n\n\ndef build_phase0_spec(sourcefile, outfile):\n code_lines = []\n code_lines.append(\"\"\"\nfrom typing import (\n Any,\n Dict,\n List,\n NewType,\n Tuple,\n)\nfrom eth2spec.utils.minimal_ssz import (\n SSZType,\n hash_tree_root,\n signing_root,\n)\nfrom eth2spec.utils.bls_stub import (\n bls_aggregate_pubkeys,\n bls_verify,\n bls_verify_multiple,\n)\nfrom eth2spec.utils.hash_function import hash\n\n\n# stub, will get overwritten by real var\nSLOTS_PER_EPOCH = 64\n\nSlot = NewType('Slot', int) # uint64\nEpoch = NewType('Epoch', int) # uint64\nShard = NewType('Shard', int) # uint64\nValidatorIndex = NewType('ValidatorIndex', int) # uint64\nGwei = NewType('Gwei', int) # uint64\nBytes32 = NewType('Bytes32', bytes) # bytes32\nBLSPubkey = NewType('BLSPubkey', bytes) # bytes48\nBLSSignature = NewType('BLSSignature', bytes) # bytes96\nStore = None\n\"\"\")\n\n code_lines += function_puller.get_spec(sourcefile)\n\n code_lines.append(\"\"\"\n# Monkey patch validator compute committee code\n_compute_committee = compute_committee\ncommittee_cache = {}\n\n\ndef compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]:\n param_hash = 
(hash_tree_root(indices), seed, index, count)\n\n if param_hash in committee_cache:\n return committee_cache[param_hash]\n else:\n ret = _compute_committee(indices, seed, index, count)\n committee_cache[param_hash] = ret\n return ret\n\n\n# Monkey patch hash cache\n_hash = hash\nhash_cache = {}\n\n\ndef hash(x):\n if x in hash_cache:\n return hash_cache[x]\n else:\n ret = _hash(x)\n hash_cache[x] = ret\n return ret\n\n\n# Access to overwrite spec constants based on configuration\ndef apply_constants_preset(preset: Dict[str, Any]):\n global_vars = globals()\n for k, v in preset.items():\n global_vars[k] = v\n\n # Deal with derived constants\n global_vars['GENESIS_EPOCH'] = slot_to_epoch(GENESIS_SLOT)\n\n # Initialize SSZ types again, to account for changed lengths\n init_SSZ_types()\n\"\"\")\n\n with open(outfile, 'w') as out:\n out.write(\"\\n\".join(code_lines))\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 3:\n print(\"Usage: <source phase0> <output phase0 pyspec>\")\n build_phase0_spec(sys.argv[1], sys.argv[2])\n\n", "path": "scripts/phase0/build_spec.py"}]}
1,992
614
gh_patches_debug_6425
rasdani/github-patches
git_diff
helmholtz-analytics__heat-736
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Heat software development status "Beta" **Related** -- **Feature functionality** The software development status in PyPI is listed as "3 - Alpha". We are currently considering Heat as Beta, so this should be reflected in the status, which I propose to set to "4 - Beta". **Additional context** -- --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 from setuptools import setup, find_packages 2 import codecs 3 4 5 with codecs.open("README.md", "r", "utf-8") as handle: 6 long_description = handle.read() 7 8 __version__ = None # appeases flake, assignment in exec() below 9 with open("./heat/core/version.py") as handle: 10 exec(handle.read()) 11 12 setup( 13 name="heat", 14 packages=find_packages(exclude=("*tests*", "*benchmarks*")), 15 data_files=["README.md", "LICENSE"], 16 version=__version__, 17 description="A framework for high-performance data analytics and machine learning.", 18 long_description=long_description, 19 long_description_content_type="text/markdown", 20 author="Helmholtz Association", 21 author_email="[email protected]", 22 url="https://github.com/helmholtz-analytics/heat", 23 keywords=["data", "analytics", "tensors", "distributed", "gpu"], 24 python_requires="~=3.6", 25 classifiers=[ 26 "Development Status :: 3 - Alpha", 27 "Programming Language :: Python :: 3.6", 28 "Programming Language :: Python :: 3.7", 29 "Programming Language :: Python :: 3.8", 30 "License :: OSI Approved :: MIT License", 31 "Intended Audience :: Science/Research", 32 "Topic :: Scientific/Engineering", 33 ], 34 install_requires=[ 35 "mpi4py>=3.0.0", 36 "numpy>=1.13.0", 37 "torch>=1.7.0", 38 "scipy>=0.14.0", 39 "pillow>=6.0.0", 40 "torchvision>=0.5.0", 41 ], 42 extras_require={ 43 "hdf5": ["h5py>=2.8.0"], 44 "netcdf": ["netCDF4>=1.4.0"], 45 "dev": ["pre-commit>=1.18.3"], 46 }, 47 ) 48 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -23,7 +23,7 @@ keywords=["data", "analytics", "tensors", "distributed", "gpu"], python_requires="~=3.6", classifiers=[ - "Development Status :: 3 - Alpha", + "Development Status :: 4 - Beta", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8",
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -23,7 +23,7 @@\n keywords=[\"data\", \"analytics\", \"tensors\", \"distributed\", \"gpu\"],\n python_requires=\"~=3.6\",\n classifiers=[\n- \"Development Status :: 3 - Alpha\",\n+ \"Development Status :: 4 - Beta\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n", "issue": "Heat software development status \"Beta\"\n**Related**\r\n--\r\n\r\n**Feature functionality**\r\nThe software development status in PyPI is listed as \"3 - Alpha\". We are currently considering Heat as Beta, so this should be reflected in the status, which I propose to set to \"4 - Beta\".\r\n\r\n**Additional context**\r\n--\r\n\n", "before_files": [{"content": "from setuptools import setup, find_packages\nimport codecs\n\n\nwith codecs.open(\"README.md\", \"r\", \"utf-8\") as handle:\n long_description = handle.read()\n\n__version__ = None # appeases flake, assignment in exec() below\nwith open(\"./heat/core/version.py\") as handle:\n exec(handle.read())\n\nsetup(\n name=\"heat\",\n packages=find_packages(exclude=(\"*tests*\", \"*benchmarks*\")),\n data_files=[\"README.md\", \"LICENSE\"],\n version=__version__,\n description=\"A framework for high-performance data analytics and machine learning.\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"Helmholtz Association\",\n author_email=\"[email protected]\",\n url=\"https://github.com/helmholtz-analytics/heat\",\n keywords=[\"data\", \"analytics\", \"tensors\", \"distributed\", \"gpu\"],\n python_requires=\"~=3.6\",\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"License :: OSI Approved :: MIT License\",\n \"Intended Audience :: Science/Research\",\n \"Topic :: Scientific/Engineering\",\n ],\n install_requires=[\n \"mpi4py>=3.0.0\",\n \"numpy>=1.13.0\",\n \"torch>=1.7.0\",\n \"scipy>=0.14.0\",\n \"pillow>=6.0.0\",\n \"torchvision>=0.5.0\",\n ],\n extras_require={\n \"hdf5\": [\"h5py>=2.8.0\"],\n \"netcdf\": [\"netCDF4>=1.4.0\"],\n \"dev\": [\"pre-commit>=1.18.3\"],\n },\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup, find_packages\nimport codecs\n\n\nwith codecs.open(\"README.md\", \"r\", \"utf-8\") as handle:\n long_description = handle.read()\n\n__version__ = None # appeases flake, assignment in exec() below\nwith open(\"./heat/core/version.py\") as handle:\n exec(handle.read())\n\nsetup(\n name=\"heat\",\n packages=find_packages(exclude=(\"*tests*\", \"*benchmarks*\")),\n data_files=[\"README.md\", \"LICENSE\"],\n version=__version__,\n description=\"A framework for high-performance data analytics and machine learning.\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"Helmholtz Association\",\n author_email=\"[email protected]\",\n url=\"https://github.com/helmholtz-analytics/heat\",\n keywords=[\"data\", \"analytics\", \"tensors\", \"distributed\", \"gpu\"],\n python_requires=\"~=3.6\",\n classifiers=[\n \"Development Status :: 4 - Beta\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"License :: OSI Approved :: MIT License\",\n \"Intended Audience :: Science/Research\",\n \"Topic :: Scientific/Engineering\",\n ],\n 
install_requires=[\n \"mpi4py>=3.0.0\",\n \"numpy>=1.13.0\",\n \"torch>=1.7.0\",\n \"scipy>=0.14.0\",\n \"pillow>=6.0.0\",\n \"torchvision>=0.5.0\",\n ],\n extras_require={\n \"hdf5\": [\"h5py>=2.8.0\"],\n \"netcdf\": [\"netCDF4>=1.4.0\"],\n \"dev\": [\"pre-commit>=1.18.3\"],\n },\n)\n", "path": "setup.py"}]}
826
120
gh_patches_debug_467
rasdani/github-patches
git_diff
ocadotechnology__codeforlife-portal-442
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- New run on local fails because of latest pillow version Needs to be set to 2.9 --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 # -*- coding: utf-8 -*- 2 from setuptools import find_packages, setup 3 import versioneer 4 5 setup(name='codeforlife-portal', 6 cmdclass=versioneer.get_cmdclass(), 7 version=versioneer.get_version(), 8 packages=find_packages(), 9 include_package_data=True, 10 install_requires=[ 11 'django==1.8.2', 12 'django-appconf==1.0.1', 13 'django-countries==3.4.1', 14 'djangorestframework==3.1.3', 15 'django-jquery==1.9.1', 16 'django-autoconfig==0.3.6', 17 'django-pipeline==1.5.4', 18 19 'pyyaml==3.10', 20 'rapid-router >= 1.0.0.post.dev1', 21 'six==1.9.0', 22 'docutils==0.12', 23 'django-recaptcha-field==1.0b2', 24 'reportlab==3.2.0', 25 'postcodes==0.1', 26 'django-formtools==1.0', 27 'django-two-factor-auth==1.2.0', 28 'urllib3==1.10.4', 29 'requests==2.7.0', 30 31 'django-cms==3.1.2', 32 33 'django-classy-tags==0.6.1', 34 'django-treebeard==3.0', 35 'django-sekizai==0.8.2', 36 'djangocms-admin-style==0.2.8', 37 38 'djangocms-text-ckeditor==2.6.0', 39 'djangocms-link==1.6.2', 40 'djangocms-snippet==1.5', 41 'djangocms-style==1.5', 42 'djangocms-column==1.5', 43 'djangocms-grid==1.2', 44 'djangocms-oembed==0.5', 45 'djangocms-table==1.2', 46 'djangocms-file==0.1', 47 'djangocms_flash==0.2.0', 48 'djangocms_googlemap==0.3', 49 'djangocms_inherit==0.1', 50 'djangocms_picture==0.1', 51 'djangocms_teaser==0.1', 52 'djangocms_video==0.1', 53 'django-online-status==0.1.0', 54 55 56 'Pillow>=2.9.0', 57 'django-reversion==1.9.3', 58 'sqlparse', 59 'libsass', 60 ], 61 tests_require=[ 62 'django-setuptest', 63 'django-selenium-clean==0.2.1', 64 'responses==0.4.0', 65 'selenium==2.48.0', 66 ], 67 test_suite='setuptest.setuptest.SetupTestSuite', 68 zip_safe=False, 69 ) 70 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -53,7 +53,7 @@ 'django-online-status==0.1.0', - 'Pillow>=2.9.0', + 'Pillow==2.9.0', 'django-reversion==1.9.3', 'sqlparse', 'libsass',
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -53,7 +53,7 @@\n 'django-online-status==0.1.0',\n \n \n- 'Pillow>=2.9.0',\n+ 'Pillow==2.9.0',\n 'django-reversion==1.9.3',\n 'sqlparse',\n 'libsass',\n", "issue": "New run on local fails because of latest pillow version\nNeeds to be set to 2.9\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom setuptools import find_packages, setup\nimport versioneer\n\nsetup(name='codeforlife-portal',\n cmdclass=versioneer.get_cmdclass(),\n version=versioneer.get_version(),\n packages=find_packages(),\n include_package_data=True,\n install_requires=[\n 'django==1.8.2',\n 'django-appconf==1.0.1',\n 'django-countries==3.4.1',\n 'djangorestframework==3.1.3',\n 'django-jquery==1.9.1',\n 'django-autoconfig==0.3.6',\n 'django-pipeline==1.5.4',\n\n 'pyyaml==3.10',\n 'rapid-router >= 1.0.0.post.dev1',\n 'six==1.9.0',\n 'docutils==0.12',\n 'django-recaptcha-field==1.0b2',\n 'reportlab==3.2.0',\n 'postcodes==0.1',\n 'django-formtools==1.0',\n 'django-two-factor-auth==1.2.0',\n 'urllib3==1.10.4',\n 'requests==2.7.0',\n\n 'django-cms==3.1.2',\n\n 'django-classy-tags==0.6.1',\n 'django-treebeard==3.0',\n 'django-sekizai==0.8.2',\n 'djangocms-admin-style==0.2.8',\n\n 'djangocms-text-ckeditor==2.6.0',\n 'djangocms-link==1.6.2',\n 'djangocms-snippet==1.5',\n 'djangocms-style==1.5',\n 'djangocms-column==1.5',\n 'djangocms-grid==1.2',\n 'djangocms-oembed==0.5',\n 'djangocms-table==1.2',\n 'djangocms-file==0.1',\n 'djangocms_flash==0.2.0',\n 'djangocms_googlemap==0.3',\n 'djangocms_inherit==0.1',\n 'djangocms_picture==0.1',\n 'djangocms_teaser==0.1',\n 'djangocms_video==0.1',\n 'django-online-status==0.1.0',\n\n\n 'Pillow>=2.9.0',\n 'django-reversion==1.9.3',\n 'sqlparse',\n 'libsass',\n ],\n tests_require=[\n 'django-setuptest',\n 'django-selenium-clean==0.2.1',\n 'responses==0.4.0',\n 'selenium==2.48.0',\n ],\n test_suite='setuptest.setuptest.SetupTestSuite',\n zip_safe=False,\n )\n", "path": "setup.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nfrom setuptools import find_packages, setup\nimport versioneer\n\nsetup(name='codeforlife-portal',\n cmdclass=versioneer.get_cmdclass(),\n version=versioneer.get_version(),\n packages=find_packages(),\n include_package_data=True,\n install_requires=[\n 'django==1.8.2',\n 'django-appconf==1.0.1',\n 'django-countries==3.4.1',\n 'djangorestframework==3.1.3',\n 'django-jquery==1.9.1',\n 'django-autoconfig==0.3.6',\n 'django-pipeline==1.5.4',\n\n 'pyyaml==3.10',\n 'rapid-router >= 1.0.0.post.dev1',\n 'six==1.9.0',\n 'docutils==0.12',\n 'django-recaptcha-field==1.0b2',\n 'reportlab==3.2.0',\n 'postcodes==0.1',\n 'django-formtools==1.0',\n 'django-two-factor-auth==1.2.0',\n 'urllib3==1.10.4',\n 'requests==2.7.0',\n\n 'django-cms==3.1.2',\n\n 'django-classy-tags==0.6.1',\n 'django-treebeard==3.0',\n 'django-sekizai==0.8.2',\n 'djangocms-admin-style==0.2.8',\n\n 'djangocms-text-ckeditor==2.6.0',\n 'djangocms-link==1.6.2',\n 'djangocms-snippet==1.5',\n 'djangocms-style==1.5',\n 'djangocms-column==1.5',\n 'djangocms-grid==1.2',\n 'djangocms-oembed==0.5',\n 'djangocms-table==1.2',\n 'djangocms-file==0.1',\n 'djangocms_flash==0.2.0',\n 'djangocms_googlemap==0.3',\n 'djangocms_inherit==0.1',\n 'djangocms_picture==0.1',\n 'djangocms_teaser==0.1',\n 'djangocms_video==0.1',\n 'django-online-status==0.1.0',\n\n\n 'Pillow==2.9.0',\n 'django-reversion==1.9.3',\n 'sqlparse',\n 'libsass',\n ],\n tests_require=[\n 'django-setuptest',\n 'django-selenium-clean==0.2.1',\n 'responses==0.4.0',\n 
'selenium==2.48.0',\n ],\n test_suite='setuptest.setuptest.SetupTestSuite',\n zip_safe=False,\n )\n", "path": "setup.py"}]}
1,058
91
gh_patches_debug_24362
rasdani/github-patches
git_diff
liqd__a4-opin-496
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Markdown messes with Gender Mainstreaming
When writing “Initiator*innen […] Entscheidungsträger*innen” in a comment, the text between the `*` is set in italics, because of the markdown formatting, I assume. Is there anything we can do about that? If I remember it correctly, some version of markdown only allows underscores for emphasis and double “*” for setting something in bold. Should we maybe use that version?

![bildschirmfoto 2016-11-21 um 12 22 31](https://cloud.githubusercontent.com/assets/15341015/20481129/e73970dc-afe5-11e6-8189-2210fc0b6c17.png)

Example here: https://opin-stage.liqd.net/de/projects/opin-alleinstellungsmerkmale-fur-produktseite/
Markdown in comments inconsistent with rest
We decided against Markdown in most of the other input fields and used CKEditor instead, but comments still use markdown. But this is not documented anywhere. So these are our options:
1. support markdown in comments, but also advertise it to the user
2. support only new lines and no further formatting in comments (like _italic_, **bold**, ~~strike~~)
3. add ckeditor to comment edit field and allow some basic html in comments
--- END ISSUE ---

Below are some code segments, each from a relevant file. One or more of these files may contain bugs.

--- BEGIN FILES ---
Path: `euth/comments/templatetags/react_comments.py`
Content:
```
1 import json
2 
3 from django import template, utils
4 from django.contrib.contenttypes.models import ContentType
5 from django.utils.safestring import mark_safe
6 
7 
8 from ..models import Comment
9 from ..serializers import ThreadSerializer
10 
11 register = template.Library()
12 
13 
14 @register.simple_tag(takes_context=True)
15 def react_comments(context, obj):
16     request = context['request']
17 
18     serializer = ThreadSerializer(
19         obj.comments.all(), many=True, context={'request': request})
20     comments = serializer.data
21 
22     user = request.user
23     is_authenticated = user.is_authenticated()
24     is_moderator = user.is_superuser or user in obj.project.moderators.all()
25     user_name = user.username
26 
27     contenttype = ContentType.objects.get_for_model(obj)
28     permission = '{ct.app_label}.comment_{ct.model}'.format(ct=contenttype)
29     has_comment_permission = user.has_perm(permission, obj)
30 
31     comments_contenttype = ContentType.objects.get_for_model(Comment)
32     pk = obj.pk
33 
34     language = utils.translation.get_language()
35 
36     mountpoint = 'comments_for_{contenttype}_{pk}'.format(
37         contenttype=contenttype.pk,
38         pk=pk
39     )
40     attributes = {
41         'comments': comments,
42         'comments_contenttype': comments_contenttype.pk,
43         'subjectType': contenttype.pk,
44         'subjectId': pk,
45         'isAuthenticated': is_authenticated,
46         'isModerator': is_moderator,
47         'user_name': user_name,
48         'language': language,
49         'isReadOnly': not has_comment_permission,
50     }
51 
52     return mark_safe((
53         '<div id={mountpoint}></div><script>window.opin.renderComment('
54         '{mountpoint}, {attributes})</script>').format(
55             attributes=json.dumps(attributes),
56             mountpoint=json.dumps(mountpoint)
57     )
58     )
```
Path: `euth/comments/models.py`
Content:
```
1 from django.conf import settings
2 from django.contrib.contenttypes.fields import (GenericForeignKey,
3                                                  GenericRelation)
4 from django.contrib.contenttypes.models import ContentType
5 from django.db import models
6 from django.utils.translation import ugettext_lazy as _
7 
8 from euth.contrib.base_models import UserGeneratedContentModel
9 from euth.contrib.generics import models_to_limit
10 from euth.ratings import models as rating_models
11 
12 
13 class Comment(UserGeneratedContentModel):
14 
15     content_type = models.ForeignKey(
16         ContentType,
17         on_delete=models.CASCADE,
18         limit_choices_to=models_to_limit(settings.COMMENTABLES)
19     )
20     object_pk = models.PositiveIntegerField()
21     content_object = GenericForeignKey(
22         ct_field="content_type", fk_field="object_pk")
23     comment = models.TextField(max_length=1024)
24     is_removed = models.BooleanField(default=False)
25     is_censored = models.BooleanField(default=False)
26     ratings = GenericRelation(rating_models.Rating,
27                               related_query_name='comment',
28                               object_id_field='object_pk')
29     child_comments = GenericRelation('self',
30                                      related_query_name='parent_comment',
31                                      object_id_field='object_pk')
32 
33     class Meta:
34         verbose_name = _("Comment")
35         verbose_name_plural = _("Comments")
36         ordering = ('created',)
37 
38     def __str__(self):
39         if len(self.comment) > 50:
40             return "comment: {} ...".format(self.comment[:50])
41         else:
42             return "comment: {}".format(self.comment)
43 
44     def save(self, *args, **kwargs):
45         """
46         Change the text of the comment if
47         the comment was marked removed or censored
48         """
49 
50         if self.is_removed:
51             self.comment = 'deleted by creator'
52         if self.is_censored:
53             self.comment = 'deleted by moderator'
54         return super(Comment, self).save(*args, **kwargs)
55 
56     def get_absolute_url(self):
57         if hasattr(self.content_object, 'get_absolute_url'):
58             return self.content_object.get_absolute_url()
59         elif hasattr(self.project, 'get_absolute_url'):
60             return self.project.get_absolute_url()
61         else:
62             return None
63 
64     @property
65     def notification_content(self):
66         return self.comment
67 
68     @property
69     def project(self):
70         co = self.content_object
71         if isinstance(co, self.__class__):
72             co = co.content_object
73         return co.project
```
--- END FILES ---

Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example:

```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
diff --git a/euth/comments/models.py b/euth/comments/models.py --- a/euth/comments/models.py +++ b/euth/comments/models.py @@ -5,6 +5,7 @@ from django.db import models from django.utils.translation import ugettext_lazy as _ +from contrib.transforms import html_transforms from euth.contrib.base_models import UserGeneratedContentModel from euth.contrib.generics import models_to_limit from euth.ratings import models as rating_models @@ -47,6 +48,9 @@ the comment was marked removed or censored """ + self.comment = html_transforms.clean_html_all( + self.comment) + if self.is_removed: self.comment = 'deleted by creator' if self.is_censored: diff --git a/euth/comments/templatetags/react_comments.py b/euth/comments/templatetags/react_comments.py --- a/euth/comments/templatetags/react_comments.py +++ b/euth/comments/templatetags/react_comments.py @@ -16,7 +16,8 @@ request = context['request'] serializer = ThreadSerializer( - obj.comments.all(), many=True, context={'request': request}) + obj.comments.all().order_by('-created'), + many=True, context={'request': request}) comments = serializer.data user = request.user
{"golden_diff": "diff --git a/euth/comments/models.py b/euth/comments/models.py\n--- a/euth/comments/models.py\n+++ b/euth/comments/models.py\n@@ -5,6 +5,7 @@\n from django.db import models\n from django.utils.translation import ugettext_lazy as _\n \n+from contrib.transforms import html_transforms\n from euth.contrib.base_models import UserGeneratedContentModel\n from euth.contrib.generics import models_to_limit\n from euth.ratings import models as rating_models\n@@ -47,6 +48,9 @@\n the comment was marked removed or censored\n \"\"\"\n \n+ self.comment = html_transforms.clean_html_all(\n+ self.comment)\n+\n if self.is_removed:\n self.comment = 'deleted by creator'\n if self.is_censored:\ndiff --git a/euth/comments/templatetags/react_comments.py b/euth/comments/templatetags/react_comments.py\n--- a/euth/comments/templatetags/react_comments.py\n+++ b/euth/comments/templatetags/react_comments.py\n@@ -16,7 +16,8 @@\n request = context['request']\n \n serializer = ThreadSerializer(\n- obj.comments.all(), many=True, context={'request': request})\n+ obj.comments.all().order_by('-created'),\n+ many=True, context={'request': request})\n comments = serializer.data\n \n user = request.user\n", "issue": "Markdown messes with Gender Mainstreaming\nWhen writing \u201cInitiator*innen [\u2026] Entscheidungstr\u00e4ger*innen\u201d in a comment, the text between the `*` is set in italics, because of the markdown formatting, I assume. Is there anything we can do about that? If I remember it correctly, some version of markdown only allows underscores for emphasis and double \u201c*\u201d for setting something in bold. Should we maybe use that version?\r\n\r\n![bildschirmfoto 2016-11-21 um 12 22 31](https://cloud.githubusercontent.com/assets/15341015/20481129/e73970dc-afe5-11e6-8189-2210fc0b6c17.png)\r\n\r\nExample here: https://opin-stage.liqd.net/de/projects/opin-alleinstellungsmerkmale-fur-produktseite/\nMarkdown in comments in consistent with rest\nWe decided against Markdown in most of the other input fields and used CKEditor instead, but comments still use markdown. But this is not document anywhere. So these are our options:\n1. support markdown in comments, but also advertise it to the user\n2. support only new lines and nor further formatting in comments (like _italic_, **bold**, ~~strike~~)\n3. 
add ckeditor to comment edit field and allow some basic html in comments\n\n", "before_files": [{"content": "import json\n\nfrom django import template, utils\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.utils.safestring import mark_safe\n\n\nfrom ..models import Comment\nfrom ..serializers import ThreadSerializer\n\nregister = template.Library()\n\n\[email protected]_tag(takes_context=True)\ndef react_comments(context, obj):\n request = context['request']\n\n serializer = ThreadSerializer(\n obj.comments.all(), many=True, context={'request': request})\n comments = serializer.data\n\n user = request.user\n is_authenticated = user.is_authenticated()\n is_moderator = user.is_superuser or user in obj.project.moderators.all()\n user_name = user.username\n\n contenttype = ContentType.objects.get_for_model(obj)\n permission = '{ct.app_label}.comment_{ct.model}'.format(ct=contenttype)\n has_comment_permission = user.has_perm(permission, obj)\n\n comments_contenttype = ContentType.objects.get_for_model(Comment)\n pk = obj.pk\n\n language = utils.translation.get_language()\n\n mountpoint = 'comments_for_{contenttype}_{pk}'.format(\n contenttype=contenttype.pk,\n pk=pk\n )\n attributes = {\n 'comments': comments,\n 'comments_contenttype': comments_contenttype.pk,\n 'subjectType': contenttype.pk,\n 'subjectId': pk,\n 'isAuthenticated': is_authenticated,\n 'isModerator': is_moderator,\n 'user_name': user_name,\n 'language': language,\n 'isReadOnly': not has_comment_permission,\n }\n\n return mark_safe((\n '<div id={mountpoint}></div><script>window.opin.renderComment('\n '{mountpoint}, {attributes})</script>').format(\n attributes=json.dumps(attributes),\n mountpoint=json.dumps(mountpoint)\n )\n )\n", "path": "euth/comments/templatetags/react_comments.py"}, {"content": "from django.conf import settings\nfrom django.contrib.contenttypes.fields import (GenericForeignKey,\n GenericRelation)\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom euth.contrib.base_models import UserGeneratedContentModel\nfrom euth.contrib.generics import models_to_limit\nfrom euth.ratings import models as rating_models\n\n\nclass Comment(UserGeneratedContentModel):\n\n content_type = models.ForeignKey(\n ContentType,\n on_delete=models.CASCADE,\n limit_choices_to=models_to_limit(settings.COMMENTABLES)\n )\n object_pk = models.PositiveIntegerField()\n content_object = GenericForeignKey(\n ct_field=\"content_type\", fk_field=\"object_pk\")\n comment = models.TextField(max_length=1024)\n is_removed = models.BooleanField(default=False)\n is_censored = models.BooleanField(default=False)\n ratings = GenericRelation(rating_models.Rating,\n related_query_name='comment',\n object_id_field='object_pk')\n child_comments = GenericRelation('self',\n related_query_name='parent_comment',\n object_id_field='object_pk')\n\n class Meta:\n verbose_name = _(\"Comment\")\n verbose_name_plural = _(\"Comments\")\n ordering = ('created',)\n\n def __str__(self):\n if len(self.comment) > 50:\n return \"comment: {} ...\".format(self.comment[:50])\n else:\n return \"comment: {}\".format(self.comment)\n\n def save(self, *args, **kwargs):\n \"\"\"\n Change the text of the comment if\n the comment was marked removed or censored\n \"\"\"\n\n if self.is_removed:\n self.comment = 'deleted by creator'\n if self.is_censored:\n self.comment = 'deleted by moderator'\n return super(Comment, self).save(*args, **kwargs)\n\n def 
get_absolute_url(self):\n if hasattr(self.content_object, 'get_absolute_url'):\n return self.content_object.get_absolute_url()\n elif hasattr(self.project, 'get_absolute_url'):\n return self.project.get_absolute_url()\n else:\n return None\n\n @property\n def notification_content(self):\n return self.comment\n\n @property\n def project(self):\n co = self.content_object\n if isinstance(co, self.__class__):\n co = co.content_object\n return co.project\n", "path": "euth/comments/models.py"}], "after_files": [{"content": "import json\n\nfrom django import template, utils\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.utils.safestring import mark_safe\n\n\nfrom ..models import Comment\nfrom ..serializers import ThreadSerializer\n\nregister = template.Library()\n\n\[email protected]_tag(takes_context=True)\ndef react_comments(context, obj):\n request = context['request']\n\n serializer = ThreadSerializer(\n obj.comments.all().order_by('-created'),\n many=True, context={'request': request})\n comments = serializer.data\n\n user = request.user\n is_authenticated = user.is_authenticated()\n is_moderator = user.is_superuser or user in obj.project.moderators.all()\n user_name = user.username\n\n contenttype = ContentType.objects.get_for_model(obj)\n permission = '{ct.app_label}.comment_{ct.model}'.format(ct=contenttype)\n has_comment_permission = user.has_perm(permission, obj)\n\n comments_contenttype = ContentType.objects.get_for_model(Comment)\n pk = obj.pk\n\n language = utils.translation.get_language()\n\n mountpoint = 'comments_for_{contenttype}_{pk}'.format(\n contenttype=contenttype.pk,\n pk=pk\n )\n attributes = {\n 'comments': comments,\n 'comments_contenttype': comments_contenttype.pk,\n 'subjectType': contenttype.pk,\n 'subjectId': pk,\n 'isAuthenticated': is_authenticated,\n 'isModerator': is_moderator,\n 'user_name': user_name,\n 'language': language,\n 'isReadOnly': not has_comment_permission,\n }\n\n return mark_safe((\n '<div id={mountpoint}></div><script>window.opin.renderComment('\n '{mountpoint}, {attributes})</script>').format(\n attributes=json.dumps(attributes),\n mountpoint=json.dumps(mountpoint)\n )\n )\n", "path": "euth/comments/templatetags/react_comments.py"}, {"content": "from django.conf import settings\nfrom django.contrib.contenttypes.fields import (GenericForeignKey,\n GenericRelation)\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom contrib.transforms import html_transforms\nfrom euth.contrib.base_models import UserGeneratedContentModel\nfrom euth.contrib.generics import models_to_limit\nfrom euth.ratings import models as rating_models\n\n\nclass Comment(UserGeneratedContentModel):\n\n content_type = models.ForeignKey(\n ContentType,\n on_delete=models.CASCADE,\n limit_choices_to=models_to_limit(settings.COMMENTABLES)\n )\n object_pk = models.PositiveIntegerField()\n content_object = GenericForeignKey(\n ct_field=\"content_type\", fk_field=\"object_pk\")\n comment = models.TextField(max_length=1024)\n is_removed = models.BooleanField(default=False)\n is_censored = models.BooleanField(default=False)\n ratings = GenericRelation(rating_models.Rating,\n related_query_name='comment',\n object_id_field='object_pk')\n child_comments = GenericRelation('self',\n related_query_name='parent_comment',\n object_id_field='object_pk')\n\n class Meta:\n verbose_name = _(\"Comment\")\n verbose_name_plural = _(\"Comments\")\n ordering = ('created',)\n\n 
def __str__(self):\n if len(self.comment) > 50:\n return \"comment: {} ...\".format(self.comment[:50])\n else:\n return \"comment: {}\".format(self.comment)\n\n def save(self, *args, **kwargs):\n \"\"\"\n Change the text of the comment if\n the comment was marked removed or censored\n \"\"\"\n\n self.comment = html_transforms.clean_html_all(\n self.comment)\n\n if self.is_removed:\n self.comment = 'deleted by creator'\n if self.is_censored:\n self.comment = 'deleted by moderator'\n return super(Comment, self).save(*args, **kwargs)\n\n def get_absolute_url(self):\n if hasattr(self.content_object, 'get_absolute_url'):\n return self.content_object.get_absolute_url()\n elif hasattr(self.project, 'get_absolute_url'):\n return self.project.get_absolute_url()\n else:\n return None\n\n @property\n def notification_content(self):\n return self.comment\n\n @property\n def project(self):\n co = self.content_object\n if isinstance(co, self.__class__):\n co = co.content_object\n return co.project\n", "path": "euth/comments/models.py"}]}
1,720
295
gh_patches_debug_1853
rasdani/github-patches
git_diff
microsoft__playwright-python-145
We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DEBUG outputs won't get forwarded

--- END ISSUE ---

Below are some code segments, each from a relevant file. One or more of these files may contain bugs.

--- BEGIN FILES ---
Path: `playwright/main.py`
Content:
```
1 # Copyright (c) Microsoft Corporation.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 
15 import asyncio
16 import subprocess
17 import sys
18 from typing import Any
19 
20 from greenlet import greenlet
21 
22 from playwright.async_api import Playwright as AsyncPlaywright
23 from playwright.connection import Connection
24 from playwright.helper import Error
25 from playwright.object_factory import create_remote_object
26 from playwright.path_utils import get_file_dirname
27 from playwright.playwright import Playwright
28 from playwright.sync_api import Playwright as SyncPlaywright
29 from playwright.sync_base import dispatcher_fiber, set_dispatcher_fiber
30 
31 
32 def compute_driver_name() -> str:
33     platform = sys.platform
34     if platform == "darwin":
35         result = "driver-macos"
36     elif platform == "linux":
37         result = "driver-linux"
38     elif platform == "win32":
39         result = "driver-win.exe"
40     return result
41 
42 
43 async def run_driver_async() -> Connection:
44     package_path = get_file_dirname()
45     driver_name = compute_driver_name()
46     driver_executable = package_path / "drivers" / driver_name
47 
48     proc = await asyncio.create_subprocess_exec(
49         str(driver_executable),
50         stdin=asyncio.subprocess.PIPE,
51         stdout=asyncio.subprocess.PIPE,
52         stderr=asyncio.subprocess.PIPE,
53         limit=32768,
54     )
55     assert proc.stdout
56     assert proc.stdin
57     connection = Connection(
58         proc.stdout, proc.stdin, create_remote_object, asyncio.get_event_loop()
59     )
60     return connection
61 
62 
63 def run_driver() -> Connection:
64     loop = asyncio.get_event_loop()
65     if loop.is_running():
66         raise Error("Can only run one Playwright at a time.")
67     return loop.run_until_complete(run_driver_async())
68 
69 
70 class SyncPlaywrightContextManager:
71     def __init__(self) -> None:
72         self._connection = run_driver()
73         self._playwright: SyncPlaywright
74 
75     def __enter__(self) -> SyncPlaywright:
76         g_self = greenlet.getcurrent()
77 
78         def callback_wrapper(playwright_impl: Playwright) -> None:
79             self._playwright = SyncPlaywright(playwright_impl)
80             g_self.switch()
81 
82         self._connection.call_on_object_with_known_name("Playwright", callback_wrapper)
83         set_dispatcher_fiber(greenlet(lambda: self._connection.run_sync()))
84         dispatcher_fiber().switch()
85         return self._playwright
86 
87     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
88         self._connection.stop_sync()
89 
90 
91 class AsyncPlaywrightContextManager:
92     def __init__(self) -> None:
93         self._connection: Connection
94 
95     async def __aenter__(self) -> AsyncPlaywright:
96         self._connection = await run_driver_async()
97         self._connection.run_async()
98         return AsyncPlaywright(
99             await self._connection.wait_for_object_with_known_name("Playwright")
100         )
101 
102     async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
103         self._connection.stop_async()
104 
105 
106 if sys.platform == "win32":
107     # Use ProactorEventLoop in 3.7, which is default in 3.8
108     loop = asyncio.ProactorEventLoop()
109     asyncio.set_event_loop(loop)
110 
111 
112 def main() -> None:
113     if "install" not in sys.argv:
114         print('Run "python -m playwright install" to complete installation')
115         return
116     package_path = get_file_dirname()
117     driver_name = compute_driver_name()
118     driver_executable = package_path / "drivers" / driver_name
119     print("Installing the browsers...")
120     subprocess.check_call(f"{driver_executable} install", shell=True)
121 
122     print("Playwright is now ready for use")
```
--- END FILES ---

Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example:

```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
diff --git a/playwright/main.py b/playwright/main.py --- a/playwright/main.py +++ b/playwright/main.py @@ -49,7 +49,7 @@ str(driver_executable), stdin=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, + stderr=sys.stderr, limit=32768, ) assert proc.stdout
{"golden_diff": "diff --git a/playwright/main.py b/playwright/main.py\n--- a/playwright/main.py\n+++ b/playwright/main.py\n@@ -49,7 +49,7 @@\n str(driver_executable),\n stdin=asyncio.subprocess.PIPE,\n stdout=asyncio.subprocess.PIPE,\n- stderr=asyncio.subprocess.PIPE,\n+ stderr=sys.stderr,\n limit=32768,\n )\n assert proc.stdout\n", "issue": "DEBUG outputs won't get forwarded\n\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport asyncio\nimport subprocess\nimport sys\nfrom typing import Any\n\nfrom greenlet import greenlet\n\nfrom playwright.async_api import Playwright as AsyncPlaywright\nfrom playwright.connection import Connection\nfrom playwright.helper import Error\nfrom playwright.object_factory import create_remote_object\nfrom playwright.path_utils import get_file_dirname\nfrom playwright.playwright import Playwright\nfrom playwright.sync_api import Playwright as SyncPlaywright\nfrom playwright.sync_base import dispatcher_fiber, set_dispatcher_fiber\n\n\ndef compute_driver_name() -> str:\n platform = sys.platform\n if platform == \"darwin\":\n result = \"driver-macos\"\n elif platform == \"linux\":\n result = \"driver-linux\"\n elif platform == \"win32\":\n result = \"driver-win.exe\"\n return result\n\n\nasync def run_driver_async() -> Connection:\n package_path = get_file_dirname()\n driver_name = compute_driver_name()\n driver_executable = package_path / \"drivers\" / driver_name\n\n proc = await asyncio.create_subprocess_exec(\n str(driver_executable),\n stdin=asyncio.subprocess.PIPE,\n stdout=asyncio.subprocess.PIPE,\n stderr=asyncio.subprocess.PIPE,\n limit=32768,\n )\n assert proc.stdout\n assert proc.stdin\n connection = Connection(\n proc.stdout, proc.stdin, create_remote_object, asyncio.get_event_loop()\n )\n return connection\n\n\ndef run_driver() -> Connection:\n loop = asyncio.get_event_loop()\n if loop.is_running():\n raise Error(\"Can only run one Playwright at a time.\")\n return loop.run_until_complete(run_driver_async())\n\n\nclass SyncPlaywrightContextManager:\n def __init__(self) -> None:\n self._connection = run_driver()\n self._playwright: SyncPlaywright\n\n def __enter__(self) -> SyncPlaywright:\n g_self = greenlet.getcurrent()\n\n def callback_wrapper(playwright_impl: Playwright) -> None:\n self._playwright = SyncPlaywright(playwright_impl)\n g_self.switch()\n\n self._connection.call_on_object_with_known_name(\"Playwright\", callback_wrapper)\n set_dispatcher_fiber(greenlet(lambda: self._connection.run_sync()))\n dispatcher_fiber().switch()\n return self._playwright\n\n def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:\n self._connection.stop_sync()\n\n\nclass AsyncPlaywrightContextManager:\n def __init__(self) -> None:\n self._connection: Connection\n\n async def __aenter__(self) -> AsyncPlaywright:\n self._connection = await run_driver_async()\n self._connection.run_async()\n return AsyncPlaywright(\n await 
self._connection.wait_for_object_with_known_name(\"Playwright\")\n )\n\n async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:\n self._connection.stop_async()\n\n\nif sys.platform == \"win32\":\n # Use ProactorEventLoop in 3.7, which is default in 3.8\n loop = asyncio.ProactorEventLoop()\n asyncio.set_event_loop(loop)\n\n\ndef main() -> None:\n if \"install\" not in sys.argv:\n print('Run \"python -m playwright install\" to complete installation')\n return\n package_path = get_file_dirname()\n driver_name = compute_driver_name()\n driver_executable = package_path / \"drivers\" / driver_name\n print(\"Installing the browsers...\")\n subprocess.check_call(f\"{driver_executable} install\", shell=True)\n\n print(\"Playwright is now ready for use\")\n", "path": "playwright/main.py"}], "after_files": [{"content": "# Copyright (c) Microsoft Corporation.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport asyncio\nimport subprocess\nimport sys\nfrom typing import Any\n\nfrom greenlet import greenlet\n\nfrom playwright.async_api import Playwright as AsyncPlaywright\nfrom playwright.connection import Connection\nfrom playwright.helper import Error\nfrom playwright.object_factory import create_remote_object\nfrom playwright.path_utils import get_file_dirname\nfrom playwright.playwright import Playwright\nfrom playwright.sync_api import Playwright as SyncPlaywright\nfrom playwright.sync_base import dispatcher_fiber, set_dispatcher_fiber\n\n\ndef compute_driver_name() -> str:\n platform = sys.platform\n if platform == \"darwin\":\n result = \"driver-macos\"\n elif platform == \"linux\":\n result = \"driver-linux\"\n elif platform == \"win32\":\n result = \"driver-win.exe\"\n return result\n\n\nasync def run_driver_async() -> Connection:\n package_path = get_file_dirname()\n driver_name = compute_driver_name()\n driver_executable = package_path / \"drivers\" / driver_name\n\n proc = await asyncio.create_subprocess_exec(\n str(driver_executable),\n stdin=asyncio.subprocess.PIPE,\n stdout=asyncio.subprocess.PIPE,\n stderr=sys.stderr,\n limit=32768,\n )\n assert proc.stdout\n assert proc.stdin\n connection = Connection(\n proc.stdout, proc.stdin, create_remote_object, asyncio.get_event_loop()\n )\n return connection\n\n\ndef run_driver() -> Connection:\n loop = asyncio.get_event_loop()\n if loop.is_running():\n raise Error(\"Can only run one Playwright at a time.\")\n return loop.run_until_complete(run_driver_async())\n\n\nclass SyncPlaywrightContextManager:\n def __init__(self) -> None:\n self._connection = run_driver()\n self._playwright: SyncPlaywright\n\n def __enter__(self) -> SyncPlaywright:\n g_self = greenlet.getcurrent()\n\n def callback_wrapper(playwright_impl: Playwright) -> None:\n self._playwright = SyncPlaywright(playwright_impl)\n g_self.switch()\n\n self._connection.call_on_object_with_known_name(\"Playwright\", callback_wrapper)\n set_dispatcher_fiber(greenlet(lambda: self._connection.run_sync()))\n dispatcher_fiber().switch()\n return self._playwright\n\n def 
__exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:\n self._connection.stop_sync()\n\n\nclass AsyncPlaywrightContextManager:\n def __init__(self) -> None:\n self._connection: Connection\n\n async def __aenter__(self) -> AsyncPlaywright:\n self._connection = await run_driver_async()\n self._connection.run_async()\n return AsyncPlaywright(\n await self._connection.wait_for_object_with_known_name(\"Playwright\")\n )\n\n async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:\n self._connection.stop_async()\n\n\nif sys.platform == \"win32\":\n # Use ProactorEventLoop in 3.7, which is default in 3.8\n loop = asyncio.ProactorEventLoop()\n asyncio.set_event_loop(loop)\n\n\ndef main() -> None:\n if \"install\" not in sys.argv:\n print('Run \"python -m playwright install\" to complete installation')\n return\n package_path = get_file_dirname()\n driver_name = compute_driver_name()\n driver_executable = package_path / \"drivers\" / driver_name\n print(\"Installing the browsers...\")\n subprocess.check_call(f\"{driver_executable} install\", shell=True)\n\n print(\"Playwright is now ready for use\")\n", "path": "playwright/main.py"}]}
1,442
96
gh_patches_debug_20086
rasdani/github-patches
git_diff
python-telegram-bot__python-telegram-bot-3911
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Add rich equality comparison to `WriteAccessAllowed` The comparison should be based on the `web_app_name` attribute only. See https://github.com/python-telegram-bot/python-telegram-bot/pull/3898#discussion_r1337582872 --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `telegram/_writeaccessallowed.py` Content: ``` 1 #!/usr/bin/env python 2 # 3 # A library that provides a Python interface to the Telegram Bot API 4 # Copyright (C) 2015-2023 5 # Leandro Toledo de Souza <[email protected]> 6 # 7 # This program is free software: you can redistribute it and/or modify 8 # it under the terms of the GNU Lesser Public License as published by 9 # the Free Software Foundation, either version 3 of the License, or 10 # (at your option) any later version. 11 # 12 # This program is distributed in the hope that it will be useful, 13 # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 # GNU Lesser Public License for more details. 16 # 17 # You should have received a copy of the GNU Lesser Public License 18 # along with this program. If not, see [http://www.gnu.org/licenses/]. 19 """This module contains objects related to the write access allowed service message.""" 20 from typing import Optional 21 22 from telegram._telegramobject import TelegramObject 23 from telegram._utils.types import JSONDict 24 25 26 class WriteAccessAllowed(TelegramObject): 27 """ 28 This object represents a service message about a user allowing a bot to write messages after 29 adding the bot to the attachment menu or launching a Web App from a link. 30 31 .. versionadded:: 20.0 32 33 Args: 34 web_app_name (:obj:`str`, optional): Name of the Web App which was launched from a link. 35 36 .. versionadded:: 20.3 37 38 Attributes: 39 web_app_name (:obj:`str`): Optional. Name of the Web App which was launched from a link. 40 41 .. versionadded:: 20.3 42 43 """ 44 45 __slots__ = ("web_app_name",) 46 47 def __init__( 48 self, web_app_name: Optional[str] = None, *, api_kwargs: Optional[JSONDict] = None 49 ): 50 super().__init__(api_kwargs=api_kwargs) 51 self.web_app_name: Optional[str] = web_app_name 52 53 self._freeze() 54 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/telegram/_writeaccessallowed.py b/telegram/_writeaccessallowed.py --- a/telegram/_writeaccessallowed.py +++ b/telegram/_writeaccessallowed.py @@ -28,7 +28,12 @@ This object represents a service message about a user allowing a bot to write messages after adding the bot to the attachment menu or launching a Web App from a link. + Objects of this class are comparable in terms of equality. Two objects of this class are + considered equal, if their :attr:`web_app_name` is equal. + .. versionadded:: 20.0 + .. versionchanged:: NEXT.VERSION + Added custom equality comparison for objects of this class. Args: web_app_name (:obj:`str`, optional): Name of the Web App which was launched from a link. @@ -50,4 +55,6 @@ super().__init__(api_kwargs=api_kwargs) self.web_app_name: Optional[str] = web_app_name + self._id_attrs = (self.web_app_name,) + self._freeze()
{"golden_diff": "diff --git a/telegram/_writeaccessallowed.py b/telegram/_writeaccessallowed.py\n--- a/telegram/_writeaccessallowed.py\n+++ b/telegram/_writeaccessallowed.py\n@@ -28,7 +28,12 @@\n This object represents a service message about a user allowing a bot to write messages after\n adding the bot to the attachment menu or launching a Web App from a link.\n \n+ Objects of this class are comparable in terms of equality. Two objects of this class are\n+ considered equal, if their :attr:`web_app_name` is equal.\n+\n .. versionadded:: 20.0\n+ .. versionchanged:: NEXT.VERSION\n+ Added custom equality comparison for objects of this class.\n \n Args:\n web_app_name (:obj:`str`, optional): Name of the Web App which was launched from a link.\n@@ -50,4 +55,6 @@\n super().__init__(api_kwargs=api_kwargs)\n self.web_app_name: Optional[str] = web_app_name\n \n+ self._id_attrs = (self.web_app_name,)\n+\n self._freeze()\n", "issue": "Add rich equality comparison to `WriteAccessAllowed`\nThe comparison should be based on the `web_app_name` attribute only.\r\n\r\nSee https://github.com/python-telegram-bot/python-telegram-bot/pull/3898#discussion_r1337582872\n", "before_files": [{"content": "#!/usr/bin/env python\n#\n# A library that provides a Python interface to the Telegram Bot API\n# Copyright (C) 2015-2023\n# Leandro Toledo de Souza <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser Public License for more details.\n#\n# You should have received a copy of the GNU Lesser Public License\n# along with this program. If not, see [http://www.gnu.org/licenses/].\n\"\"\"This module contains objects related to the write access allowed service message.\"\"\"\nfrom typing import Optional\n\nfrom telegram._telegramobject import TelegramObject\nfrom telegram._utils.types import JSONDict\n\n\nclass WriteAccessAllowed(TelegramObject):\n \"\"\"\n This object represents a service message about a user allowing a bot to write messages after\n adding the bot to the attachment menu or launching a Web App from a link.\n\n .. versionadded:: 20.0\n\n Args:\n web_app_name (:obj:`str`, optional): Name of the Web App which was launched from a link.\n\n .. versionadded:: 20.3\n\n Attributes:\n web_app_name (:obj:`str`): Optional. Name of the Web App which was launched from a link.\n\n .. 
versionadded:: 20.3\n\n \"\"\"\n\n __slots__ = (\"web_app_name\",)\n\n def __init__(\n self, web_app_name: Optional[str] = None, *, api_kwargs: Optional[JSONDict] = None\n ):\n super().__init__(api_kwargs=api_kwargs)\n self.web_app_name: Optional[str] = web_app_name\n\n self._freeze()\n", "path": "telegram/_writeaccessallowed.py"}], "after_files": [{"content": "#!/usr/bin/env python\n#\n# A library that provides a Python interface to the Telegram Bot API\n# Copyright (C) 2015-2023\n# Leandro Toledo de Souza <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser Public License for more details.\n#\n# You should have received a copy of the GNU Lesser Public License\n# along with this program. If not, see [http://www.gnu.org/licenses/].\n\"\"\"This module contains objects related to the write access allowed service message.\"\"\"\nfrom typing import Optional\n\nfrom telegram._telegramobject import TelegramObject\nfrom telegram._utils.types import JSONDict\n\n\nclass WriteAccessAllowed(TelegramObject):\n \"\"\"\n This object represents a service message about a user allowing a bot to write messages after\n adding the bot to the attachment menu or launching a Web App from a link.\n\n Objects of this class are comparable in terms of equality. Two objects of this class are\n considered equal, if their :attr:`web_app_name` is equal.\n\n .. versionadded:: 20.0\n .. versionchanged:: NEXT.VERSION\n Added custom equality comparison for objects of this class.\n\n Args:\n web_app_name (:obj:`str`, optional): Name of the Web App which was launched from a link.\n\n .. versionadded:: 20.3\n\n Attributes:\n web_app_name (:obj:`str`): Optional. Name of the Web App which was launched from a link.\n\n .. versionadded:: 20.3\n\n \"\"\"\n\n __slots__ = (\"web_app_name\",)\n\n def __init__(\n self, web_app_name: Optional[str] = None, *, api_kwargs: Optional[JSONDict] = None\n ):\n super().__init__(api_kwargs=api_kwargs)\n self.web_app_name: Optional[str] = web_app_name\n\n self._id_attrs = (self.web_app_name,)\n\n self._freeze()\n", "path": "telegram/_writeaccessallowed.py"}]}
871
242
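Context for the record above: the one-line `_id_attrs` assignment works because python-telegram-bot's `TelegramObject` base class drives `__eq__` and `__hash__` through that tuple. The sketch below is a simplified reconstruction of the pattern, not the library's actual base-class code:

```python
# Minimal sketch of the _id_attrs equality pattern used across
# python-telegram-bot classes. Simplified illustration only -- the real
# TelegramObject base class is more involved.
from typing import Optional, Tuple


class SketchTelegramObject:
    _id_attrs: Tuple[object, ...] = ()

    def __eq__(self, other):
        # Equal only if same class and the identifying tuples match;
        # an empty _id_attrs falls back to default identity comparison.
        if isinstance(other, self.__class__) and self._id_attrs:
            return self._id_attrs == other._id_attrs
        return NotImplemented

    def __hash__(self) -> int:
        if self._id_attrs:
            return hash((self.__class__, self._id_attrs))
        return super().__hash__()


class SketchWriteAccessAllowed(SketchTelegramObject):
    def __init__(self, web_app_name: Optional[str] = None):
        self.web_app_name = web_app_name
        # The one-line change from the golden diff: equality is driven
        # solely by web_app_name.
        self._id_attrs = (self.web_app_name,)


assert SketchWriteAccessAllowed("app") == SketchWriteAccessAllowed("app")
assert SketchWriteAccessAllowed("app") != SketchWriteAccessAllowed("other")
```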
gh_patches_debug_2450
rasdani/github-patches
git_diff
MAKENTNU__web-204
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Fix delete permissions for course registration --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `make_queue/views/admin/course.py` Content: ``` 1 import io 2 3 import xlsxwriter 4 from django.contrib.auth.mixins import PermissionRequiredMixin 5 from django.db.models import Q 6 from django.http import HttpResponse 7 from django.shortcuts import redirect 8 from django.urls import reverse 9 from django.views.generic import TemplateView, View, CreateView, UpdateView, DeleteView 10 11 from make_queue.forms import Printer3DCourseForm 12 from make_queue.models.course import Printer3DCourse 13 14 15 class CourseView(TemplateView): 16 template_name = "make_queue/course/course_panel.html" 17 18 def get_context_data(self, **kwargs): 19 context_data = super().get_context_data(**kwargs) 20 context_data.update({ 21 "registrations": Printer3DCourse.objects.order_by("name"), 22 "possible_statuses": Printer3DCourse.STATUS_CHOICES, 23 }) 24 return context_data 25 26 27 class CreateRegistrationView(PermissionRequiredMixin, CreateView): 28 is_next = False 29 model = Printer3DCourse 30 form_class = Printer3DCourseForm 31 template_name = "make_queue/course/registration_create.html" 32 permission_required = ( 33 "make_queue.add_printer3dcourse", 34 ) 35 36 def get_context_data(self, **kwargs): 37 context_data = super().get_context_data(**kwargs) 38 if self.is_next: 39 context_data["is_next"] = True 40 return context_data 41 42 def get_success_url(self): 43 return reverse("create_course_registration_success") 44 45 46 class EditRegistrationView(PermissionRequiredMixin, UpdateView): 47 model = Printer3DCourse 48 form_class = Printer3DCourseForm 49 template_name = "make_queue/course/registration_edit.html" 50 permission_required = ( 51 "make_queue.change_printer3dcourse", 52 ) 53 54 def get_success_url(self): 55 return reverse("course_panel") 56 57 58 class DeleteRegistrationView(PermissionRequiredMixin, DeleteView): 59 model = Printer3DCourse 60 permission_required = ( 61 "make_queue.delete_printer3d_course", 62 ) 63 64 def get_success_url(self): 65 return reverse("course_panel") 66 67 68 class BulkStatusUpdate(View): 69 """ 70 Provides a method for bulk updating the status of course registrations 71 """ 72 73 def post(self, request): 74 status = request.POST.get("status") 75 registrations = list(map(int, request.POST.getlist("users"))) 76 Printer3DCourse.objects.filter(pk__in=registrations).update(status=status) 77 78 return redirect("course_panel") 79 80 81 class CourseXLSXView(View): 82 83 def post(self, request): 84 search_string = request.POST.get("search_text") 85 status_filter = request.POST.get("status_filter") 86 87 course_registrations = Printer3DCourse.objects.filter( 88 Q(username__icontains=search_string) | Q(name__icontains=search_string), status__icontains=status_filter) 89 90 # Use an in-memory output file, to avoid having to clean up the disk 91 output_file = io.BytesIO() 92 93 workbook = xlsxwriter.Workbook(output_file, {"in_memory": True}) 94 worksheet = workbook.add_worksheet("Kursdeltagere") 95 96 # Styles 97 format_header = workbook.add_format({ 98 "bold": True, 99 "font_size": 10, 100 "font_name": "Arial", 101 "font_color": "#000000", 102 "bg_color": "#f8c700", 103 "border": 1, 104 "border_color": "#000000", 105 }) 106 107 format_row = workbook.add_format({ 108 "font_size": 10, 109 
"font_name": "Arial", 110 "font_color": "#000000", 111 "bg_color": "#fff2cc", 112 "border": 1, 113 "border_color": "#000000", 114 }) 115 116 # Set column width 117 worksheet.set_column("A:A", 40) 118 worksheet.set_column("B:B", 20) 119 worksheet.set_column("C:C", 15) 120 worksheet.set_column("D:D", 10) 121 122 # Header 123 worksheet.write(0, 0, "Navn", format_header) 124 worksheet.write(0, 1, "Brukernavn", format_header) 125 worksheet.write(0, 2, "Kortnummer", format_header) 126 worksheet.write(0, 3, "Dato", format_header) 127 128 for index, registration in enumerate(course_registrations): 129 worksheet.write(index + 1, 0, registration.name, format_row) 130 worksheet.write(index + 1, 1, registration.username, format_row) 131 worksheet.write(index + 1, 2, registration.card_number if registration.card_number is not None else "", 132 format_row) 133 worksheet.write(index + 1, 3, registration.date.strftime("%Y-%m-%d"), format_row) 134 135 workbook.close() 136 output_file.seek(0) 137 138 response = HttpResponse(output_file.read(), 139 content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet") 140 141 response['Content-Disposition'] = 'attachment; filename="Kursdeltagere.xlsx"' 142 143 return response 144 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/make_queue/views/admin/course.py b/make_queue/views/admin/course.py --- a/make_queue/views/admin/course.py +++ b/make_queue/views/admin/course.py @@ -58,7 +58,7 @@ class DeleteRegistrationView(PermissionRequiredMixin, DeleteView): model = Printer3DCourse permission_required = ( - "make_queue.delete_printer3d_course", + "make_queue.delete_printer3dcourse", ) def get_success_url(self):
{"golden_diff": "diff --git a/make_queue/views/admin/course.py b/make_queue/views/admin/course.py\n--- a/make_queue/views/admin/course.py\n+++ b/make_queue/views/admin/course.py\n@@ -58,7 +58,7 @@\n class DeleteRegistrationView(PermissionRequiredMixin, DeleteView):\n model = Printer3DCourse\n permission_required = (\n- \"make_queue.delete_printer3d_course\",\n+ \"make_queue.delete_printer3dcourse\",\n )\n \n def get_success_url(self):\n", "issue": "Fix delete permissions for course registration\n\n", "before_files": [{"content": "import io\n\nimport xlsxwriter\nfrom django.contrib.auth.mixins import PermissionRequiredMixin\nfrom django.db.models import Q\nfrom django.http import HttpResponse\nfrom django.shortcuts import redirect\nfrom django.urls import reverse\nfrom django.views.generic import TemplateView, View, CreateView, UpdateView, DeleteView\n\nfrom make_queue.forms import Printer3DCourseForm\nfrom make_queue.models.course import Printer3DCourse\n\n\nclass CourseView(TemplateView):\n template_name = \"make_queue/course/course_panel.html\"\n\n def get_context_data(self, **kwargs):\n context_data = super().get_context_data(**kwargs)\n context_data.update({\n \"registrations\": Printer3DCourse.objects.order_by(\"name\"),\n \"possible_statuses\": Printer3DCourse.STATUS_CHOICES,\n })\n return context_data\n\n\nclass CreateRegistrationView(PermissionRequiredMixin, CreateView):\n is_next = False\n model = Printer3DCourse\n form_class = Printer3DCourseForm\n template_name = \"make_queue/course/registration_create.html\"\n permission_required = (\n \"make_queue.add_printer3dcourse\",\n )\n\n def get_context_data(self, **kwargs):\n context_data = super().get_context_data(**kwargs)\n if self.is_next:\n context_data[\"is_next\"] = True\n return context_data\n\n def get_success_url(self):\n return reverse(\"create_course_registration_success\")\n\n\nclass EditRegistrationView(PermissionRequiredMixin, UpdateView):\n model = Printer3DCourse\n form_class = Printer3DCourseForm\n template_name = \"make_queue/course/registration_edit.html\"\n permission_required = (\n \"make_queue.change_printer3dcourse\",\n )\n\n def get_success_url(self):\n return reverse(\"course_panel\")\n\n\nclass DeleteRegistrationView(PermissionRequiredMixin, DeleteView):\n model = Printer3DCourse\n permission_required = (\n \"make_queue.delete_printer3d_course\",\n )\n\n def get_success_url(self):\n return reverse(\"course_panel\")\n\n\nclass BulkStatusUpdate(View):\n \"\"\"\n Provides a method for bulk updating the status of course registrations\n \"\"\"\n\n def post(self, request):\n status = request.POST.get(\"status\")\n registrations = list(map(int, request.POST.getlist(\"users\")))\n Printer3DCourse.objects.filter(pk__in=registrations).update(status=status)\n\n return redirect(\"course_panel\")\n\n\nclass CourseXLSXView(View):\n\n def post(self, request):\n search_string = request.POST.get(\"search_text\")\n status_filter = request.POST.get(\"status_filter\")\n\n course_registrations = Printer3DCourse.objects.filter(\n Q(username__icontains=search_string) | Q(name__icontains=search_string), status__icontains=status_filter)\n\n # Use an in-memory output file, to avoid having to clean up the disk\n output_file = io.BytesIO()\n\n workbook = xlsxwriter.Workbook(output_file, {\"in_memory\": True})\n worksheet = workbook.add_worksheet(\"Kursdeltagere\")\n\n # Styles\n format_header = workbook.add_format({\n \"bold\": True,\n \"font_size\": 10,\n \"font_name\": \"Arial\",\n \"font_color\": \"#000000\",\n \"bg_color\": 
\"#f8c700\",\n \"border\": 1,\n \"border_color\": \"#000000\",\n })\n\n format_row = workbook.add_format({\n \"font_size\": 10,\n \"font_name\": \"Arial\",\n \"font_color\": \"#000000\",\n \"bg_color\": \"#fff2cc\",\n \"border\": 1,\n \"border_color\": \"#000000\",\n })\n\n # Set column width\n worksheet.set_column(\"A:A\", 40)\n worksheet.set_column(\"B:B\", 20)\n worksheet.set_column(\"C:C\", 15)\n worksheet.set_column(\"D:D\", 10)\n\n # Header\n worksheet.write(0, 0, \"Navn\", format_header)\n worksheet.write(0, 1, \"Brukernavn\", format_header)\n worksheet.write(0, 2, \"Kortnummer\", format_header)\n worksheet.write(0, 3, \"Dato\", format_header)\n\n for index, registration in enumerate(course_registrations):\n worksheet.write(index + 1, 0, registration.name, format_row)\n worksheet.write(index + 1, 1, registration.username, format_row)\n worksheet.write(index + 1, 2, registration.card_number if registration.card_number is not None else \"\",\n format_row)\n worksheet.write(index + 1, 3, registration.date.strftime(\"%Y-%m-%d\"), format_row)\n\n workbook.close()\n output_file.seek(0)\n\n response = HttpResponse(output_file.read(),\n content_type=\"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\")\n\n response['Content-Disposition'] = 'attachment; filename=\"Kursdeltagere.xlsx\"'\n\n return response\n", "path": "make_queue/views/admin/course.py"}], "after_files": [{"content": "import io\n\nimport xlsxwriter\nfrom django.contrib.auth.mixins import PermissionRequiredMixin\nfrom django.db.models import Q\nfrom django.http import HttpResponse\nfrom django.shortcuts import redirect\nfrom django.urls import reverse\nfrom django.views.generic import TemplateView, View, CreateView, UpdateView, DeleteView\n\nfrom make_queue.forms import Printer3DCourseForm\nfrom make_queue.models.course import Printer3DCourse\n\n\nclass CourseView(TemplateView):\n template_name = \"make_queue/course/course_panel.html\"\n\n def get_context_data(self, **kwargs):\n context_data = super().get_context_data(**kwargs)\n context_data.update({\n \"registrations\": Printer3DCourse.objects.order_by(\"name\"),\n \"possible_statuses\": Printer3DCourse.STATUS_CHOICES,\n })\n return context_data\n\n\nclass CreateRegistrationView(PermissionRequiredMixin, CreateView):\n is_next = False\n model = Printer3DCourse\n form_class = Printer3DCourseForm\n template_name = \"make_queue/course/registration_create.html\"\n permission_required = (\n \"make_queue.add_printer3dcourse\",\n )\n\n def get_context_data(self, **kwargs):\n context_data = super().get_context_data(**kwargs)\n if self.is_next:\n context_data[\"is_next\"] = True\n return context_data\n\n def get_success_url(self):\n return reverse(\"create_course_registration_success\")\n\n\nclass EditRegistrationView(PermissionRequiredMixin, UpdateView):\n model = Printer3DCourse\n form_class = Printer3DCourseForm\n template_name = \"make_queue/course/registration_edit.html\"\n permission_required = (\n \"make_queue.change_printer3dcourse\",\n )\n\n def get_success_url(self):\n return reverse(\"course_panel\")\n\n\nclass DeleteRegistrationView(PermissionRequiredMixin, DeleteView):\n model = Printer3DCourse\n permission_required = (\n \"make_queue.delete_printer3dcourse\",\n )\n\n def get_success_url(self):\n return reverse(\"course_panel\")\n\n\nclass BulkStatusUpdate(View):\n \"\"\"\n Provides a method for bulk updating the status of course registrations\n \"\"\"\n\n def post(self, request):\n status = request.POST.get(\"status\")\n registrations = list(map(int, 
request.POST.getlist(\"users\")))\n Printer3DCourse.objects.filter(pk__in=registrations).update(status=status)\n\n return redirect(\"course_panel\")\n\n\nclass CourseXLSXView(View):\n\n def post(self, request):\n search_string = request.POST.get(\"search_text\")\n status_filter = request.POST.get(\"status_filter\")\n\n course_registrations = Printer3DCourse.objects.filter(\n Q(username__icontains=search_string) | Q(name__icontains=search_string), status__icontains=status_filter)\n\n # Use an in-memory output file, to avoid having to clean up the disk\n output_file = io.BytesIO()\n\n workbook = xlsxwriter.Workbook(output_file, {\"in_memory\": True})\n worksheet = workbook.add_worksheet(\"Kursdeltagere\")\n\n # Styles\n format_header = workbook.add_format({\n \"bold\": True,\n \"font_size\": 10,\n \"font_name\": \"Arial\",\n \"font_color\": \"#000000\",\n \"bg_color\": \"#f8c700\",\n \"border\": 1,\n \"border_color\": \"#000000\",\n })\n\n format_row = workbook.add_format({\n \"font_size\": 10,\n \"font_name\": \"Arial\",\n \"font_color\": \"#000000\",\n \"bg_color\": \"#fff2cc\",\n \"border\": 1,\n \"border_color\": \"#000000\",\n })\n\n # Set column width\n worksheet.set_column(\"A:A\", 40)\n worksheet.set_column(\"B:B\", 20)\n worksheet.set_column(\"C:C\", 15)\n worksheet.set_column(\"D:D\", 10)\n\n # Header\n worksheet.write(0, 0, \"Navn\", format_header)\n worksheet.write(0, 1, \"Brukernavn\", format_header)\n worksheet.write(0, 2, \"Kortnummer\", format_header)\n worksheet.write(0, 3, \"Dato\", format_header)\n\n for index, registration in enumerate(course_registrations):\n worksheet.write(index + 1, 0, registration.name, format_row)\n worksheet.write(index + 1, 1, registration.username, format_row)\n worksheet.write(index + 1, 2, registration.card_number if registration.card_number is not None else \"\",\n format_row)\n worksheet.write(index + 1, 3, registration.date.strftime(\"%Y-%m-%d\"), format_row)\n\n workbook.close()\n output_file.seek(0)\n\n response = HttpResponse(output_file.read(),\n content_type=\"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\")\n\n response['Content-Disposition'] = 'attachment; filename=\"Kursdeltagere.xlsx\"'\n\n return response\n", "path": "make_queue/views/admin/course.py"}]}
1680
108
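The bug in this record is a single missing-underscore typo: Django auto-generates permission codenames as `<action>_<lowercased model name>`, so `Printer3DCourse` produces `delete_printer3dcourse`. A sketch of deriving the codename programmatically instead of hard-coding it (`delete_permission_for` is a hypothetical helper, and the usage comment assumes the `make_queue` project is importable):

```python
# Sketch: derive Django's auto-generated permission codename instead of
# hard-coding it, avoiding typos like "delete_printer3d_course".
from django.contrib.auth import get_permission_codename


def delete_permission_for(model) -> str:
    """Return the '<app_label>.delete_<modelname>' permission string."""
    opts = model._meta
    return f"{opts.app_label}.{get_permission_codename('delete', opts)}"


# Hypothetical usage inside DeleteRegistrationView (assumes the make_queue
# app from this record is installed and importable):
#     from make_queue.models.course import Printer3DCourse
#     permission_required = (delete_permission_for(Printer3DCourse),)
```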
gh_patches_debug_10211
rasdani/github-patches
git_diff
google__clusterfuzz-189
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- linting in CI works differently than locally I'm pretty sure it is pylint 1.9.4. See https://github.com/google/clusterfuzz/pull/185 for a discrepancy I noticed between running pylint locally and running it in CI. When I upgraded my local copy of pylint to 1.9.4 I was able to discover the issue showing up in CI. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `src/local/butler/lint.py` Content: ``` 1 # Copyright 2019 Google LLC 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 """Lint changed code in current branch.""" 15 16 import os 17 import sys 18 19 from local.butler import common 20 21 22 def execute(_): 23 """Lint changed code.""" 24 if "GOOGLE_CLOUDBUILD" in os.environ: 25 # Explicitly compare against master if we're running on the CI 26 _, output = common.execute('git diff --name-only master FETCH_HEAD') 27 else: 28 _, output = common.execute('git diff --name-only FETCH_HEAD') 29 30 py_changed_file_paths = [ 31 f for f in output.splitlines() if f.endswith('.py') and 32 # Exclude auto-generated files. 33 not f.endswith('_pb2.py') and not f.endswith('_pb2_grpc.py') 34 ] 35 go_changed_file_paths = [f for f in output.splitlines() if f.endswith('.go')] 36 37 for file_path in py_changed_file_paths: 38 if os.path.exists(file_path): 39 common.execute('pylint ' + file_path) 40 common.execute('yapf -d ' + file_path) 41 42 golint_path = os.path.join('local', 'bin', 'golint') 43 for file_path in go_changed_file_paths: 44 if os.path.exists(file_path): 45 common.execute(golint_path + ' ' + file_path) 46 47 _, output = common.execute('gofmt -d ' + file_path) 48 if output.strip(): 49 sys.exit(1) 50 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/src/local/butler/lint.py b/src/local/butler/lint.py --- a/src/local/butler/lint.py +++ b/src/local/butler/lint.py @@ -16,11 +16,15 @@ import os import sys +from local.butler import appengine from local.butler import common def execute(_): """Lint changed code.""" + pythonpath = os.getenv('PYTHONPATH', '') + os.environ['PYTHONPATH'] = appengine.find_sdk_path() + ':' + pythonpath + if "GOOGLE_CLOUDBUILD" in os.environ: # Explicitly compare against master if we're running on the CI _, output = common.execute('git diff --name-only master FETCH_HEAD')
{"golden_diff": "diff --git a/src/local/butler/lint.py b/src/local/butler/lint.py\n--- a/src/local/butler/lint.py\n+++ b/src/local/butler/lint.py\n@@ -16,11 +16,15 @@\n import os\n import sys\n \n+from local.butler import appengine\n from local.butler import common\n \n \n def execute(_):\n \"\"\"Lint changed code.\"\"\"\n+ pythonpath = os.getenv('PYTHONPATH', '')\n+ os.environ['PYTHONPATH'] = appengine.find_sdk_path() + ':' + pythonpath\n+\n if \"GOOGLE_CLOUDBUILD\" in os.environ:\n # Explicitly compare against master if we're running on the CI\n _, output = common.execute('git diff --name-only master FETCH_HEAD')\n", "issue": "linting in CI works differently than locally\nI'm pretty sure it is pylint 1.9.4.\r\nSee https://github.com/google/clusterfuzz/pull/185 for a discrepancy I noticed between running pylint locally and running it in CI.\r\n\r\nWhen I upgraded my local copy of pylint to 1.9.4 I was able to discover the issue showing up in CI.\n", "before_files": [{"content": "# Copyright 2019 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Lint changed code in current branch.\"\"\"\n\nimport os\nimport sys\n\nfrom local.butler import common\n\n\ndef execute(_):\n \"\"\"Lint changed code.\"\"\"\n if \"GOOGLE_CLOUDBUILD\" in os.environ:\n # Explicitly compare against master if we're running on the CI\n _, output = common.execute('git diff --name-only master FETCH_HEAD')\n else:\n _, output = common.execute('git diff --name-only FETCH_HEAD')\n\n py_changed_file_paths = [\n f for f in output.splitlines() if f.endswith('.py') and\n # Exclude auto-generated files.\n not f.endswith('_pb2.py') and not f.endswith('_pb2_grpc.py')\n ]\n go_changed_file_paths = [f for f in output.splitlines() if f.endswith('.go')]\n\n for file_path in py_changed_file_paths:\n if os.path.exists(file_path):\n common.execute('pylint ' + file_path)\n common.execute('yapf -d ' + file_path)\n\n golint_path = os.path.join('local', 'bin', 'golint')\n for file_path in go_changed_file_paths:\n if os.path.exists(file_path):\n common.execute(golint_path + ' ' + file_path)\n\n _, output = common.execute('gofmt -d ' + file_path)\n if output.strip():\n sys.exit(1)\n", "path": "src/local/butler/lint.py"}], "after_files": [{"content": "# Copyright 2019 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Lint changed code in current branch.\"\"\"\n\nimport os\nimport sys\n\nfrom local.butler import appengine\nfrom local.butler import common\n\n\ndef execute(_):\n \"\"\"Lint changed code.\"\"\"\n 
pythonpath = os.getenv('PYTHONPATH', '')\n os.environ['PYTHONPATH'] = appengine.find_sdk_path() + ':' + pythonpath\n\n if \"GOOGLE_CLOUDBUILD\" in os.environ:\n # Explicitly compare against master if we're running on the CI\n _, output = common.execute('git diff --name-only master FETCH_HEAD')\n else:\n _, output = common.execute('git diff --name-only FETCH_HEAD')\n\n py_changed_file_paths = [\n f for f in output.splitlines() if f.endswith('.py') and\n # Exclude auto-generated files.\n not f.endswith('_pb2.py') and not f.endswith('_pb2_grpc.py')\n ]\n go_changed_file_paths = [f for f in output.splitlines() if f.endswith('.go')]\n\n for file_path in py_changed_file_paths:\n if os.path.exists(file_path):\n common.execute('pylint ' + file_path)\n common.execute('yapf -d ' + file_path)\n\n golint_path = os.path.join('local', 'bin', 'golint')\n for file_path in go_changed_file_paths:\n if os.path.exists(file_path):\n common.execute(golint_path + ' ' + file_path)\n\n _, output = common.execute('gofmt -d ' + file_path)\n if output.strip():\n sys.exit(1)\n", "path": "src/local/butler/lint.py"}]}
863
171
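The fix above works by putting the App Engine SDK on `PYTHONPATH` before pylint runs, so imports resolve the same way in CI and locally. Below is a generic sketch of that pattern; `find_extra_path` is a placeholder for ClusterFuzz's `appengine.find_sdk_path()`, and unlike the golden diff (which mutates `os.environ` directly) this version passes a copied environment to the subprocess:

```python
# Sketch: let a linter subprocess resolve extra packages by extending
# PYTHONPATH first. find_extra_path() is a placeholder for ClusterFuzz's
# appengine.find_sdk_path(); the real helper lives in src/local/butler/.
import os
import subprocess


def find_extra_path() -> str:
    return "/opt/google_appengine"  # placeholder path, not the real discovery


def lint_file(file_path: str) -> int:
    env = os.environ.copy()
    existing = env.get("PYTHONPATH", "")
    # Prepend so the SDK takes precedence, mirroring the golden diff's
    # `find_sdk_path() + ':' + pythonpath` ordering.
    env["PYTHONPATH"] = find_extra_path() + (":" + existing if existing else "")
    return subprocess.call(["pylint", file_path], env=env)
```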
gh_patches_debug_281
rasdani/github-patches
git_diff
vega__altair-3387
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- minimum pyarrow version enforced even if pandas is installed The error we are facing in an environment says: ```python RuntimeError: The pyarrow package must be version 11.0.0 or greater. Found version 6.0.1 ``` And is caused by these lines: https://github.com/altair-viz/altair/blob/main/altair/utils/core.py#L591-L592 ```python # if data is specified and type is not, infer type from data if "type" not in attrs: if pyarrow_available() and data is not None and isinstance(data, DataFrameLike): ... elif isinstance(data, pd.DataFrame): # Fallback if pyarrow is not installed or if pandas is older than 1.5 ``` In that particular environment pandas is installed by default and we are not able to upgrade pyarrow. Now the altair specifications errors as the code never tries the pandas approach as it has found a pyarrow version that is too old. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `altair/utils/_importers.py` Content: ``` 1 from types import ModuleType 2 from packaging.version import Version 3 from importlib.metadata import version as importlib_version 4 5 6 def import_vegafusion() -> ModuleType: 7 min_version = "1.5.0" 8 try: 9 version = importlib_version("vegafusion") 10 embed_version = importlib_version("vegafusion-python-embed") 11 if version != embed_version or Version(version) < Version(min_version): 12 raise RuntimeError( 13 "The versions of the vegafusion and vegafusion-python-embed packages must match\n" 14 f"and must be version {min_version} or greater.\n" 15 f"Found:\n" 16 f" - vegafusion=={version}\n" 17 f" - vegafusion-python-embed=={embed_version}\n" 18 ) 19 import vegafusion as vf # type: ignore 20 21 return vf 22 except ImportError as err: 23 raise ImportError( 24 'The "vegafusion" data transformer and chart.transformed_data feature requires\n' 25 f"version {min_version} or greater of the 'vegafusion-python-embed' and 'vegafusion' packages.\n" 26 "These can be installed with pip using:\n" 27 f' pip install "vegafusion[embed]>={min_version}"\n' 28 "Or with conda using:\n" 29 f' conda install -c conda-forge "vegafusion-python-embed>={min_version}" ' 30 f'"vegafusion>={min_version}"\n\n' 31 f"ImportError: {err.args[0]}" 32 ) from err 33 34 35 def import_vl_convert() -> ModuleType: 36 min_version = "1.3.0" 37 try: 38 version = importlib_version("vl-convert-python") 39 if Version(version) < Version(min_version): 40 raise RuntimeError( 41 f"The vl-convert-python package must be version {min_version} or greater. " 42 f"Found version {version}" 43 ) 44 import vl_convert as vlc 45 46 return vlc 47 except ImportError as err: 48 raise ImportError( 49 f"The vl-convert Vega-Lite compiler and file export feature requires\n" 50 f"version {min_version} or greater of the 'vl-convert-python' package. 
\n" 51 f"This can be installed with pip using:\n" 52 f' pip install "vl-convert-python>={min_version}"\n' 53 "or conda:\n" 54 f' conda install -c conda-forge "vl-convert-python>={min_version}"\n\n' 55 f"ImportError: {err.args[0]}" 56 ) from err 57 58 59 def vl_version_for_vl_convert() -> str: 60 from ..vegalite import SCHEMA_VERSION 61 62 # Compute VlConvert's vl_version string (of the form 'v5_2') 63 # from SCHEMA_VERSION (of the form 'v5.2.0') 64 return "_".join(SCHEMA_VERSION.split(".")[:2]) 65 66 67 def import_pyarrow_interchange() -> ModuleType: 68 min_version = "11.0.0" 69 try: 70 version = importlib_version("pyarrow") 71 72 if Version(version) < Version(min_version): 73 raise RuntimeError( 74 f"The pyarrow package must be version {min_version} or greater. " 75 f"Found version {version}" 76 ) 77 import pyarrow.interchange as pi 78 79 return pi 80 except ImportError as err: 81 raise ImportError( 82 f"Usage of the DataFrame Interchange Protocol requires\n" 83 f"version {min_version} or greater of the pyarrow package. \n" 84 f"This can be installed with pip using:\n" 85 f' pip install "pyarrow>={min_version}"\n' 86 "or conda:\n" 87 f' conda install -c conda-forge "pyarrow>={min_version}"\n\n' 88 f"ImportError: {err.args[0]}" 89 ) from err 90 91 92 def pyarrow_available() -> bool: 93 try: 94 import_pyarrow_interchange() 95 return True 96 except ImportError: 97 return False 98 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/altair/utils/_importers.py b/altair/utils/_importers.py --- a/altair/utils/_importers.py +++ b/altair/utils/_importers.py @@ -93,5 +93,5 @@ try: import_pyarrow_interchange() return True - except ImportError: + except (ImportError, RuntimeError): return False
{"golden_diff": "diff --git a/altair/utils/_importers.py b/altair/utils/_importers.py\n--- a/altair/utils/_importers.py\n+++ b/altair/utils/_importers.py\n@@ -93,5 +93,5 @@\n try:\n import_pyarrow_interchange()\n return True\n- except ImportError:\n+ except (ImportError, RuntimeError):\n return False\n", "issue": "minimum pyarrow version enforced even if pandas is installed\nThe error we are facing in an environment says:\r\n```python\r\nRuntimeError: The pyarrow package must be version 11.0.0 or greater. Found version 6.0.1\r\n```\r\n\r\nAnd is caused by these lines:\r\nhttps://github.com/altair-viz/altair/blob/main/altair/utils/core.py#L591-L592\r\n\r\n```python\r\n # if data is specified and type is not, infer type from data\r\n if \"type\" not in attrs:\r\n if pyarrow_available() and data is not None and isinstance(data, DataFrameLike):\r\n ...\r\n\r\n elif isinstance(data, pd.DataFrame):\r\n # Fallback if pyarrow is not installed or if pandas is older than 1.5\r\n```\r\nIn that particular environment pandas is installed by default and we are not able to upgrade pyarrow. \r\n\r\nNow the altair specifications errors as the code never tries the pandas approach as it has found a pyarrow version that is too old. \r\n\n", "before_files": [{"content": "from types import ModuleType\nfrom packaging.version import Version\nfrom importlib.metadata import version as importlib_version\n\n\ndef import_vegafusion() -> ModuleType:\n min_version = \"1.5.0\"\n try:\n version = importlib_version(\"vegafusion\")\n embed_version = importlib_version(\"vegafusion-python-embed\")\n if version != embed_version or Version(version) < Version(min_version):\n raise RuntimeError(\n \"The versions of the vegafusion and vegafusion-python-embed packages must match\\n\"\n f\"and must be version {min_version} or greater.\\n\"\n f\"Found:\\n\"\n f\" - vegafusion=={version}\\n\"\n f\" - vegafusion-python-embed=={embed_version}\\n\"\n )\n import vegafusion as vf # type: ignore\n\n return vf\n except ImportError as err:\n raise ImportError(\n 'The \"vegafusion\" data transformer and chart.transformed_data feature requires\\n'\n f\"version {min_version} or greater of the 'vegafusion-python-embed' and 'vegafusion' packages.\\n\"\n \"These can be installed with pip using:\\n\"\n f' pip install \"vegafusion[embed]>={min_version}\"\\n'\n \"Or with conda using:\\n\"\n f' conda install -c conda-forge \"vegafusion-python-embed>={min_version}\" '\n f'\"vegafusion>={min_version}\"\\n\\n'\n f\"ImportError: {err.args[0]}\"\n ) from err\n\n\ndef import_vl_convert() -> ModuleType:\n min_version = \"1.3.0\"\n try:\n version = importlib_version(\"vl-convert-python\")\n if Version(version) < Version(min_version):\n raise RuntimeError(\n f\"The vl-convert-python package must be version {min_version} or greater. \"\n f\"Found version {version}\"\n )\n import vl_convert as vlc\n\n return vlc\n except ImportError as err:\n raise ImportError(\n f\"The vl-convert Vega-Lite compiler and file export feature requires\\n\"\n f\"version {min_version} or greater of the 'vl-convert-python' package. 
\\n\"\n f\"This can be installed with pip using:\\n\"\n f' pip install \"vl-convert-python>={min_version}\"\\n'\n \"or conda:\\n\"\n f' conda install -c conda-forge \"vl-convert-python>={min_version}\"\\n\\n'\n f\"ImportError: {err.args[0]}\"\n ) from err\n\n\ndef vl_version_for_vl_convert() -> str:\n from ..vegalite import SCHEMA_VERSION\n\n # Compute VlConvert's vl_version string (of the form 'v5_2')\n # from SCHEMA_VERSION (of the form 'v5.2.0')\n return \"_\".join(SCHEMA_VERSION.split(\".\")[:2])\n\n\ndef import_pyarrow_interchange() -> ModuleType:\n min_version = \"11.0.0\"\n try:\n version = importlib_version(\"pyarrow\")\n\n if Version(version) < Version(min_version):\n raise RuntimeError(\n f\"The pyarrow package must be version {min_version} or greater. \"\n f\"Found version {version}\"\n )\n import pyarrow.interchange as pi\n\n return pi\n except ImportError as err:\n raise ImportError(\n f\"Usage of the DataFrame Interchange Protocol requires\\n\"\n f\"version {min_version} or greater of the pyarrow package. \\n\"\n f\"This can be installed with pip using:\\n\"\n f' pip install \"pyarrow>={min_version}\"\\n'\n \"or conda:\\n\"\n f' conda install -c conda-forge \"pyarrow>={min_version}\"\\n\\n'\n f\"ImportError: {err.args[0]}\"\n ) from err\n\n\ndef pyarrow_available() -> bool:\n try:\n import_pyarrow_interchange()\n return True\n except ImportError:\n return False\n", "path": "altair/utils/_importers.py"}], "after_files": [{"content": "from types import ModuleType\nfrom packaging.version import Version\nfrom importlib.metadata import version as importlib_version\n\n\ndef import_vegafusion() -> ModuleType:\n min_version = \"1.5.0\"\n try:\n version = importlib_version(\"vegafusion\")\n embed_version = importlib_version(\"vegafusion-python-embed\")\n if version != embed_version or Version(version) < Version(min_version):\n raise RuntimeError(\n \"The versions of the vegafusion and vegafusion-python-embed packages must match\\n\"\n f\"and must be version {min_version} or greater.\\n\"\n f\"Found:\\n\"\n f\" - vegafusion=={version}\\n\"\n f\" - vegafusion-python-embed=={embed_version}\\n\"\n )\n import vegafusion as vf # type: ignore\n\n return vf\n except ImportError as err:\n raise ImportError(\n 'The \"vegafusion\" data transformer and chart.transformed_data feature requires\\n'\n f\"version {min_version} or greater of the 'vegafusion-python-embed' and 'vegafusion' packages.\\n\"\n \"These can be installed with pip using:\\n\"\n f' pip install \"vegafusion[embed]>={min_version}\"\\n'\n \"Or with conda using:\\n\"\n f' conda install -c conda-forge \"vegafusion-python-embed>={min_version}\" '\n f'\"vegafusion>={min_version}\"\\n\\n'\n f\"ImportError: {err.args[0]}\"\n ) from err\n\n\ndef import_vl_convert() -> ModuleType:\n min_version = \"1.3.0\"\n try:\n version = importlib_version(\"vl-convert-python\")\n if Version(version) < Version(min_version):\n raise RuntimeError(\n f\"The vl-convert-python package must be version {min_version} or greater. \"\n f\"Found version {version}\"\n )\n import vl_convert as vlc\n\n return vlc\n except ImportError as err:\n raise ImportError(\n f\"The vl-convert Vega-Lite compiler and file export feature requires\\n\"\n f\"version {min_version} or greater of the 'vl-convert-python' package. 
\\n\"\n f\"This can be installed with pip using:\\n\"\n f' pip install \"vl-convert-python>={min_version}\"\\n'\n \"or conda:\\n\"\n f' conda install -c conda-forge \"vl-convert-python>={min_version}\"\\n\\n'\n f\"ImportError: {err.args[0]}\"\n ) from err\n\n\ndef vl_version_for_vl_convert() -> str:\n from ..vegalite import SCHEMA_VERSION\n\n # Compute VlConvert's vl_version string (of the form 'v5_2')\n # from SCHEMA_VERSION (of the form 'v5.2.0')\n return \"_\".join(SCHEMA_VERSION.split(\".\")[:2])\n\n\ndef import_pyarrow_interchange() -> ModuleType:\n min_version = \"11.0.0\"\n try:\n version = importlib_version(\"pyarrow\")\n\n if Version(version) < Version(min_version):\n raise RuntimeError(\n f\"The pyarrow package must be version {min_version} or greater. \"\n f\"Found version {version}\"\n )\n import pyarrow.interchange as pi\n\n return pi\n except ImportError as err:\n raise ImportError(\n f\"Usage of the DataFrame Interchange Protocol requires\\n\"\n f\"version {min_version} or greater of the pyarrow package. \\n\"\n f\"This can be installed with pip using:\\n\"\n f' pip install \"pyarrow>={min_version}\"\\n'\n \"or conda:\\n\"\n f' conda install -c conda-forge \"pyarrow>={min_version}\"\\n\\n'\n f\"ImportError: {err.args[0]}\"\n ) from err\n\n\ndef pyarrow_available() -> bool:\n try:\n import_pyarrow_interchange()\n return True\n except (ImportError, RuntimeError):\n return False\n", "path": "altair/utils/_importers.py"}]}
1577
88
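The root cause in this record: `import_pyarrow_interchange()` raises `ImportError` when pyarrow is absent but `RuntimeError` when it is installed yet too old, and the availability probe only caught the former. A minimal self-contained reproduction (`check_pyarrow` is a stand-in for the real helper, not altair's code):

```python
# Sketch: why the availability probe must catch RuntimeError as well as
# ImportError. check_pyarrow() is a stand-in for altair's
# import_pyarrow_interchange(), which raises RuntimeError for old versions.
from typing import Optional


def check_pyarrow(installed_version: Optional[str]) -> None:
    if installed_version is None:
        raise ImportError("pyarrow is not installed")
    if tuple(map(int, installed_version.split("."))) < (11, 0, 0):
        # Installed but too old: this path raises RuntimeError, not ImportError.
        raise RuntimeError(f"pyarrow must be >= 11.0.0, found {installed_version}")


def pyarrow_available(installed_version: Optional[str]) -> bool:
    try:
        check_pyarrow(installed_version)
        return True
    except (ImportError, RuntimeError):  # the golden diff widens this clause
        return False


assert pyarrow_available(None) is False       # not installed
assert pyarrow_available("6.0.1") is False    # too old: previously escaped as RuntimeError
assert pyarrow_available("11.0.0") is True
```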
gh_patches_debug_2526
rasdani/github-patches
git_diff
scikit-hep__pyhf-1049
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Fix Fixture use in pytest # Description In pytest `v4.0.0` the [direct call of a fixture results in an error](https://travis-ci.org/diana-hep/pyhf/jobs/455364238#L661-L669). ``` ==================================== ERRORS ==================================== __________________ ERROR collecting tests/test_validation.py ___________________ tests/test_validation.py:13: in <module> def spec_1bin_shapesys(source=source_1bin_example1()): E _pytest.warning_types.RemovedInPytest4Warning: Fixture "source_1bin_example1" called directly. Fixtures are not meant to be called directly, are created automatically when test functions request them as parameters. See https://docs.pytest.org/en/latest/fixture.html for more information. __________________ ERROR collecting tests/test_validation.py ___________________ tests/test_validation.py:13: in <module> def spec_1bin_shapesys(source=source_1bin_example1()): E _pytest.warning_types.RemovedInPytest4Warning: Fixture "source_1bin_example1" called directly. Fixtures are not meant to be called directly, are created automatically when test functions request them as parameters. See https://docs.pytest.org/en/latest/fixture.html for more information. ``` This requires changing the way that pytest is used a bit. This was noticed in preparation of PR #369 # Checklist - [x] Run `git fetch` to get the most up to date version of `master` - [x] Searched through existing Issues to confirm this is not a duplicate issue - [x] Filled out the Description, Expected Behavior, Actual Behavior, and Steps to Reproduce sections above or have edited/removed them in a way that fully describes the issue --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. 
--- BEGIN FILES --- Path: `setup.py` Content: ``` 1 from setuptools import setup 2 3 extras_require = { 4 'shellcomplete': ['click_completion'], 5 'tensorflow': [ 6 'tensorflow~=2.2.0', # TensorFlow minor releases are as volatile as major 7 'tensorflow-probability~=0.10.0', 8 ], 9 'torch': ['torch~=1.2'], 10 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'], 11 'xmlio': ['uproot~=3.6'], # Future proof against uproot4 API changes 12 'minuit': ['iminuit~=1.4,>=1.4.3'], # Use "name" keyword in MINUIT optimizer 13 } 14 extras_require['backends'] = sorted( 15 set( 16 extras_require['tensorflow'] 17 + extras_require['torch'] 18 + extras_require['jax'] 19 + extras_require['minuit'] 20 ) 21 ) 22 extras_require['contrib'] = sorted(set(['matplotlib'])) 23 extras_require['lint'] = sorted(set(['pyflakes', 'black'])) 24 25 extras_require['test'] = sorted( 26 set( 27 extras_require['backends'] 28 + extras_require['xmlio'] 29 + extras_require['contrib'] 30 + extras_require['shellcomplete'] 31 + [ 32 'pytest~=3.5', 33 'pytest-cov>=2.5.1', 34 'pytest-mock', 35 'pytest-benchmark[histogram]', 36 'pytest-console-scripts', 37 'pytest-mpl', 38 'pydocstyle', 39 'coverage>=4.0', # coveralls 40 'papermill~=2.0', 41 'nteract-scrapbook~=0.2', 42 'jupyter', 43 'uproot~=3.3', 44 'graphviz', 45 'jsonpatch', 46 ] 47 ) 48 ) 49 extras_require['docs'] = sorted( 50 set( 51 [ 52 'sphinx>=3.1.2', 53 'sphinxcontrib-bibtex', 54 'sphinx-click', 55 'sphinx_rtd_theme', 56 'nbsphinx', 57 'ipywidgets', 58 'sphinx-issues', 59 'sphinx-copybutton>0.2.9', 60 ] 61 ) 62 ) 63 extras_require['develop'] = sorted( 64 set( 65 extras_require['docs'] 66 + extras_require['lint'] 67 + extras_require['test'] 68 + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'check-manifest', 'twine'] 69 ) 70 ) 71 extras_require['complete'] = sorted(set(sum(extras_require.values(), []))) 72 73 74 setup( 75 extras_require=extras_require, 76 use_scm_version=lambda: {'local_scheme': lambda version: ''}, 77 ) 78 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ + extras_require['contrib'] + extras_require['shellcomplete'] + [ - 'pytest~=3.5', + 'pytest~=6.0', 'pytest-cov>=2.5.1', 'pytest-mock', 'pytest-benchmark[histogram]',
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -29,7 +29,7 @@\n + extras_require['contrib']\n + extras_require['shellcomplete']\n + [\n- 'pytest~=3.5',\n+ 'pytest~=6.0',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n", "issue": "Fix Fixture use in pytest\n# Description\r\n\r\nIn pytest `v4.0.0` the [direct call of a fixture results in an error](https://travis-ci.org/diana-hep/pyhf/jobs/455364238#L661-L669). \r\n\r\n```\r\n==================================== ERRORS ====================================\r\n__________________ ERROR collecting tests/test_validation.py ___________________\r\ntests/test_validation.py:13: in <module>\r\n def spec_1bin_shapesys(source=source_1bin_example1()):\r\nE _pytest.warning_types.RemovedInPytest4Warning: Fixture \"source_1bin_example1\" called directly. Fixtures are not meant to be called directly, are created automatically when test functions request them as parameters. See https://docs.pytest.org/en/latest/fixture.html for more information.\r\n__________________ ERROR collecting tests/test_validation.py ___________________\r\ntests/test_validation.py:13: in <module>\r\n def spec_1bin_shapesys(source=source_1bin_example1()):\r\nE _pytest.warning_types.RemovedInPytest4Warning: Fixture \"source_1bin_example1\" called directly. Fixtures are not meant to be called directly, are created automatically when test functions request them as parameters. See https://docs.pytest.org/en/latest/fixture.html for more information.\r\n```\r\n\r\nThis requires changing the way that pytest is used a bit.\r\n\r\nThis was noticed in preparation of PR #369 \r\n\r\n# Checklist\r\n\r\n- [x] Run `git fetch` to get the most up to date version of `master`\r\n- [x] Searched through existing Issues to confirm this is not a duplicate issue\r\n- [x] Filled out the Description, Expected Behavior, Actual Behavior, and Steps to Reproduce sections above or have edited/removed them in a way that fully describes the issue\r\n\n", "before_files": [{"content": "from setuptools import setup\n\nextras_require = {\n 'shellcomplete': ['click_completion'],\n 'tensorflow': [\n 'tensorflow~=2.2.0', # TensorFlow minor releases are as volatile as major\n 'tensorflow-probability~=0.10.0',\n ],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n 'xmlio': ['uproot~=3.6'], # Future proof against uproot4 API changes\n 'minuit': ['iminuit~=1.4,>=1.4.3'], # Use \"name\" keyword in MINUIT optimizer\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted(set(['matplotlib']))\nextras_require['lint'] = sorted(set(['pyflakes', 'black']))\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + extras_require['shellcomplete']\n + [\n 'pytest~=3.5',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'jupyter',\n 'uproot~=3.3',\n 'graphviz',\n 'jsonpatch',\n ]\n )\n)\nextras_require['docs'] = sorted(\n set(\n [\n 'sphinx>=3.1.2',\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n ]\n )\n)\nextras_require['develop'] = sorted(\n set(\n 
extras_require['docs']\n + extras_require['lint']\n + extras_require['test']\n + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'check-manifest', 'twine']\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup\n\nextras_require = {\n 'shellcomplete': ['click_completion'],\n 'tensorflow': [\n 'tensorflow~=2.2.0', # TensorFlow minor releases are as volatile as major\n 'tensorflow-probability~=0.10.0',\n ],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n 'xmlio': ['uproot~=3.6'], # Future proof against uproot4 API changes\n 'minuit': ['iminuit~=1.4,>=1.4.3'], # Use \"name\" keyword in MINUIT optimizer\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted(set(['matplotlib']))\nextras_require['lint'] = sorted(set(['pyflakes', 'black']))\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + extras_require['shellcomplete']\n + [\n 'pytest~=6.0',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'jupyter',\n 'uproot~=3.3',\n 'graphviz',\n 'jsonpatch',\n ]\n )\n)\nextras_require['docs'] = sorted(\n set(\n [\n 'sphinx>=3.1.2',\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n ]\n )\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['lint']\n + extras_require['test']\n + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'check-manifest', 'twine']\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py"}]}
1378
95
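The version bump matters because pytest 4+ forbids calling fixtures directly, exactly as the traceback in the issue shows. A sketch of the broken pattern versus the supported one (the fixture payload here is illustrative, not pyhf's real test data):

```python
# Sketch of the fixture-usage change the new pytest pin enforces; the
# fixture payload below is illustrative, not pyhf's real data.
import pytest


@pytest.fixture
def source_1bin_example1():
    return {"bindata": {"data": [120.0, 180.0]}}


# Broken under pytest >= 4 (the RemovedInPytest4Warning from the issue,
# later a hard error): fixtures must never be called directly.
#     def spec_1bin_shapesys(source=source_1bin_example1()):
#         ...


def test_spec_1bin_shapesys(source_1bin_example1):
    # Supported: pytest injects the fixture value as a parameter.
    assert "bindata" in source_1bin_example1
```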
gh_patches_debug_10876
rasdani/github-patches
git_diff
privacyidea__privacyidea-2280
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Support other keytypes with SSHKey Token We would like to import other public key types like ecdsa into the SSHKey token type. * [x] The UI and the /token/init API needs to allow this. * [x] We also need to check, if the keytype is saved and returned correctly by GET /authitem --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `privacyidea/lib/tokens/sshkeytoken.py` Content: ``` 1 # -*- coding: utf-8 -*- 2 # 3 # privacyIDEA 4 # Jul 18, 2014 Cornelius Kölbel 5 # License: AGPLv3 6 # contact: http://www.privacyidea.org 7 # 8 # This code is free software; you can redistribute it and/or 9 # modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE 10 # License as published by the Free Software Foundation; either 11 # version 3 of the License, or any later version. 12 # 13 # This code is distributed in the hope that it will be useful, 14 # but WITHOUT ANY WARRANTY; without even the implied warranty of 15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 16 # GNU AFFERO GENERAL PUBLIC LICENSE for more details. 17 # 18 # You should have received a copy of the GNU Affero General Public 19 # License along with this program. If not, see <http://www.gnu.org/licenses/>. 20 # 21 __doc__="""The SSHKeyTokenClass provides a TokenClass that stores the public 22 SSH key and can give the public SSH key via the getotp function. 23 This can be used to manage SSH keys and retrieve the public ssh key 24 to import it to authorized keys files. 25 26 The code is tested in tests/test_lib_tokens_ssh 27 """ 28 29 import logging 30 from privacyidea.lib import _ 31 from privacyidea.api.lib.utils import getParam 32 from privacyidea.lib.log import log_with 33 from privacyidea.lib.tokenclass import TokenClass 34 from privacyidea.lib.policy import SCOPE, ACTION, GROUP 35 36 log = logging.getLogger(__name__) 37 38 39 optional = True 40 required = False 41 42 43 ##TODO: We should save a fingerprint of the SSH Key in the encrypted OTP 44 # field, so that we can be sure, that the public ssh key was not changed in 45 # the database! 46 47 48 class SSHkeyTokenClass(TokenClass): 49 """ 50 The SSHKeyTokenClass provides a TokenClass that stores the public 51 SSH key and can give the public SSH key via the getotp function. 52 This can be used to manage SSH keys and retrieve the public ssh key 53 to import it to authorized keys files. 54 """ 55 mode = ['authenticate'] 56 using_pin = False 57 58 def __init__(self, db_token): 59 TokenClass.__init__(self, db_token) 60 self.set_type(u"sshkey") 61 62 @staticmethod 63 def get_class_type(): 64 return "sshkey" 65 66 @staticmethod 67 def get_class_prefix(): 68 return "SSHK" 69 70 @staticmethod 71 @log_with(log) 72 def get_class_info(key=None, ret='all'): 73 """ 74 returns a subtree of the token definition 75 76 :param key: subsection identifier 77 :type key: string 78 :param ret: default return value, if nothing is found 79 :type ret: user defined 80 :return: subsection if key exists or user defined 81 :rtype: dictionary 82 """ 83 res = {'type': 'sshkey', 84 'title': 'SSHkey Token', 85 'description': _('SSH Public Key: The public SSH key.'), 86 'config': {}, 87 'user': ['enroll'], 88 # This tokentype is enrollable in the UI for... 
89 'ui_enroll': ["admin", "user"], 90 'policy': { 91 SCOPE.ENROLL: { 92 ACTION.MAXTOKENUSER: { 93 'type': 'int', 94 'desc': _("The user may only have this maximum number of SSH keys assigned."), 95 'group': GROUP.TOKEN 96 }, 97 ACTION.MAXACTIVETOKENUSER: { 98 'type': 'int', 99 'desc': _( 100 "The user may only have this maximum number of active SSH keys assigned."), 101 'group': GROUP.TOKEN 102 } 103 } 104 }, 105 } 106 if key: 107 ret = res.get(key, {}) 108 else: 109 if ret == 'all': 110 ret = res 111 112 return ret 113 114 def update(self, param): 115 """ 116 The key holds the public ssh key and this is required 117 118 The key probably is of the form "ssh-rsa BASE64 comment" 119 """ 120 # We need to save the token, so that we can later add the tokeninfo 121 # Otherwise we might not have created the DB entry, yet and we would 122 # be missing the token.id 123 self.token.save() 124 125 getParam(param, "sshkey", required) 126 127 key_elem = param.get("sshkey").split(" ", 2) 128 if len(key_elem) != 3 or key_elem[0] != "ssh-rsa": 129 raise Exception("The key must consist of 'ssh-rsa BASE64 comment'") 130 131 key_type = key_elem[0] 132 key = key_elem[1] 133 key_comment = key_elem[2] 134 135 # convert key to hex 136 self.add_tokeninfo("ssh_key", key, value_type="password") 137 self.add_tokeninfo("ssh_type", key_type) 138 self.add_tokeninfo("ssh_comment", key_comment) 139 140 # call the parents function 141 TokenClass.update(self, param) 142 143 @log_with(log) 144 def get_sshkey(self): 145 """ 146 returns the public SSH key 147 148 :return: SSH pub key 149 :rtype: string 150 """ 151 ti = self.get_tokeninfo() 152 key_type = ti.get("ssh_type") 153 key_comment = ti.get("ssh_comment") 154 # get the ssh key directly, otherwise it will not be decrypted 155 sshkey = self.get_tokeninfo("ssh_key") 156 return u"{0!s} {1!s} {2!s}".format(key_type, sshkey, key_comment) 157 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/privacyidea/lib/tokens/sshkeytoken.py b/privacyidea/lib/tokens/sshkeytoken.py --- a/privacyidea/lib/tokens/sshkeytoken.py +++ b/privacyidea/lib/tokens/sshkeytoken.py @@ -125,8 +125,10 @@ getParam(param, "sshkey", required) key_elem = param.get("sshkey").split(" ", 2) - if len(key_elem) != 3 or key_elem[0] != "ssh-rsa": - raise Exception("The key must consist of 'ssh-rsa BASE64 comment'") + if len(key_elem) != 3: + raise Exception("The key must consist of 'ssh-keytype BASE64 comment'") + if key_elem[0] not in ["ssh-rsa", "ssh-ed25519", "ecdsa-sha2-nistp256"]: + raise Exception("The keytype you specified is not supported.") key_type = key_elem[0] key = key_elem[1]
{"golden_diff": "diff --git a/privacyidea/lib/tokens/sshkeytoken.py b/privacyidea/lib/tokens/sshkeytoken.py\n--- a/privacyidea/lib/tokens/sshkeytoken.py\n+++ b/privacyidea/lib/tokens/sshkeytoken.py\n@@ -125,8 +125,10 @@\n getParam(param, \"sshkey\", required)\n \n key_elem = param.get(\"sshkey\").split(\" \", 2)\n- if len(key_elem) != 3 or key_elem[0] != \"ssh-rsa\":\n- raise Exception(\"The key must consist of 'ssh-rsa BASE64 comment'\")\n+ if len(key_elem) != 3:\n+ raise Exception(\"The key must consist of 'ssh-keytype BASE64 comment'\")\n+ if key_elem[0] not in [\"ssh-rsa\", \"ssh-ed25519\", \"ecdsa-sha2-nistp256\"]:\n+ raise Exception(\"The keytype you specified is not supported.\")\n \n key_type = key_elem[0]\n key = key_elem[1]\n", "issue": "Support other keytypes with SSHKey Token\nWe would like to import other public key types like ecdsa into the SSHKey token type.\r\n\r\n* [x] The UI and the /token/init API needs to allow this.\r\n* [x] We also need to check, if the keytype is saved and returned correctly by GET /authitem\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# privacyIDEA\n# Jul 18, 2014 Cornelius K\u00f6lbel\n# License: AGPLv3\n# contact: http://www.privacyidea.org\n#\n# This code is free software; you can redistribute it and/or\n# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE\n# License as published by the Free Software Foundation; either\n# version 3 of the License, or any later version.\n#\n# This code is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU AFFERO GENERAL PUBLIC LICENSE for more details.\n#\n# You should have received a copy of the GNU Affero General Public\n# License along with this program. 
If not, see <http://www.gnu.org/licenses/>.\n#\n__doc__=\"\"\"The SSHKeyTokenClass provides a TokenClass that stores the public\nSSH key and can give the public SSH key via the getotp function.\nThis can be used to manage SSH keys and retrieve the public ssh key\nto import it to authorized keys files.\n\nThe code is tested in tests/test_lib_tokens_ssh\n\"\"\"\n\nimport logging\nfrom privacyidea.lib import _\nfrom privacyidea.api.lib.utils import getParam\nfrom privacyidea.lib.log import log_with\nfrom privacyidea.lib.tokenclass import TokenClass\nfrom privacyidea.lib.policy import SCOPE, ACTION, GROUP\n\nlog = logging.getLogger(__name__)\n\n\noptional = True\nrequired = False\n\n\n##TODO: We should save a fingerprint of the SSH Key in the encrypted OTP\n# field, so that we can be sure, that the public ssh key was not changed in\n# the database!\n\n\nclass SSHkeyTokenClass(TokenClass):\n \"\"\"\n The SSHKeyTokenClass provides a TokenClass that stores the public\n SSH key and can give the public SSH key via the getotp function.\n This can be used to manage SSH keys and retrieve the public ssh key\n to import it to authorized keys files.\n \"\"\"\n mode = ['authenticate']\n using_pin = False\n\n def __init__(self, db_token):\n TokenClass.__init__(self, db_token)\n self.set_type(u\"sshkey\")\n\n @staticmethod\n def get_class_type():\n return \"sshkey\"\n\n @staticmethod\n def get_class_prefix():\n return \"SSHK\"\n\n @staticmethod\n @log_with(log)\n def get_class_info(key=None, ret='all'):\n \"\"\"\n returns a subtree of the token definition\n\n :param key: subsection identifier\n :type key: string\n :param ret: default return value, if nothing is found\n :type ret: user defined\n :return: subsection if key exists or user defined\n :rtype: dictionary\n \"\"\"\n res = {'type': 'sshkey',\n 'title': 'SSHkey Token',\n 'description': _('SSH Public Key: The public SSH key.'),\n 'config': {},\n 'user': ['enroll'],\n # This tokentype is enrollable in the UI for...\n 'ui_enroll': [\"admin\", \"user\"],\n 'policy': {\n SCOPE.ENROLL: {\n ACTION.MAXTOKENUSER: {\n 'type': 'int',\n 'desc': _(\"The user may only have this maximum number of SSH keys assigned.\"),\n 'group': GROUP.TOKEN\n },\n ACTION.MAXACTIVETOKENUSER: {\n 'type': 'int',\n 'desc': _(\n \"The user may only have this maximum number of active SSH keys assigned.\"),\n 'group': GROUP.TOKEN\n }\n }\n },\n }\n if key:\n ret = res.get(key, {})\n else:\n if ret == 'all':\n ret = res\n\n return ret\n\n def update(self, param):\n \"\"\"\n The key holds the public ssh key and this is required\n \n The key probably is of the form \"ssh-rsa BASE64 comment\"\n \"\"\"\n # We need to save the token, so that we can later add the tokeninfo\n # Otherwise we might not have created the DB entry, yet and we would\n # be missing the token.id\n self.token.save()\n\n getParam(param, \"sshkey\", required)\n \n key_elem = param.get(\"sshkey\").split(\" \", 2)\n if len(key_elem) != 3 or key_elem[0] != \"ssh-rsa\":\n raise Exception(\"The key must consist of 'ssh-rsa BASE64 comment'\")\n\n key_type = key_elem[0]\n key = key_elem[1]\n key_comment = key_elem[2]\n \n # convert key to hex\n self.add_tokeninfo(\"ssh_key\", key, value_type=\"password\")\n self.add_tokeninfo(\"ssh_type\", key_type)\n self.add_tokeninfo(\"ssh_comment\", key_comment)\n\n # call the parents function\n TokenClass.update(self, param)\n \n @log_with(log)\n def get_sshkey(self):\n \"\"\"\n returns the public SSH key\n \n :return: SSH pub key\n :rtype: string\n \"\"\"\n ti = self.get_tokeninfo()\n 
key_type = ti.get(\"ssh_type\")\n key_comment = ti.get(\"ssh_comment\")\n # get the ssh key directly, otherwise it will not be decrypted\n sshkey = self.get_tokeninfo(\"ssh_key\")\n return u\"{0!s} {1!s} {2!s}\".format(key_type, sshkey, key_comment)\n", "path": "privacyidea/lib/tokens/sshkeytoken.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# privacyIDEA\n# Jul 18, 2014 Cornelius K\u00f6lbel\n# License: AGPLv3\n# contact: http://www.privacyidea.org\n#\n# This code is free software; you can redistribute it and/or\n# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE\n# License as published by the Free Software Foundation; either\n# version 3 of the License, or any later version.\n#\n# This code is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU AFFERO GENERAL PUBLIC LICENSE for more details.\n#\n# You should have received a copy of the GNU Affero General Public\n# License along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n__doc__=\"\"\"The SSHKeyTokenClass provides a TokenClass that stores the public\nSSH key and can give the public SSH key via the getotp function.\nThis can be used to manage SSH keys and retrieve the public ssh key\nto import it to authorized keys files.\n\nThe code is tested in tests/test_lib_tokens_ssh\n\"\"\"\n\nimport logging\nfrom privacyidea.lib import _\nfrom privacyidea.api.lib.utils import getParam\nfrom privacyidea.lib.log import log_with\nfrom privacyidea.lib.tokenclass import TokenClass\nfrom privacyidea.lib.policy import SCOPE, ACTION, GROUP\n\nlog = logging.getLogger(__name__)\n\n\noptional = True\nrequired = False\n\n\n##TODO: We should save a fingerprint of the SSH Key in the encrypted OTP\n# field, so that we can be sure, that the public ssh key was not changed in\n# the database!\n\n\nclass SSHkeyTokenClass(TokenClass):\n \"\"\"\n The SSHKeyTokenClass provides a TokenClass that stores the public\n SSH key and can give the public SSH key via the getotp function.\n This can be used to manage SSH keys and retrieve the public ssh key\n to import it to authorized keys files.\n \"\"\"\n mode = ['authenticate']\n using_pin = False\n\n def __init__(self, db_token):\n TokenClass.__init__(self, db_token)\n self.set_type(u\"sshkey\")\n\n @staticmethod\n def get_class_type():\n return \"sshkey\"\n\n @staticmethod\n def get_class_prefix():\n return \"SSHK\"\n\n @staticmethod\n @log_with(log)\n def get_class_info(key=None, ret='all'):\n \"\"\"\n returns a subtree of the token definition\n\n :param key: subsection identifier\n :type key: string\n :param ret: default return value, if nothing is found\n :type ret: user defined\n :return: subsection if key exists or user defined\n :rtype: dictionary\n \"\"\"\n res = {'type': 'sshkey',\n 'title': 'SSHkey Token',\n 'description': _('SSH Public Key: The public SSH key.'),\n 'config': {},\n 'user': ['enroll'],\n # This tokentype is enrollable in the UI for...\n 'ui_enroll': [\"admin\", \"user\"],\n 'policy': {\n SCOPE.ENROLL: {\n ACTION.MAXTOKENUSER: {\n 'type': 'int',\n 'desc': _(\"The user may only have this maximum number of SSH keys assigned.\"),\n 'group': GROUP.TOKEN\n },\n ACTION.MAXACTIVETOKENUSER: {\n 'type': 'int',\n 'desc': _(\n \"The user may only have this maximum number of active SSH keys assigned.\"),\n 'group': GROUP.TOKEN\n }\n }\n },\n }\n if key:\n ret = res.get(key, {})\n else:\n if ret == 'all':\n ret = res\n\n 
return ret\n\n def update(self, param):\n \"\"\"\n The key holds the public ssh key and this is required\n \n The key probably is of the form \"ssh-rsa BASE64 comment\"\n \"\"\"\n # We need to save the token, so that we can later add the tokeninfo\n # Otherwise we might not have created the DB entry, yet and we would\n # be missing the token.id\n self.token.save()\n\n getParam(param, \"sshkey\", required)\n \n key_elem = param.get(\"sshkey\").split(\" \", 2)\n if len(key_elem) != 3:\n raise Exception(\"The key must consist of 'ssh-keytype BASE64 comment'\")\n if key_elem[0] not in [\"ssh-rsa\", \"ssh-ed25519\", \"ecdsa-sha2-nistp256\"]:\n raise Exception(\"The keytype you specified is not supported.\")\n\n key_type = key_elem[0]\n key = key_elem[1]\n key_comment = key_elem[2]\n \n # convert key to hex\n self.add_tokeninfo(\"ssh_key\", key, value_type=\"password\")\n self.add_tokeninfo(\"ssh_type\", key_type)\n self.add_tokeninfo(\"ssh_comment\", key_comment)\n\n # call the parents function\n TokenClass.update(self, param)\n \n @log_with(log)\n def get_sshkey(self):\n \"\"\"\n returns the public SSH key\n \n :return: SSH pub key\n :rtype: string\n \"\"\"\n ti = self.get_tokeninfo()\n key_type = ti.get(\"ssh_type\")\n key_comment = ti.get(\"ssh_comment\")\n # get the ssh key directly, otherwise it will not be decrypted\n sshkey = self.get_tokeninfo(\"ssh_key\")\n return u\"{0!s} {1!s} {2!s}\".format(key_type, sshkey, key_comment)\n", "path": "privacyidea/lib/tokens/sshkeytoken.py"}]}
1,908
239
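For context, the patch in this record boils down to replacing the hard-coded `ssh-rsa` check with a small whitelist of key types. Below is a minimal standalone sketch of the new validation logic — the whitelist and error messages are taken directly from the golden diff, while `parse_sshkey()` itself is an illustrative helper, not an actual privacyIDEA function.

```python
# Sketch of the key-type validation introduced by the golden diff above.
# The whitelist and messages mirror the patch; parse_sshkey() is an
# illustrative helper, not part of privacyIDEA's API.
SUPPORTED_KEYTYPES = ["ssh-rsa", "ssh-ed25519", "ecdsa-sha2-nistp256"]

def parse_sshkey(sshkey):
    key_elem = sshkey.split(" ", 2)
    if len(key_elem) != 3:
        raise Exception("The key must consist of 'ssh-keytype BASE64 comment'")
    if key_elem[0] not in SUPPORTED_KEYTYPES:
        raise Exception("The keytype you specified is not supported.")
    return key_elem  # [key_type, key, key_comment]

# An ecdsa key now parses, where the pre-patch check rejected anything
# that was not ssh-rsa:
parse_sshkey("ecdsa-sha2-nistp256 AAAAE2VjZHNh... user@host")
```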
gh_patches_debug_345
rasdani/github-patches
git_diff
NVIDIA__apex-564
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- RuntimeError: "GeluCUDAKernelImpl" not implemented for 'Half' PyTorch 1.2 introduced the `gelu` activation function. Unfortunately, this leads to terminal errors when using with AMP. Trace (`self.activation` is `gelu`): ``` Traceback (most recent call last): File "predict.py", line 282, in <module> predictor.predict() File "predict.py", line 74, in predict fig = trainer.train() File "/home/bram/Python/projects/transformer-classifiers/transformer_classifiers/TransformerTrainer.py", line 232, in train self._process('train', epoch) File "/home/bram/Python/projects/transformer-classifiers/transformer_classifiers/TransformerTrainer.py", line 124, in _process preds = self.model(input_ids, attention_mask=input_mask) File "/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/modules/module.py", line 541, in __call__ result = self.forward(*input, **kwargs) File "/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/parallel/distributed.py", line 442, in forward output = self.module(*inputs[0], **kwargs[0]) File "/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/modules/module.py", line 541, in __call__ result = self.forward(*input, **kwargs) File "/home/bram/Python/projects/transformer-classifiers/transformer_classifiers/models.py", line 140, in forward cls_output = self.activation(cls_output) File "/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/functional.py", line 1126, in gelu return torch._C._nn.gelu(input) RuntimeError: "GeluCUDAKernelImpl" not implemented for 'Half' ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `apex/amp/lists/functional_overrides.py` Content: ``` 1 2 # TODO: think about the following two. They do weird things. 3 # - torch.nn.utils.clip_grad (but it should always be fp32 anyway) 4 # - torch.nn.utils.weight_norm 5 6 # Notes: 7 # F.instance_norm uses batch_norm internally. Which correctly handles 8 # fp16 in/out with fp32 weights. So we shouldn't do anything for 9 # either of these. 10 # F.normalize calls `input.norm()` internally, so it's redundant, but 11 # kept here in case impl. changes. 12 # F.cosine_similarity is same: calls `x.norm()` internally. 13 14 import torch.nn.functional 15 16 MODULE = torch.nn.functional 17 18 FP16_FUNCS = [ 19 'conv1d', 20 'conv2d', 21 'conv3d', 22 'conv_transpose1d', 23 'conv_transpose2d', 24 'conv_transpose3d', 25 'conv_tbc', # Undocumented / maybe new? 26 'linear', 27 ] 28 29 FP32_FUNCS = [ 30 31 # Interpolation/Upsampling TODO: Remove for 1.2 32 'interpolate', 33 'grid_sample', 34 35 # Pointwise 36 'softplus', 37 'softmin', 38 'log_softmax', 39 'softmax', 40 41 # Normalization 42 'layer_norm', 43 'group_norm', 44 'local_response_norm', 45 'normalize', 46 'cosine_similarity', 47 48 # Loss functions 49 # TODO: which of these can be fp16? 
50 'poisson_nll_loss', 51 'cosine_embedding_loss', 52 'cross_entropy', 53 'hinge_embedding_loss', 54 'kl_div', 55 'l1_loss', 56 'mse_loss', 57 'margin_ranking_loss', 58 'multilabel_margin_loss', 59 'multilabel_soft_margin_loss', 60 'multi_margin_loss', 61 'nll_loss', 62 'binary_cross_entropy_with_logits', 63 'smooth_l1_loss', 64 'soft_margin_loss', 65 'triplet_margin_loss' 66 ] 67 68 BANNED_FUNCS = [ 69 ('binary_cross_entropy', 70 ("\namp does not work out-of-the-box with `F.binary_cross_entropy` or `torch.nn.BCELoss.` " 71 "It requires that the output of the previous function be already a FloatTensor. \n\n" 72 "Most models have a Sigmoid right before BCELoss. In that case, you can use\n" 73 " torch.nn.BCEWithLogitsLoss\nto combine Sigmoid+BCELoss into a single layer " 74 "that is compatible with amp.\nAnother option is to add\n" 75 " amp.register_float_function(torch, 'sigmoid')\nbefore calling `amp.init()`.\n" 76 "If you _really_ know what you are doing, you can disable this warning by passing " 77 "allow_banned=True to `amp.init()`.")) 78 ] 79 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/apex/amp/lists/functional_overrides.py b/apex/amp/lists/functional_overrides.py --- a/apex/amp/lists/functional_overrides.py +++ b/apex/amp/lists/functional_overrides.py @@ -37,7 +37,8 @@ 'softmin', 'log_softmax', 'softmax', - + 'gelu', + # Normalization 'layer_norm', 'group_norm',
{"golden_diff": "diff --git a/apex/amp/lists/functional_overrides.py b/apex/amp/lists/functional_overrides.py\n--- a/apex/amp/lists/functional_overrides.py\n+++ b/apex/amp/lists/functional_overrides.py\n@@ -37,7 +37,8 @@\n 'softmin',\n 'log_softmax',\n 'softmax',\n-\n+ 'gelu',\n+ \n # Normalization\n 'layer_norm',\n 'group_norm',\n", "issue": "RuntimeError: \"GeluCUDAKernelImpl\" not implemented for 'Half'\nPyTorch 1.2 introduced the `gelu` activation function. Unfortunately, this leads to terminal errors when using with AMP.\r\n\r\nTrace (`self.activation` is `gelu`): \r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"predict.py\", line 282, in <module>\r\n predictor.predict()\r\n File \"predict.py\", line 74, in predict\r\n fig = trainer.train()\r\n File \"/home/bram/Python/projects/transformer-classifiers/transformer_classifiers/TransformerTrainer.py\", line 232, in train\r\n self._process('train', epoch)\r\n File \"/home/bram/Python/projects/transformer-classifiers/transformer_classifiers/TransformerTrainer.py\", line 124, in _process\r\n preds = self.model(input_ids, attention_mask=input_mask)\r\n File \"/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/modules/module.py\", line 541, in __call__\r\n result = self.forward(*input, **kwargs)\r\n File \"/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/parallel/distributed.py\", line 442, in forward\r\n output = self.module(*inputs[0], **kwargs[0])\r\n File \"/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/modules/module.py\", line 541, in __call__\r\n result = self.forward(*input, **kwargs)\r\n File \"/home/bram/Python/projects/transformer-classifiers/transformer_classifiers/models.py\", line 140, in forward\r\n cls_output = self.activation(cls_output)\r\n File \"/home/bram/.local/share/virtualenvs/transformer-classifiers-x27iJBv7/lib/python3.7/site-packages/torch/nn/functional.py\", line 1126, in gelu\r\n return torch._C._nn.gelu(input)\r\nRuntimeError: \"GeluCUDAKernelImpl\" not implemented for 'Half'\r\n```\n", "before_files": [{"content": "\n# TODO: think about the following two. They do weird things.\n# - torch.nn.utils.clip_grad (but it should always be fp32 anyway)\n# - torch.nn.utils.weight_norm\n\n# Notes:\n# F.instance_norm uses batch_norm internally. Which correctly handles\n# fp16 in/out with fp32 weights. So we shouldn't do anything for\n# either of these.\n# F.normalize calls `input.norm()` internally, so it's redundant, but\n# kept here in case impl. 
changes.\n# F.cosine_similarity is same: calls `x.norm()` internally.\n\nimport torch.nn.functional\n\nMODULE = torch.nn.functional\n\nFP16_FUNCS = [\n 'conv1d',\n 'conv2d',\n 'conv3d',\n 'conv_transpose1d',\n 'conv_transpose2d',\n 'conv_transpose3d',\n 'conv_tbc', # Undocumented / maybe new?\n 'linear',\n]\n\nFP32_FUNCS = [\n\n # Interpolation/Upsampling TODO: Remove for 1.2\n 'interpolate',\n 'grid_sample',\n\n # Pointwise\n 'softplus',\n 'softmin',\n 'log_softmax',\n 'softmax',\n\n # Normalization\n 'layer_norm',\n 'group_norm',\n 'local_response_norm',\n 'normalize',\n 'cosine_similarity',\n\n # Loss functions\n # TODO: which of these can be fp16?\n 'poisson_nll_loss',\n 'cosine_embedding_loss',\n 'cross_entropy',\n 'hinge_embedding_loss',\n 'kl_div',\n 'l1_loss',\n 'mse_loss',\n 'margin_ranking_loss',\n 'multilabel_margin_loss',\n 'multilabel_soft_margin_loss',\n 'multi_margin_loss',\n 'nll_loss',\n 'binary_cross_entropy_with_logits',\n 'smooth_l1_loss',\n 'soft_margin_loss',\n 'triplet_margin_loss'\n]\n\nBANNED_FUNCS = [\n ('binary_cross_entropy',\n (\"\\namp does not work out-of-the-box with `F.binary_cross_entropy` or `torch.nn.BCELoss.` \"\n \"It requires that the output of the previous function be already a FloatTensor. \\n\\n\"\n \"Most models have a Sigmoid right before BCELoss. In that case, you can use\\n\"\n \" torch.nn.BCEWithLogitsLoss\\nto combine Sigmoid+BCELoss into a single layer \"\n \"that is compatible with amp.\\nAnother option is to add\\n\"\n \" amp.register_float_function(torch, 'sigmoid')\\nbefore calling `amp.init()`.\\n\"\n \"If you _really_ know what you are doing, you can disable this warning by passing \"\n \"allow_banned=True to `amp.init()`.\"))\n]\n", "path": "apex/amp/lists/functional_overrides.py"}], "after_files": [{"content": "\n# TODO: think about the following two. They do weird things.\n# - torch.nn.utils.clip_grad (but it should always be fp32 anyway)\n# - torch.nn.utils.weight_norm\n\n# Notes:\n# F.instance_norm uses batch_norm internally. Which correctly handles\n# fp16 in/out with fp32 weights. So we shouldn't do anything for\n# either of these.\n# F.normalize calls `input.norm()` internally, so it's redundant, but\n# kept here in case impl. changes.\n# F.cosine_similarity is same: calls `x.norm()` internally.\n\nimport torch.nn.functional\n\nMODULE = torch.nn.functional\n\nFP16_FUNCS = [\n 'conv1d',\n 'conv2d',\n 'conv3d',\n 'conv_transpose1d',\n 'conv_transpose2d',\n 'conv_transpose3d',\n 'conv_tbc', # Undocumented / maybe new?\n 'linear',\n]\n\nFP32_FUNCS = [\n\n # Interpolation/Upsampling TODO: Remove for 1.2\n 'interpolate',\n 'grid_sample',\n\n # Pointwise\n 'softplus',\n 'softmin',\n 'log_softmax',\n 'softmax',\n 'gelu',\n \n # Normalization\n 'layer_norm',\n 'group_norm',\n 'local_response_norm',\n 'normalize',\n 'cosine_similarity',\n\n # Loss functions\n # TODO: which of these can be fp16?\n 'poisson_nll_loss',\n 'cosine_embedding_loss',\n 'cross_entropy',\n 'hinge_embedding_loss',\n 'kl_div',\n 'l1_loss',\n 'mse_loss',\n 'margin_ranking_loss',\n 'multilabel_margin_loss',\n 'multilabel_soft_margin_loss',\n 'multi_margin_loss',\n 'nll_loss',\n 'binary_cross_entropy_with_logits',\n 'smooth_l1_loss',\n 'soft_margin_loss',\n 'triplet_margin_loss'\n]\n\nBANNED_FUNCS = [\n ('binary_cross_entropy',\n (\"\\namp does not work out-of-the-box with `F.binary_cross_entropy` or `torch.nn.BCELoss.` \"\n \"It requires that the output of the previous function be already a FloatTensor. \\n\\n\"\n \"Most models have a Sigmoid right before BCELoss. 
In that case, you can use\\n\"\n \" torch.nn.BCEWithLogitsLoss\\nto combine Sigmoid+BCELoss into a single layer \"\n \"that is compatible with amp.\\nAnother option is to add\\n\"\n \" amp.register_float_function(torch, 'sigmoid')\\nbefore calling `amp.init()`.\\n\"\n \"If you _really_ know what you are doing, you can disable this warning by passing \"\n \"allow_banned=True to `amp.init()`.\"))\n]\n", "path": "apex/amp/lists/functional_overrides.py"}]}
1,540
102
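For context, the one-line fix in this record registers `gelu` in `FP32_FUNCS`, so amp casts half-precision inputs up to float32 before the CUDA kernel runs. A minimal illustration of that cast is sketched below — it assumes a CUDA device and an affected PyTorch build, and uses plain PyTorch (no apex required) to show the equivalent behavior.

```python
import torch
import torch.nn.functional as F

x_half = torch.randn(4, 8, device="cuda", dtype=torch.float16)

# Calling F.gelu(x_half) directly on an affected build raised:
#   RuntimeError: "GeluCUDAKernelImpl" not implemented for 'Half'
# Listing 'gelu' under FP32_FUNCS makes amp wrap the call roughly like this:
y = F.gelu(x_half.float()).half()  # upcast, run the fp32 kernel, cast back
```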
gh_patches_debug_23388
rasdani/github-patches
git_diff
cal-itp__benefits-1550
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Bug: Grid-width issue Related to #1545 Almost all instances of `col-lg-10` app should now be `col-lg-8`. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `benefits/eligibility/forms.py` Content: ``` 1 """ 2 The eligibility application: Form definition for the eligibility verification flow. 3 """ 4 import logging 5 6 from django import forms 7 from django.utils.translation import gettext_lazy as _ 8 9 from benefits.core import models, recaptcha, widgets 10 11 12 logger = logging.getLogger(__name__) 13 14 15 class EligibilityVerifierSelectionForm(forms.Form): 16 """Form to capture eligibility verifier selection.""" 17 18 action_url = "eligibility:index" 19 id = "form-verifier-selection" 20 method = "POST" 21 22 verifier = forms.ChoiceField(label="", widget=widgets.VerifierRadioSelect) 23 # sets label to empty string so the radio_select template can override the label style 24 submit_value = _("eligibility.buttons.choose") 25 26 def __init__(self, agency: models.TransitAgency, *args, **kwargs): 27 super().__init__(*args, **kwargs) 28 verifiers = agency.eligibility_verifiers.all() 29 30 self.classes = "offset-lg-1 col-lg-9" 31 # second element is not used since we render the whole label using selection_label_template, 32 # therefore set to None 33 self.fields["verifier"].choices = [(v.id, None) for v in verifiers] 34 self.fields["verifier"].widget.selection_label_templates = {v.id: v.selection_label_template for v in verifiers} 35 36 def clean(self): 37 if not recaptcha.verify(self.data): 38 raise forms.ValidationError("reCAPTCHA failed") 39 40 41 class EligibilityVerificationForm(forms.Form): 42 """Form to collect eligibility verification details.""" 43 44 action_url = "eligibility:confirm" 45 id = "form-eligibility-verification" 46 method = "POST" 47 48 submit_value = _("eligibility.forms.confirm.submit") 49 submitting_value = _("eligibility.forms.confirm.submitting") 50 51 _error_messages = { 52 "invalid": _("eligibility.forms.confirm.errors.invalid"), 53 "missing": _("eligibility.forms.confirm.errors.missing"), 54 } 55 56 def __init__(self, verifier: models.EligibilityVerifier, *args, **kwargs): 57 super().__init__(*args, **kwargs) 58 59 self.classes = "offset-lg-3 col-lg-6" 60 sub_widget = widgets.FormControlTextInput(placeholder=verifier.form_sub_placeholder) 61 if verifier.form_sub_pattern: 62 sub_widget.attrs.update({"pattern": verifier.form_sub_pattern}) 63 if verifier.form_input_mode: 64 sub_widget.attrs.update({"inputmode": verifier.form_input_mode}) 65 if verifier.form_max_length: 66 sub_widget.attrs.update({"maxlength": verifier.form_max_length}) 67 68 self.fields["sub"] = forms.CharField( 69 label=_(verifier.form_sub_label), 70 widget=sub_widget, 71 help_text=_(verifier.form_sub_help_text), 72 ) 73 74 name_widget = widgets.FormControlTextInput(placeholder=verifier.form_name_placeholder) 75 if verifier.form_name_max_length: 76 name_widget.attrs.update({"maxlength": verifier.form_name_max_length}) 77 78 self.fields["name"] = forms.CharField( 79 label=_(verifier.form_name_label), widget=name_widget, help_text=_(verifier.form_name_help_text) 80 ) 81 82 def clean(self): 83 if not recaptcha.verify(self.data): 84 raise forms.ValidationError("reCAPTCHA failed") 85 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to 
the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/benefits/eligibility/forms.py b/benefits/eligibility/forms.py --- a/benefits/eligibility/forms.py +++ b/benefits/eligibility/forms.py @@ -27,7 +27,7 @@ super().__init__(*args, **kwargs) verifiers = agency.eligibility_verifiers.all() - self.classes = "offset-lg-1 col-lg-9" + self.classes = "col-lg-8" # second element is not used since we render the whole label using selection_label_template, # therefore set to None self.fields["verifier"].choices = [(v.id, None) for v in verifiers] @@ -56,7 +56,7 @@ def __init__(self, verifier: models.EligibilityVerifier, *args, **kwargs): super().__init__(*args, **kwargs) - self.classes = "offset-lg-3 col-lg-6" + self.classes = "col-lg-6" sub_widget = widgets.FormControlTextInput(placeholder=verifier.form_sub_placeholder) if verifier.form_sub_pattern: sub_widget.attrs.update({"pattern": verifier.form_sub_pattern})
{"golden_diff": "diff --git a/benefits/eligibility/forms.py b/benefits/eligibility/forms.py\n--- a/benefits/eligibility/forms.py\n+++ b/benefits/eligibility/forms.py\n@@ -27,7 +27,7 @@\n super().__init__(*args, **kwargs)\n verifiers = agency.eligibility_verifiers.all()\n \n- self.classes = \"offset-lg-1 col-lg-9\"\n+ self.classes = \"col-lg-8\"\n # second element is not used since we render the whole label using selection_label_template,\n # therefore set to None\n self.fields[\"verifier\"].choices = [(v.id, None) for v in verifiers]\n@@ -56,7 +56,7 @@\n def __init__(self, verifier: models.EligibilityVerifier, *args, **kwargs):\n super().__init__(*args, **kwargs)\n \n- self.classes = \"offset-lg-3 col-lg-6\"\n+ self.classes = \"col-lg-6\"\n sub_widget = widgets.FormControlTextInput(placeholder=verifier.form_sub_placeholder)\n if verifier.form_sub_pattern:\n sub_widget.attrs.update({\"pattern\": verifier.form_sub_pattern})\n", "issue": "Bug: Grid-width issue\nRelated to #1545 \r\n\r\nAlmost all instances of `col-lg-10` app should now be `col-lg-8`.\n", "before_files": [{"content": "\"\"\"\nThe eligibility application: Form definition for the eligibility verification flow.\n\"\"\"\nimport logging\n\nfrom django import forms\nfrom django.utils.translation import gettext_lazy as _\n\nfrom benefits.core import models, recaptcha, widgets\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass EligibilityVerifierSelectionForm(forms.Form):\n \"\"\"Form to capture eligibility verifier selection.\"\"\"\n\n action_url = \"eligibility:index\"\n id = \"form-verifier-selection\"\n method = \"POST\"\n\n verifier = forms.ChoiceField(label=\"\", widget=widgets.VerifierRadioSelect)\n # sets label to empty string so the radio_select template can override the label style\n submit_value = _(\"eligibility.buttons.choose\")\n\n def __init__(self, agency: models.TransitAgency, *args, **kwargs):\n super().__init__(*args, **kwargs)\n verifiers = agency.eligibility_verifiers.all()\n\n self.classes = \"offset-lg-1 col-lg-9\"\n # second element is not used since we render the whole label using selection_label_template,\n # therefore set to None\n self.fields[\"verifier\"].choices = [(v.id, None) for v in verifiers]\n self.fields[\"verifier\"].widget.selection_label_templates = {v.id: v.selection_label_template for v in verifiers}\n\n def clean(self):\n if not recaptcha.verify(self.data):\n raise forms.ValidationError(\"reCAPTCHA failed\")\n\n\nclass EligibilityVerificationForm(forms.Form):\n \"\"\"Form to collect eligibility verification details.\"\"\"\n\n action_url = \"eligibility:confirm\"\n id = \"form-eligibility-verification\"\n method = \"POST\"\n\n submit_value = _(\"eligibility.forms.confirm.submit\")\n submitting_value = _(\"eligibility.forms.confirm.submitting\")\n\n _error_messages = {\n \"invalid\": _(\"eligibility.forms.confirm.errors.invalid\"),\n \"missing\": _(\"eligibility.forms.confirm.errors.missing\"),\n }\n\n def __init__(self, verifier: models.EligibilityVerifier, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n self.classes = \"offset-lg-3 col-lg-6\"\n sub_widget = widgets.FormControlTextInput(placeholder=verifier.form_sub_placeholder)\n if verifier.form_sub_pattern:\n sub_widget.attrs.update({\"pattern\": verifier.form_sub_pattern})\n if verifier.form_input_mode:\n sub_widget.attrs.update({\"inputmode\": verifier.form_input_mode})\n if verifier.form_max_length:\n sub_widget.attrs.update({\"maxlength\": verifier.form_max_length})\n\n self.fields[\"sub\"] = forms.CharField(\n 
label=_(verifier.form_sub_label),\n widget=sub_widget,\n help_text=_(verifier.form_sub_help_text),\n )\n\n name_widget = widgets.FormControlTextInput(placeholder=verifier.form_name_placeholder)\n if verifier.form_name_max_length:\n name_widget.attrs.update({\"maxlength\": verifier.form_name_max_length})\n\n self.fields[\"name\"] = forms.CharField(\n label=_(verifier.form_name_label), widget=name_widget, help_text=_(verifier.form_name_help_text)\n )\n\n def clean(self):\n if not recaptcha.verify(self.data):\n raise forms.ValidationError(\"reCAPTCHA failed\")\n", "path": "benefits/eligibility/forms.py"}], "after_files": [{"content": "\"\"\"\nThe eligibility application: Form definition for the eligibility verification flow.\n\"\"\"\nimport logging\n\nfrom django import forms\nfrom django.utils.translation import gettext_lazy as _\n\nfrom benefits.core import models, recaptcha, widgets\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass EligibilityVerifierSelectionForm(forms.Form):\n \"\"\"Form to capture eligibility verifier selection.\"\"\"\n\n action_url = \"eligibility:index\"\n id = \"form-verifier-selection\"\n method = \"POST\"\n\n verifier = forms.ChoiceField(label=\"\", widget=widgets.VerifierRadioSelect)\n # sets label to empty string so the radio_select template can override the label style\n submit_value = _(\"eligibility.buttons.choose\")\n\n def __init__(self, agency: models.TransitAgency, *args, **kwargs):\n super().__init__(*args, **kwargs)\n verifiers = agency.eligibility_verifiers.all()\n\n self.classes = \"col-lg-8\"\n # second element is not used since we render the whole label using selection_label_template,\n # therefore set to None\n self.fields[\"verifier\"].choices = [(v.id, None) for v in verifiers]\n self.fields[\"verifier\"].widget.selection_label_templates = {v.id: v.selection_label_template for v in verifiers}\n\n def clean(self):\n if not recaptcha.verify(self.data):\n raise forms.ValidationError(\"reCAPTCHA failed\")\n\n\nclass EligibilityVerificationForm(forms.Form):\n \"\"\"Form to collect eligibility verification details.\"\"\"\n\n action_url = \"eligibility:confirm\"\n id = \"form-eligibility-verification\"\n method = \"POST\"\n\n submit_value = _(\"eligibility.forms.confirm.submit\")\n submitting_value = _(\"eligibility.forms.confirm.submitting\")\n\n _error_messages = {\n \"invalid\": _(\"eligibility.forms.confirm.errors.invalid\"),\n \"missing\": _(\"eligibility.forms.confirm.errors.missing\"),\n }\n\n def __init__(self, verifier: models.EligibilityVerifier, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n self.classes = \"col-lg-6\"\n sub_widget = widgets.FormControlTextInput(placeholder=verifier.form_sub_placeholder)\n if verifier.form_sub_pattern:\n sub_widget.attrs.update({\"pattern\": verifier.form_sub_pattern})\n if verifier.form_input_mode:\n sub_widget.attrs.update({\"inputmode\": verifier.form_input_mode})\n if verifier.form_max_length:\n sub_widget.attrs.update({\"maxlength\": verifier.form_max_length})\n\n self.fields[\"sub\"] = forms.CharField(\n label=_(verifier.form_sub_label),\n widget=sub_widget,\n help_text=_(verifier.form_sub_help_text),\n )\n\n name_widget = widgets.FormControlTextInput(placeholder=verifier.form_name_placeholder)\n if verifier.form_name_max_length:\n name_widget.attrs.update({\"maxlength\": verifier.form_name_max_length})\n\n self.fields[\"name\"] = forms.CharField(\n label=_(verifier.form_name_label), widget=name_widget, help_text=_(verifier.form_name_help_text)\n )\n\n def clean(self):\n if not 
recaptcha.verify(self.data):\n raise forms.ValidationError(\"reCAPTCHA failed\")\n", "path": "benefits/eligibility/forms.py"}]}
1,129
258
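For context, the change in this record is purely presentational (Bootstrap grid classes on two Django forms), so the natural check is a small regression test on each form's `classes` attribute. Below is a hedged sketch of such a test; it assumes a configured Django test environment, and the mocks stand in for the `TransitAgency` and `EligibilityVerifier` models — real fixtures in the benefits repo may be constructed differently.

```python
from unittest import mock

from benefits.eligibility import forms

def test_grid_widths():
    # Verifier selection form: "offset-lg-1 col-lg-9" -> "col-lg-8"
    agency = mock.Mock()
    agency.eligibility_verifiers.all.return_value = []
    form = forms.EligibilityVerifierSelectionForm(agency=agency)
    assert form.classes == "col-lg-8"

    # Verification form: "offset-lg-3 col-lg-6" -> "col-lg-6"
    verifier = mock.Mock(
        form_sub_placeholder="", form_sub_pattern=None, form_input_mode=None,
        form_max_length=None, form_sub_label="sub", form_sub_help_text="",
        form_name_placeholder="", form_name_max_length=None,
        form_name_label="name", form_name_help_text="",
    )
    form = forms.EligibilityVerificationForm(verifier=verifier)
    assert form.classes == "col-lg-6"
```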
gh_patches_debug_30699
rasdani/github-patches
git_diff
plotly__dash-490
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Allow excluding file patterns when generating component definitions Thanks a lot for making Dash -- we have found it very useful for building dashboards. We ran into an issue migrating to the new `dash-generate-components` utility. In [dash-bootstrap-components](https://github.com/ASIDataScience/dash-bootstrap-components), we keep the components in `src/components`. Besides components files, this also includes a few unit test files called, e.g. `src/components/__tests__/DropdownMenu.test.js`. When we run `dash-generate-components ./src/components <output>`, these test files also get picked up. This leads to a traceback in the build logs: ``` Error with path src/components/__tests__/DropdownMenu.test.jsError: No suitable component definition found. Error: No suitable component definition found. at parse (/project/pascal/dash-bootstrap-components/node_modules/react-docgen/dist/parse.js:84:9) at Object.defaultParse [as parse] (/project/pascal/dash-bootstrap-components/node_modules/react-docgen/dist/main.js:66:30) at parseFile (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:64:51) at dirs.forEach.filename (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:84:17) at Array.forEach (<anonymous>) at collectMetadataRecursively (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:79:14) at dirs.forEach.filename (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:82:17) at Array.forEach (<anonymous>) at collectMetadataRecursively (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:79:14) at componentPaths.forEach.componentPath (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:15:5) ``` While the error is, of course, legitimate, it would be nice to be able to suppress these tracebacks. I can see three routes: - do nothing -- after all, this doesn't stop us from building `metadata.json`, it just makes it slightly more confusing. - in `dash/extract-meta.js`, we could explicitly blacklist `__tests__` directories in the same way that files that don't end in `jsx?` are black-listed. AFAICT, the `__tests__` directory structure is the default with [jest](https://jestjs.io/docs/en/configuration.html#testmatch-array-string). [react-docgen](https://github.com/reactjs/react-docgen#cli) ignores `node_modules`, `__tests__` and `__mocks__` by default, so there is definitely a precedent. - add a `--ignore` argument to `dash-generate-components` that allows passing file globs to be excluded. Very happy to submit a PR if you decide on what the best course of action is. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. 
--- BEGIN FILES --- Path: `dash/development/component_generator.py` Content: ``` 1 from __future__ import print_function 2 3 import json 4 import sys 5 import subprocess 6 import shlex 7 import os 8 import argparse 9 import shutil 10 11 import pkg_resources 12 13 from ._py_components_generation import generate_class_file 14 from ._py_components_generation import generate_imports 15 from ._py_components_generation import generate_classes_files 16 17 18 class _CombinedFormatter(argparse.ArgumentDefaultsHelpFormatter, 19 argparse.RawDescriptionHelpFormatter): 20 pass 21 22 23 # pylint: disable=too-many-locals 24 def generate_components(components_source, project_shortname, 25 package_info_filename='package.json'): 26 is_windows = sys.platform == 'win32' 27 28 extract_path = pkg_resources.resource_filename('dash', 'extract-meta.js') 29 30 os.environ['NODE_PATH'] = 'node_modules' 31 cmd = shlex.split('node {} {}'.format(extract_path, components_source), 32 posix=not is_windows) 33 34 shutil.copyfile('package.json', 35 os.path.join(project_shortname, package_info_filename)) 36 37 proc = subprocess.Popen(cmd, 38 stdout=subprocess.PIPE, 39 stderr=subprocess.PIPE, 40 shell=is_windows) 41 out, err = proc.communicate() 42 status = proc.poll() 43 44 if err: 45 print(err.decode(), file=sys.stderr) 46 47 if not out: 48 print( 49 'Error generating metadata in {} (status={})'.format( 50 project_shortname, status), 51 file=sys.stderr) 52 sys.exit(1) 53 54 metadata = json.loads(out.decode()) 55 56 components = generate_classes_files( 57 project_shortname, 58 metadata, 59 generate_class_file 60 ) 61 62 with open(os.path.join(project_shortname, 'metadata.json'), 'w') as f: 63 json.dump(metadata, f) 64 65 generate_imports(project_shortname, components) 66 67 68 def cli(): 69 parser = argparse.ArgumentParser( 70 prog='dash-generate-components', 71 formatter_class=_CombinedFormatter, 72 description='Generate dash components by extracting the metadata ' 73 'using react-docgen. Then map the metadata to python classes.' 74 ) 75 parser.add_argument('components_source', 76 help='React components source directory.') 77 parser.add_argument( 78 'project_shortname', 79 help='Name of the project to export the classes files.' 80 ) 81 parser.add_argument( 82 '-p', '--package-info-filename', 83 default='package.json', 84 help='The filename of the copied `package.json` to `project_shortname`' 85 ) 86 87 args = parser.parse_args() 88 generate_components(args.components_source, args.project_shortname, 89 package_info_filename=args.package_info_filename) 90 91 92 if __name__ == '__main__': 93 cli() 94 ``` Path: `dash/version.py` Content: ``` 1 __version__ = '0.33.0' 2 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/dash/development/component_generator.py b/dash/development/component_generator.py --- a/dash/development/component_generator.py +++ b/dash/development/component_generator.py @@ -22,14 +22,17 @@ # pylint: disable=too-many-locals def generate_components(components_source, project_shortname, - package_info_filename='package.json'): + package_info_filename='package.json', + ignore='^_'): is_windows = sys.platform == 'win32' extract_path = pkg_resources.resource_filename('dash', 'extract-meta.js') os.environ['NODE_PATH'] = 'node_modules' - cmd = shlex.split('node {} {}'.format(extract_path, components_source), - posix=not is_windows) + cmd = shlex.split( + 'node {} {} {}'.format(extract_path, ignore, components_source), + posix=not is_windows + ) shutil.copyfile('package.json', os.path.join(project_shortname, package_info_filename)) @@ -83,10 +86,18 @@ default='package.json', help='The filename of the copied `package.json` to `project_shortname`' ) + parser.add_argument( + '-i', '--ignore', + default='^_', + help='Files/directories matching the pattern will be ignored' + ) args = parser.parse_args() - generate_components(args.components_source, args.project_shortname, - package_info_filename=args.package_info_filename) + generate_components( + args.components_source, args.project_shortname, + package_info_filename=args.package_info_filename, + ignore=args.ignore + ) if __name__ == '__main__': diff --git a/dash/version.py b/dash/version.py --- a/dash/version.py +++ b/dash/version.py @@ -1 +1 @@ -__version__ = '0.33.0' +__version__ = '0.34.0'
{"golden_diff": "diff --git a/dash/development/component_generator.py b/dash/development/component_generator.py\n--- a/dash/development/component_generator.py\n+++ b/dash/development/component_generator.py\n@@ -22,14 +22,17 @@\n \n # pylint: disable=too-many-locals\n def generate_components(components_source, project_shortname,\n- package_info_filename='package.json'):\n+ package_info_filename='package.json',\n+ ignore='^_'):\n is_windows = sys.platform == 'win32'\n \n extract_path = pkg_resources.resource_filename('dash', 'extract-meta.js')\n \n os.environ['NODE_PATH'] = 'node_modules'\n- cmd = shlex.split('node {} {}'.format(extract_path, components_source),\n- posix=not is_windows)\n+ cmd = shlex.split(\n+ 'node {} {} {}'.format(extract_path, ignore, components_source),\n+ posix=not is_windows\n+ )\n \n shutil.copyfile('package.json',\n os.path.join(project_shortname, package_info_filename))\n@@ -83,10 +86,18 @@\n default='package.json',\n help='The filename of the copied `package.json` to `project_shortname`'\n )\n+ parser.add_argument(\n+ '-i', '--ignore',\n+ default='^_',\n+ help='Files/directories matching the pattern will be ignored'\n+ )\n \n args = parser.parse_args()\n- generate_components(args.components_source, args.project_shortname,\n- package_info_filename=args.package_info_filename)\n+ generate_components(\n+ args.components_source, args.project_shortname,\n+ package_info_filename=args.package_info_filename,\n+ ignore=args.ignore\n+ )\n \n \n if __name__ == '__main__':\ndiff --git a/dash/version.py b/dash/version.py\n--- a/dash/version.py\n+++ b/dash/version.py\n@@ -1 +1 @@\n-__version__ = '0.33.0'\n+__version__ = '0.34.0'\n", "issue": "Allow excluding file patterns when generating component definitions \nThanks a lot for making Dash -- we have found it very useful for building dashboards.\r\n\r\nWe ran into an issue migrating to the new `dash-generate-components` utility.\r\n\r\nIn [dash-bootstrap-components](https://github.com/ASIDataScience/dash-bootstrap-components), we keep the components in `src/components`. Besides components files, this also includes a few unit test files called, e.g. `src/components/__tests__/DropdownMenu.test.js`. When we run `dash-generate-components ./src/components <output>`, these test files also get picked up. 
This leads to a traceback in the build logs: \r\n\r\n```\r\nError with path src/components/__tests__/DropdownMenu.test.jsError: No suitable component definition found.\r\nError: No suitable component definition found.\r\nat parse (/project/pascal/dash-bootstrap-components/node_modules/react-docgen/dist/parse.js:84:9)\r\nat Object.defaultParse [as parse] (/project/pascal/dash-bootstrap-components/node_modules/react-docgen/dist/main.js:66:30)\r\nat parseFile (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:64:51)\r\nat dirs.forEach.filename (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:84:17)\r\nat Array.forEach (<anonymous>)\r\nat collectMetadataRecursively (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:79:14)\r\nat dirs.forEach.filename (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:82:17)\r\nat Array.forEach (<anonymous>)\r\nat collectMetadataRecursively (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:79:14)\r\nat componentPaths.forEach.componentPath (/opt/anaconda/envs/Python3/lib/python3.6/site-packages/dash/extract-meta.js:15:5)\r\n```\r\n\r\nWhile the error is, of course, legitimate, it would be nice to be able to suppress these tracebacks. I can see three routes:\r\n- do nothing -- after all, this doesn't stop us from building `metadata.json`, it just makes it slightly more confusing.\r\n- in `dash/extract-meta.js`, we could explicitly blacklist `__tests__` directories in the same way that files that don't end in `jsx?` are black-listed. AFAICT, the `__tests__` directory structure is the default with [jest](https://jestjs.io/docs/en/configuration.html#testmatch-array-string). [react-docgen](https://github.com/reactjs/react-docgen#cli) ignores `node_modules`, `__tests__` and `__mocks__` by default, so there is definitely a precedent.\r\n- add a `--ignore` argument to `dash-generate-components` that allows passing file globs to be excluded. 
\r\n\r\nVery happy to submit a PR if you decide on what the best course of action is.\n", "before_files": [{"content": "from __future__ import print_function\n\nimport json\nimport sys\nimport subprocess\nimport shlex\nimport os\nimport argparse\nimport shutil\n\nimport pkg_resources\n\nfrom ._py_components_generation import generate_class_file\nfrom ._py_components_generation import generate_imports\nfrom ._py_components_generation import generate_classes_files\n\n\nclass _CombinedFormatter(argparse.ArgumentDefaultsHelpFormatter,\n argparse.RawDescriptionHelpFormatter):\n pass\n\n\n# pylint: disable=too-many-locals\ndef generate_components(components_source, project_shortname,\n package_info_filename='package.json'):\n is_windows = sys.platform == 'win32'\n\n extract_path = pkg_resources.resource_filename('dash', 'extract-meta.js')\n\n os.environ['NODE_PATH'] = 'node_modules'\n cmd = shlex.split('node {} {}'.format(extract_path, components_source),\n posix=not is_windows)\n\n shutil.copyfile('package.json',\n os.path.join(project_shortname, package_info_filename))\n\n proc = subprocess.Popen(cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=is_windows)\n out, err = proc.communicate()\n status = proc.poll()\n\n if err:\n print(err.decode(), file=sys.stderr)\n\n if not out:\n print(\n 'Error generating metadata in {} (status={})'.format(\n project_shortname, status),\n file=sys.stderr)\n sys.exit(1)\n\n metadata = json.loads(out.decode())\n\n components = generate_classes_files(\n project_shortname,\n metadata,\n generate_class_file\n )\n\n with open(os.path.join(project_shortname, 'metadata.json'), 'w') as f:\n json.dump(metadata, f)\n\n generate_imports(project_shortname, components)\n\n\ndef cli():\n parser = argparse.ArgumentParser(\n prog='dash-generate-components',\n formatter_class=_CombinedFormatter,\n description='Generate dash components by extracting the metadata '\n 'using react-docgen. 
Then map the metadata to python classes.'\n )\n parser.add_argument('components_source',\n help='React components source directory.')\n parser.add_argument(\n 'project_shortname',\n help='Name of the project to export the classes files.'\n )\n parser.add_argument(\n '-p', '--package-info-filename',\n default='package.json',\n help='The filename of the copied `package.json` to `project_shortname`'\n )\n\n args = parser.parse_args()\n generate_components(args.components_source, args.project_shortname,\n package_info_filename=args.package_info_filename)\n\n\nif __name__ == '__main__':\n cli()\n", "path": "dash/development/component_generator.py"}, {"content": "__version__ = '0.33.0'\n", "path": "dash/version.py"}], "after_files": [{"content": "from __future__ import print_function\n\nimport json\nimport sys\nimport subprocess\nimport shlex\nimport os\nimport argparse\nimport shutil\n\nimport pkg_resources\n\nfrom ._py_components_generation import generate_class_file\nfrom ._py_components_generation import generate_imports\nfrom ._py_components_generation import generate_classes_files\n\n\nclass _CombinedFormatter(argparse.ArgumentDefaultsHelpFormatter,\n argparse.RawDescriptionHelpFormatter):\n pass\n\n\n# pylint: disable=too-many-locals\ndef generate_components(components_source, project_shortname,\n package_info_filename='package.json',\n ignore='^_'):\n is_windows = sys.platform == 'win32'\n\n extract_path = pkg_resources.resource_filename('dash', 'extract-meta.js')\n\n os.environ['NODE_PATH'] = 'node_modules'\n cmd = shlex.split(\n 'node {} {} {}'.format(extract_path, ignore, components_source),\n posix=not is_windows\n )\n\n shutil.copyfile('package.json',\n os.path.join(project_shortname, package_info_filename))\n\n proc = subprocess.Popen(cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=is_windows)\n out, err = proc.communicate()\n status = proc.poll()\n\n if err:\n print(err.decode(), file=sys.stderr)\n\n if not out:\n print(\n 'Error generating metadata in {} (status={})'.format(\n project_shortname, status),\n file=sys.stderr)\n sys.exit(1)\n\n metadata = json.loads(out.decode())\n\n components = generate_classes_files(\n project_shortname,\n metadata,\n generate_class_file\n )\n\n with open(os.path.join(project_shortname, 'metadata.json'), 'w') as f:\n json.dump(metadata, f)\n\n generate_imports(project_shortname, components)\n\n\ndef cli():\n parser = argparse.ArgumentParser(\n prog='dash-generate-components',\n formatter_class=_CombinedFormatter,\n description='Generate dash components by extracting the metadata '\n 'using react-docgen. Then map the metadata to python classes.'\n )\n parser.add_argument('components_source',\n help='React components source directory.')\n parser.add_argument(\n 'project_shortname',\n help='Name of the project to export the classes files.'\n )\n parser.add_argument(\n '-p', '--package-info-filename',\n default='package.json',\n help='The filename of the copied `package.json` to `project_shortname`'\n )\n parser.add_argument(\n '-i', '--ignore',\n default='^_',\n help='Files/directories matching the pattern will be ignored'\n )\n\n args = parser.parse_args()\n generate_components(\n args.components_source, args.project_shortname,\n package_info_filename=args.package_info_filename,\n ignore=args.ignore\n )\n\n\nif __name__ == '__main__':\n cli()\n", "path": "dash/development/component_generator.py"}, {"content": "__version__ = '0.34.0'\n", "path": "dash/version.py"}]}
1,677
441
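For context, the patch in this record threads a new `ignore` pattern (default `'^_'`) from the CLI through to `extract-meta.js`, so directories such as `__tests__` and `__mocks__` are skipped out of the box — both start with an underscore. Below is a usage sketch of the patched entry point; the paths and package name are illustrative, and it assumes `extract-meta.js` treats the pattern as a regular expression, as the `'^_'` default suggests.

```python
from dash.development.component_generator import generate_components

generate_components(
    "./src/components",            # React components source directory
    "dash_bootstrap_components",   # project shortname (illustrative)
    ignore=r"^_|\.test\.js$",      # override the '^_' default if needed
)

# CLI equivalent, using the flag added by the patch:
#   dash-generate-components ./src/components dash_bootstrap_components \
#       --ignore "^_|\.test\.js$"
```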
gh_patches_debug_22410
rasdani/github-patches
git_diff
ESMCI__cime-2298
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- query_config --component not working for E3SM output from scripts_regression_tests: ``` ====================================================================== ERROR: test_query_components (__main__.X_TestQueryConfig) ---------------------------------------------------------------------- Traceback (most recent call last): File "./scripts_regression_tests.py", line 1452, in test_query_components run_cmd_no_fail("{}/query_config --components".format(SCRIPT_DIR)) File "/lcrc/group/earthscience/jacob/cimeESMCI2/scripts/tests/../lib/CIME/utils.py", line 424, in run_cmd_no_fail expect(False, "Command: '{}' failed with error '{}' from dir '{}'".format(cmd, errput.encode('utf-8'), os.getcwd() if from_dir is None else from_dir)) File "/lcrc/group/earthscience/jacob/cimeESMCI2/scripts/tests/../lib/CIME/utils.py", line 112, in expect raise exc_type(msg) SystemExit: ERROR: Command: '/lcrc/group/earthscience/jacob/cimeESMCI2/scripts/query_config --components' failed with error 'ERROR: Expected one child' from dir '/lcrc/group/earthscience/jacob/cimeESMCI2/scripts/tests' ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `scripts/lib/CIME/XML/files.py` Content: ``` 1 """ 2 Interface to the config_files.xml file. This class inherits from EntryID.py 3 """ 4 import re 5 from CIME.XML.standard_module_setup import * 6 7 from CIME.XML.entry_id import EntryID 8 from CIME.utils import expect, get_cime_root, get_model 9 10 logger = logging.getLogger(__name__) 11 12 class Files(EntryID): 13 14 def __init__(self): 15 """ 16 initialize an object 17 18 >>> files = Files() 19 >>> files.get_value('CASEFILE_HEADERS',resolved=False) 20 '$CIMEROOT/config/config_headers.xml' 21 """ 22 cimeroot = get_cime_root() 23 infile = os.path.join(cimeroot, "config", get_model(), "config_files.xml") 24 expect(os.path.isfile(infile), "Could not find or open file {}".format(infile)) 25 schema = os.path.join(cimeroot, "config", "xml_schemas", "entry_id.xsd") 26 EntryID.__init__(self, infile, schema=schema) 27 config_files_override = os.path.join(os.path.dirname(cimeroot),".config_files.xml") 28 # variables COMP_ROOT_DIR_{} are mutable, all other variables are read only 29 self.COMP_ROOT_DIR = {} 30 31 # .config_file.xml at the top level may overwrite COMP_ROOT_DIR_ nodes in config_files 32 33 if os.path.isfile(config_files_override): 34 self.read(config_files_override) 35 self.overwrite_existing_entries() 36 37 def get_value(self, vid, attribute=None, resolved=True, subgroup=None): 38 if "COMP_ROOT_DIR" in vid: 39 if vid in self.COMP_ROOT_DIR: 40 if attribute is not None: 41 if vid+attribute["component"] in self.COMP_ROOT_DIR: 42 return self.COMP_ROOT_DIR[vid+attribute["component"]] 43 else: 44 return self.COMP_ROOT_DIR[vid] 45 46 value = super(Files, self).get_value(vid, attribute=attribute, resolved=False, subgroup=subgroup) 47 if value is None and attribute is not None: 48 value = super(Files, self).get_value(vid, attribute=None, resolved=False, subgroup=subgroup) 49 50 if "COMP_ROOT_DIR" not in vid and value is not None and "COMP_ROOT_DIR" in value: 51 m = re.search("(COMP_ROOT_DIR_[^/]+)/", value) 52 comp_root_dir_var_name = m.group(1) 53 comp_root_dir = self.get_value(comp_root_dir_var_name, attribute=attribute, resolved=False, subgroup=subgroup) 54 self.set_value(comp_root_dir_var_name, comp_root_dir,subgroup=attribute) 55 
if resolved: 56 value = value.replace("$"+comp_root_dir_var_name, comp_root_dir) 57 58 59 if resolved and value is not None: 60 value = self.get_resolved_value(value) 61 62 return value 63 64 def set_value(self, vid, value,subgroup=None,ignore_type=False): 65 if "COMP_ROOT_DIR" in vid: 66 if subgroup is not None: 67 self.COMP_ROOT_DIR[vid+subgroup["component"]] = value 68 else: 69 self.COMP_ROOT_DIR[vid] = value 70 71 else: 72 expect(False, "Attempt to set a nonmutable variable {}".format(vid)) 73 return value 74 75 76 def get_schema(self, nodename, attributes=None): 77 node = self.get_optional_child("entry", {"id":nodename}) 78 schemanode = self.get_optional_child("schema", root=node, attributes=attributes) 79 if schemanode is not None: 80 logger.debug("Found schema for {}".format(nodename)) 81 return self.get_resolved_value(self.text(schemanode)) 82 return None 83 84 def get_components(self, nodename): 85 node = self.get_child("entry", {"id":nodename}) 86 valnodes = self.get_children("value", root=self.get_child("values", root=node)) 87 values = [] 88 for valnode in valnodes: 89 value = self.get(valnode, "component") 90 values.append(value) 91 return values 92 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/scripts/lib/CIME/XML/files.py b/scripts/lib/CIME/XML/files.py --- a/scripts/lib/CIME/XML/files.py +++ b/scripts/lib/CIME/XML/files.py @@ -67,7 +67,7 @@ self.COMP_ROOT_DIR[vid+subgroup["component"]] = value else: self.COMP_ROOT_DIR[vid] = value - + else: expect(False, "Attempt to set a nonmutable variable {}".format(vid)) return value @@ -82,10 +82,13 @@ return None def get_components(self, nodename): - node = self.get_child("entry", {"id":nodename}) - valnodes = self.get_children("value", root=self.get_child("values", root=node)) - values = [] - for valnode in valnodes: - value = self.get(valnode, "component") - values.append(value) - return values + node = self.get_optional_child("entry", {"id":nodename}) + if node is not None: + valnodes = self.get_children("value", root=self.get_child("values", root=node)) + values = [] + for valnode in valnodes: + value = self.get(valnode, "component") + values.append(value) + return values + + return None
{"golden_diff": "diff --git a/scripts/lib/CIME/XML/files.py b/scripts/lib/CIME/XML/files.py\n--- a/scripts/lib/CIME/XML/files.py\n+++ b/scripts/lib/CIME/XML/files.py\n@@ -67,7 +67,7 @@\n self.COMP_ROOT_DIR[vid+subgroup[\"component\"]] = value\n else:\n self.COMP_ROOT_DIR[vid] = value\n- \n+\n else:\n expect(False, \"Attempt to set a nonmutable variable {}\".format(vid))\n return value\n@@ -82,10 +82,13 @@\n return None\n \n def get_components(self, nodename):\n- node = self.get_child(\"entry\", {\"id\":nodename})\n- valnodes = self.get_children(\"value\", root=self.get_child(\"values\", root=node))\n- values = []\n- for valnode in valnodes:\n- value = self.get(valnode, \"component\")\n- values.append(value)\n- return values\n+ node = self.get_optional_child(\"entry\", {\"id\":nodename})\n+ if node is not None:\n+ valnodes = self.get_children(\"value\", root=self.get_child(\"values\", root=node))\n+ values = []\n+ for valnode in valnodes:\n+ value = self.get(valnode, \"component\")\n+ values.append(value)\n+ return values\n+\n+ return None\n", "issue": "query_config --component not working for E3SM\noutput from scripts_regression_tests:\r\n```\r\n======================================================================\r\nERROR: test_query_components (__main__.X_TestQueryConfig)\r\n----------------------------------------------------------------------\r\nTraceback (most recent call last):\r\n File \"./scripts_regression_tests.py\", line 1452, in test_query_components\r\n run_cmd_no_fail(\"{}/query_config --components\".format(SCRIPT_DIR))\r\n File \"/lcrc/group/earthscience/jacob/cimeESMCI2/scripts/tests/../lib/CIME/utils.py\", line 424, in run_cmd_no_fail\r\n expect(False, \"Command: '{}' failed with error '{}' from dir '{}'\".format(cmd, errput.encode('utf-8'), os.getcwd() if from_dir is None else from_dir))\r\n File \"/lcrc/group/earthscience/jacob/cimeESMCI2/scripts/tests/../lib/CIME/utils.py\", line 112, in expect\r\n raise exc_type(msg)\r\nSystemExit: ERROR: Command: '/lcrc/group/earthscience/jacob/cimeESMCI2/scripts/query_config --components' failed with error 'ERROR: Expected one child' from dir '/lcrc/group/earthscience/jacob/cimeESMCI2/scripts/tests'\r\n```\r\n\r\n\n", "before_files": [{"content": "\"\"\"\nInterface to the config_files.xml file. 
This class inherits from EntryID.py\n\"\"\"\nimport re\nfrom CIME.XML.standard_module_setup import *\n\nfrom CIME.XML.entry_id import EntryID\nfrom CIME.utils import expect, get_cime_root, get_model\n\nlogger = logging.getLogger(__name__)\n\nclass Files(EntryID):\n\n def __init__(self):\n \"\"\"\n initialize an object\n\n >>> files = Files()\n >>> files.get_value('CASEFILE_HEADERS',resolved=False)\n '$CIMEROOT/config/config_headers.xml'\n \"\"\"\n cimeroot = get_cime_root()\n infile = os.path.join(cimeroot, \"config\", get_model(), \"config_files.xml\")\n expect(os.path.isfile(infile), \"Could not find or open file {}\".format(infile))\n schema = os.path.join(cimeroot, \"config\", \"xml_schemas\", \"entry_id.xsd\")\n EntryID.__init__(self, infile, schema=schema)\n config_files_override = os.path.join(os.path.dirname(cimeroot),\".config_files.xml\")\n # variables COMP_ROOT_DIR_{} are mutable, all other variables are read only\n self.COMP_ROOT_DIR = {}\n\n # .config_file.xml at the top level may overwrite COMP_ROOT_DIR_ nodes in config_files\n\n if os.path.isfile(config_files_override):\n self.read(config_files_override)\n self.overwrite_existing_entries()\n\n def get_value(self, vid, attribute=None, resolved=True, subgroup=None):\n if \"COMP_ROOT_DIR\" in vid:\n if vid in self.COMP_ROOT_DIR:\n if attribute is not None:\n if vid+attribute[\"component\"] in self.COMP_ROOT_DIR:\n return self.COMP_ROOT_DIR[vid+attribute[\"component\"]]\n else:\n return self.COMP_ROOT_DIR[vid]\n\n value = super(Files, self).get_value(vid, attribute=attribute, resolved=False, subgroup=subgroup)\n if value is None and attribute is not None:\n value = super(Files, self).get_value(vid, attribute=None, resolved=False, subgroup=subgroup)\n\n if \"COMP_ROOT_DIR\" not in vid and value is not None and \"COMP_ROOT_DIR\" in value:\n m = re.search(\"(COMP_ROOT_DIR_[^/]+)/\", value)\n comp_root_dir_var_name = m.group(1)\n comp_root_dir = self.get_value(comp_root_dir_var_name, attribute=attribute, resolved=False, subgroup=subgroup)\n self.set_value(comp_root_dir_var_name, comp_root_dir,subgroup=attribute)\n if resolved:\n value = value.replace(\"$\"+comp_root_dir_var_name, comp_root_dir)\n\n\n if resolved and value is not None:\n value = self.get_resolved_value(value)\n\n return value\n\n def set_value(self, vid, value,subgroup=None,ignore_type=False):\n if \"COMP_ROOT_DIR\" in vid:\n if subgroup is not None:\n self.COMP_ROOT_DIR[vid+subgroup[\"component\"]] = value\n else:\n self.COMP_ROOT_DIR[vid] = value\n \n else:\n expect(False, \"Attempt to set a nonmutable variable {}\".format(vid))\n return value\n\n\n def get_schema(self, nodename, attributes=None):\n node = self.get_optional_child(\"entry\", {\"id\":nodename})\n schemanode = self.get_optional_child(\"schema\", root=node, attributes=attributes)\n if schemanode is not None:\n logger.debug(\"Found schema for {}\".format(nodename))\n return self.get_resolved_value(self.text(schemanode))\n return None\n\n def get_components(self, nodename):\n node = self.get_child(\"entry\", {\"id\":nodename})\n valnodes = self.get_children(\"value\", root=self.get_child(\"values\", root=node))\n values = []\n for valnode in valnodes:\n value = self.get(valnode, \"component\")\n values.append(value)\n return values\n", "path": "scripts/lib/CIME/XML/files.py"}], "after_files": [{"content": "\"\"\"\nInterface to the config_files.xml file. 
This class inherits from EntryID.py\n\"\"\"\nimport re\nfrom CIME.XML.standard_module_setup import *\n\nfrom CIME.XML.entry_id import EntryID\nfrom CIME.utils import expect, get_cime_root, get_model\n\nlogger = logging.getLogger(__name__)\n\nclass Files(EntryID):\n\n def __init__(self):\n \"\"\"\n initialize an object\n\n >>> files = Files()\n >>> files.get_value('CASEFILE_HEADERS',resolved=False)\n '$CIMEROOT/config/config_headers.xml'\n \"\"\"\n cimeroot = get_cime_root()\n infile = os.path.join(cimeroot, \"config\", get_model(), \"config_files.xml\")\n expect(os.path.isfile(infile), \"Could not find or open file {}\".format(infile))\n schema = os.path.join(cimeroot, \"config\", \"xml_schemas\", \"entry_id.xsd\")\n EntryID.__init__(self, infile, schema=schema)\n config_files_override = os.path.join(os.path.dirname(cimeroot),\".config_files.xml\")\n # variables COMP_ROOT_DIR_{} are mutable, all other variables are read only\n self.COMP_ROOT_DIR = {}\n\n # .config_file.xml at the top level may overwrite COMP_ROOT_DIR_ nodes in config_files\n\n if os.path.isfile(config_files_override):\n self.read(config_files_override)\n self.overwrite_existing_entries()\n\n def get_value(self, vid, attribute=None, resolved=True, subgroup=None):\n if \"COMP_ROOT_DIR\" in vid:\n if vid in self.COMP_ROOT_DIR:\n if attribute is not None:\n if vid+attribute[\"component\"] in self.COMP_ROOT_DIR:\n return self.COMP_ROOT_DIR[vid+attribute[\"component\"]]\n else:\n return self.COMP_ROOT_DIR[vid]\n\n value = super(Files, self).get_value(vid, attribute=attribute, resolved=False, subgroup=subgroup)\n if value is None and attribute is not None:\n value = super(Files, self).get_value(vid, attribute=None, resolved=False, subgroup=subgroup)\n\n if \"COMP_ROOT_DIR\" not in vid and value is not None and \"COMP_ROOT_DIR\" in value:\n m = re.search(\"(COMP_ROOT_DIR_[^/]+)/\", value)\n comp_root_dir_var_name = m.group(1)\n comp_root_dir = self.get_value(comp_root_dir_var_name, attribute=attribute, resolved=False, subgroup=subgroup)\n self.set_value(comp_root_dir_var_name, comp_root_dir,subgroup=attribute)\n if resolved:\n value = value.replace(\"$\"+comp_root_dir_var_name, comp_root_dir)\n\n\n if resolved and value is not None:\n value = self.get_resolved_value(value)\n\n return value\n\n def set_value(self, vid, value,subgroup=None,ignore_type=False):\n if \"COMP_ROOT_DIR\" in vid:\n if subgroup is not None:\n self.COMP_ROOT_DIR[vid+subgroup[\"component\"]] = value\n else:\n self.COMP_ROOT_DIR[vid] = value\n\n else:\n expect(False, \"Attempt to set a nonmutable variable {}\".format(vid))\n return value\n\n\n def get_schema(self, nodename, attributes=None):\n node = self.get_optional_child(\"entry\", {\"id\":nodename})\n schemanode = self.get_optional_child(\"schema\", root=node, attributes=attributes)\n if schemanode is not None:\n logger.debug(\"Found schema for {}\".format(nodename))\n return self.get_resolved_value(self.text(schemanode))\n return None\n\n def get_components(self, nodename):\n node = self.get_optional_child(\"entry\", {\"id\":nodename})\n if node is not None:\n valnodes = self.get_children(\"value\", root=self.get_child(\"values\", root=node))\n values = []\n for valnode in valnodes:\n value = self.get(valnode, \"component\")\n values.append(value)\n return values\n\n return None\n", "path": "scripts/lib/CIME/XML/files.py"}]}
1,541
299
gh_patches_debug_11870
rasdani/github-patches
git_diff
vllm-project__vllm-5077
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Remove EOS token before passing the tokenized input to model How to remove eos token id before passing the input tokens to model. I'm trying for fine-tuned mistral model. Just because there is an eos token id at the end of sentence, model generates the results for a different input which is similar to original input --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `vllm/engine/output_processor/stop_checker.py` Content: ``` 1 from typing import Callable, Optional 2 3 from transformers import PreTrainedTokenizer 4 5 from vllm.lora.request import LoRARequest 6 from vllm.sampling_params import SamplingParams 7 from vllm.sequence import Sequence, SequenceStatus 8 9 10 class StopChecker: 11 """LLMEngine helper class which separates out the logic involving stop 12 checking. This checks things such as: whether the eos token was emitted, 13 whether the max_tokens has been consumed, whether a stop string has been 14 emitted, or if we have exceeded the max model len. 15 """ 16 17 def __init__(self, max_model_len: int, 18 get_tokenizer_for_seq: Callable[[Sequence], 19 PreTrainedTokenizer]): 20 # Do not use it directly, but use `self._get_max_model_len`. 21 self._max_model_len = max_model_len 22 self.get_tokenizer_for_seq = get_tokenizer_for_seq 23 24 def _get_max_model_len(self, lora_req: Optional[LoRARequest]): 25 if lora_req and lora_req.long_lora_max_len: 26 return lora_req.long_lora_max_len 27 else: 28 return self._max_model_len 29 30 def maybe_stop_sequence( 31 self, 32 seq: Sequence, 33 new_char_count: int, 34 sampling_params: SamplingParams, 35 lora_req: Optional[LoRARequest] = None, 36 ) -> None: 37 """Stop the finished sequences. 38 39 new_char_count is the number of chars added to the 40 sequence's output text for the newly generated token 41 """ 42 43 # Check if the minimum number of tokens has been generated yet; 44 # skip the stop string/token checks if not 45 if seq.get_output_len() < sampling_params.min_tokens: 46 return 47 48 # Check if the sequence has generated the EOS token. 49 if ((not sampling_params.ignore_eos) 50 and seq.get_last_token_id() == seq.eos_token_id): 51 seq.status = SequenceStatus.FINISHED_STOPPED 52 return 53 54 # Check if a stop token was encountered. 55 # This assumes a single token produced per step. 56 last_token_id = seq.get_last_token_id() 57 if last_token_id in sampling_params.stop_token_ids: 58 if new_char_count and ( 59 not sampling_params.include_stop_str_in_output): 60 # Remove last token 61 seq.output_text = seq.output_text[:-new_char_count] 62 seq.status = SequenceStatus.FINISHED_STOPPED 63 seq.stop_reason = last_token_id 64 return 65 66 # Check if any stop strings are matched. 67 stop_str = self._check_stop_strings(seq, new_char_count, 68 sampling_params) 69 if stop_str is not None: 70 seq.status = SequenceStatus.FINISHED_STOPPED 71 seq.stop_reason = stop_str 72 return 73 74 # Check if the sequence has reached max_model_len. 75 if seq.get_len() > self._get_max_model_len(lora_req): 76 seq.status = SequenceStatus.FINISHED_LENGTH_CAPPED 77 return 78 79 # Check if the sequence has reached max_tokens. 80 if seq.get_output_len() == sampling_params.max_tokens: 81 seq.status = SequenceStatus.FINISHED_LENGTH_CAPPED 82 return 83 84 @staticmethod 85 def _check_stop_strings(seq: Sequence, new_char_count: int, 86 sampling_params: SamplingParams) -> Optional[str]: 87 """Check if any stop strings are matched and truncate sequence 88 output text accordingly. 89 90 Returns the stop string if matched or else None. 91 """ 92 if not new_char_count: 93 return None 94 95 for stop_str in sampling_params.stop: 96 stop_string_len = len(stop_str) 97 # Avoid searching already-searched text. 98 stop_index = seq.output_text.find( 99 stop_str, -new_char_count - stop_string_len) 100 if stop_index == -1: 101 continue 102 103 if sampling_params.include_stop_str_in_output: 104 # Truncate to end of stop string. 105 stop_index += stop_string_len 106 if stop_index >= len(seq.output_text): 107 # No truncation required. 108 return stop_str 109 110 # Truncate the output text to either the beginning 111 # or end of the stop string. 112 seq.output_text = seq.output_text[:stop_index] 113 return stop_str 114 return None 115 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/vllm/engine/output_processor/stop_checker.py b/vllm/engine/output_processor/stop_checker.py --- a/vllm/engine/output_processor/stop_checker.py +++ b/vllm/engine/output_processor/stop_checker.py @@ -48,6 +48,11 @@ # Check if the sequence has generated the EOS token. if ((not sampling_params.ignore_eos) and seq.get_last_token_id() == seq.eos_token_id): + # Remove the last EOS token unless explicitly specified + # This prevents unintended exposure of the EOS token + if new_char_count and ( + not sampling_params.include_stop_str_in_output): + seq.output_text = seq.output_text[:-new_char_count] seq.status = SequenceStatus.FINISHED_STOPPED return
{"golden_diff": "diff --git a/vllm/engine/output_processor/stop_checker.py b/vllm/engine/output_processor/stop_checker.py\n--- a/vllm/engine/output_processor/stop_checker.py\n+++ b/vllm/engine/output_processor/stop_checker.py\n@@ -48,6 +48,11 @@\n # Check if the sequence has generated the EOS token.\n if ((not sampling_params.ignore_eos)\n and seq.get_last_token_id() == seq.eos_token_id):\n+ # Remove the last EOS token unless explicitly specified\n+ # This prevents unintended exposure of the EOS token\n+ if new_char_count and (\n+ not sampling_params.include_stop_str_in_output):\n+ seq.output_text = seq.output_text[:-new_char_count]\n seq.status = SequenceStatus.FINISHED_STOPPED\n return\n", "issue": "Remove EOS token before passing the tokenized input to model\n\r\n\r\nHow to remove eos token id before passing the input tokens to model. I'm trying for fine-tuned mistral model. Just because there is an eos token id at the end of sentence, model generates the results for a different input which is similar to original input\n", "before_files": [{"content": "from typing import Callable, Optional\n\nfrom transformers import PreTrainedTokenizer\n\nfrom vllm.lora.request import LoRARequest\nfrom vllm.sampling_params import SamplingParams\nfrom vllm.sequence import Sequence, SequenceStatus\n\n\nclass StopChecker:\n \"\"\"LLMEngine helper class which separates out the logic involving stop\n checking. This checks things such as: whether the eos token was emitted,\n whether the max_tokens has been consumed, whether a stop string has been\n emitted, or if we have exceeded the max model len.\n \"\"\"\n\n def __init__(self, max_model_len: int,\n get_tokenizer_for_seq: Callable[[Sequence],\n PreTrainedTokenizer]):\n # Do not use it directly, but use `self._get_max_model_len`.\n self._max_model_len = max_model_len\n self.get_tokenizer_for_seq = get_tokenizer_for_seq\n\n def _get_max_model_len(self, lora_req: Optional[LoRARequest]):\n if lora_req and lora_req.long_lora_max_len:\n return lora_req.long_lora_max_len\n else:\n return self._max_model_len\n\n def maybe_stop_sequence(\n self,\n seq: Sequence,\n new_char_count: int,\n sampling_params: SamplingParams,\n lora_req: Optional[LoRARequest] = None,\n ) -> None:\n \"\"\"Stop the finished sequences.\n\n new_char_count is the number of chars added to the\n sequence's output text for the newly generated token\n \"\"\"\n\n # Check if the minimum number of tokens has been generated yet;\n # skip the stop string/token checks if not\n if seq.get_output_len() < sampling_params.min_tokens:\n return\n\n # Check if the sequence has generated the EOS token.\n if ((not sampling_params.ignore_eos)\n and seq.get_last_token_id() == seq.eos_token_id):\n seq.status = SequenceStatus.FINISHED_STOPPED\n return\n\n # Check if a stop token was encountered.\n # This assumes a single token produced per step.\n last_token_id = seq.get_last_token_id()\n if last_token_id in sampling_params.stop_token_ids:\n if new_char_count and (\n not sampling_params.include_stop_str_in_output):\n # Remove last token\n seq.output_text = seq.output_text[:-new_char_count]\n seq.status = SequenceStatus.FINISHED_STOPPED\n seq.stop_reason = last_token_id\n return\n\n # Check if any stop strings are matched.\n stop_str = self._check_stop_strings(seq, new_char_count,\n sampling_params)\n if stop_str is not None:\n seq.status = SequenceStatus.FINISHED_STOPPED\n seq.stop_reason = stop_str\n return\n\n # Check if the sequence has reached max_model_len.\n if seq.get_len() > 
self._get_max_model_len(lora_req):\n seq.status = SequenceStatus.FINISHED_LENGTH_CAPPED\n return\n\n # Check if the sequence has reached max_tokens.\n if seq.get_output_len() == sampling_params.max_tokens:\n seq.status = SequenceStatus.FINISHED_LENGTH_CAPPED\n return\n\n @staticmethod\n def _check_stop_strings(seq: Sequence, new_char_count: int,\n sampling_params: SamplingParams) -> Optional[str]:\n \"\"\"Check if any stop strings are matched and truncate sequence\n output text accordingly.\n\n Returns the stop string if matched or else None.\n \"\"\"\n if not new_char_count:\n return None\n\n for stop_str in sampling_params.stop:\n stop_string_len = len(stop_str)\n # Avoid searching already-searched text.\n stop_index = seq.output_text.find(\n stop_str, -new_char_count - stop_string_len)\n if stop_index == -1:\n continue\n\n if sampling_params.include_stop_str_in_output:\n # Truncate to end of stop string.\n stop_index += stop_string_len\n if stop_index >= len(seq.output_text):\n # No truncation required.\n return stop_str\n\n # Truncate the output text to either the beginning\n # or end of the stop string.\n seq.output_text = seq.output_text[:stop_index]\n return stop_str\n return None\n", "path": "vllm/engine/output_processor/stop_checker.py"}], "after_files": [{"content": "from typing import Callable, Optional\n\nfrom transformers import PreTrainedTokenizer\n\nfrom vllm.lora.request import LoRARequest\nfrom vllm.sampling_params import SamplingParams\nfrom vllm.sequence import Sequence, SequenceStatus\n\n\nclass StopChecker:\n \"\"\"LLMEngine helper class which separates out the logic involving stop\n checking. This checks things such as: whether the eos token was emitted,\n whether the max_tokens has been consumed, whether a stop string has been\n emitted, or if we have exceeded the max model len.\n \"\"\"\n\n def __init__(self, max_model_len: int,\n get_tokenizer_for_seq: Callable[[Sequence],\n PreTrainedTokenizer]):\n # Do not use it directly, but use `self._get_max_model_len`.\n self._max_model_len = max_model_len\n self.get_tokenizer_for_seq = get_tokenizer_for_seq\n\n def _get_max_model_len(self, lora_req: Optional[LoRARequest]):\n if lora_req and lora_req.long_lora_max_len:\n return lora_req.long_lora_max_len\n else:\n return self._max_model_len\n\n def maybe_stop_sequence(\n self,\n seq: Sequence,\n new_char_count: int,\n sampling_params: SamplingParams,\n lora_req: Optional[LoRARequest] = None,\n ) -> None:\n \"\"\"Stop the finished sequences.\n\n new_char_count is the number of chars added to the\n sequence's output text for the newly generated token\n \"\"\"\n\n # Check if the minimum number of tokens has been generated yet;\n # skip the stop string/token checks if not\n if seq.get_output_len() < sampling_params.min_tokens:\n return\n\n # Check if the sequence has generated the EOS token.\n if ((not sampling_params.ignore_eos)\n and seq.get_last_token_id() == seq.eos_token_id):\n # Remove the last EOS token unless explicitly specified\n # This prevents unintended exposure of the EOS token\n if new_char_count and (\n not sampling_params.include_stop_str_in_output):\n seq.output_text = seq.output_text[:-new_char_count]\n seq.status = SequenceStatus.FINISHED_STOPPED\n return\n\n # Check if a stop token was encountered.\n # This assumes a single token produced per step.\n last_token_id = seq.get_last_token_id()\n if last_token_id in sampling_params.stop_token_ids:\n if new_char_count and (\n not sampling_params.include_stop_str_in_output):\n # Remove last token\n seq.output_text 
= seq.output_text[:-new_char_count]\n seq.status = SequenceStatus.FINISHED_STOPPED\n seq.stop_reason = last_token_id\n return\n\n # Check if any stop strings are matched.\n stop_str = self._check_stop_strings(seq, new_char_count,\n sampling_params)\n if stop_str is not None:\n seq.status = SequenceStatus.FINISHED_STOPPED\n seq.stop_reason = stop_str\n return\n\n # Check if the sequence has reached max_model_len.\n if seq.get_len() > self._get_max_model_len(lora_req):\n seq.status = SequenceStatus.FINISHED_LENGTH_CAPPED\n return\n\n # Check if the sequence has reached max_tokens.\n if seq.get_output_len() == sampling_params.max_tokens:\n seq.status = SequenceStatus.FINISHED_LENGTH_CAPPED\n return\n\n @staticmethod\n def _check_stop_strings(seq: Sequence, new_char_count: int,\n sampling_params: SamplingParams) -> Optional[str]:\n \"\"\"Check if any stop strings are matched and truncate sequence\n output text accordingly.\n\n Returns the stop string if matched or else None.\n \"\"\"\n if not new_char_count:\n return None\n\n for stop_str in sampling_params.stop:\n stop_string_len = len(stop_str)\n # Avoid searching already-searched text.\n stop_index = seq.output_text.find(\n stop_str, -new_char_count - stop_string_len)\n if stop_index == -1:\n continue\n\n if sampling_params.include_stop_str_in_output:\n # Truncate to end of stop string.\n stop_index += stop_string_len\n if stop_index >= len(seq.output_text):\n # No truncation required.\n return stop_str\n\n # Truncate the output text to either the beginning\n # or end of the stop string.\n seq.output_text = seq.output_text[:stop_index]\n return stop_str\n return None\n", "path": "vllm/engine/output_processor/stop_checker.py"}]}
1,478
174
gh_patches_debug_19928
rasdani/github-patches
git_diff
benoitc__gunicorn-900
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- proxy protocol info lost after first keep-alive request env: - python v2.7 - gunicorn v19.1.1 - gevent v1.0.1 worker type - keep-alive proxy-protcol'd lb (aws elb) in front of gunicorn for multiple reqs during keep-alive session: - first parses `req.proxy_protocol_info` - subsequent reqs do not so workers see only proxy network info (`REMOTE_ADDR`, `REMOTE_PORT` ). probably should save off from 1st and propagate to later requests. proxy protocol info lost after first keep-alive request env: - python v2.7 - gunicorn v19.1.1 - gevent v1.0.1 worker type - keep-alive proxy-protcol'd lb (aws elb) in front of gunicorn for multiple reqs during keep-alive session: - first parses `req.proxy_protocol_info` - subsequent reqs do not so workers see only proxy network info (`REMOTE_ADDR`, `REMOTE_PORT` ). probably should save off from 1st and propagate to later requests. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `gunicorn/workers/async.py` Content: ``` 1 # -*- coding: utf-8 - 2 # 3 # This file is part of gunicorn released under the MIT license. 4 # See the NOTICE for more information. 5 6 from datetime import datetime 7 import errno 8 import socket 9 import ssl 10 import sys 11 12 import gunicorn.http as http 13 import gunicorn.http.wsgi as wsgi 14 import gunicorn.util as util 15 import gunicorn.workers.base as base 16 from gunicorn import six 17 18 ALREADY_HANDLED = object() 19 20 21 class AsyncWorker(base.Worker): 22 23 def __init__(self, *args, **kwargs): 24 super(AsyncWorker, self).__init__(*args, **kwargs) 25 self.worker_connections = self.cfg.worker_connections 26 27 def timeout_ctx(self): 28 raise NotImplementedError() 29 30 def handle(self, listener, client, addr): 31 req = None 32 try: 33 parser = http.RequestParser(self.cfg, client) 34 try: 35 listener_name = listener.getsockname() 36 if not self.cfg.keepalive: 37 req = six.next(parser) 38 self.handle_request(listener_name, req, client, addr) 39 else: 40 # keepalive loop 41 while True: 42 req = None 43 with self.timeout_ctx(): 44 req = six.next(parser) 45 if not req: 46 break 47 self.handle_request(listener_name, req, client, addr) 48 except http.errors.NoMoreData as e: 49 self.log.debug("Ignored premature client disconnection. %s", e) 50 except StopIteration as e: 51 self.log.debug("Closing connection. %s", e) 52 except ssl.SSLError: 53 exc_info = sys.exc_info() 54 # pass to next try-except level 55 six.reraise(exc_info[0], exc_info[1], exc_info[2]) 56 except socket.error: 57 exc_info = sys.exc_info() 58 # pass to next try-except level 59 six.reraise(exc_info[0], exc_info[1], exc_info[2]) 60 except Exception as e: 61 self.handle_error(req, client, addr, e) 62 except ssl.SSLError as e: 63 if e.args[0] == ssl.SSL_ERROR_EOF: 64 self.log.debug("ssl connection closed") 65 client.close() 66 else: 67 self.log.debug("Error processing SSL request.") 68 self.handle_error(req, client, addr, e) 69 except socket.error as e: 70 if e.args[0] not in (errno.EPIPE, errno.ECONNRESET): 71 self.log.exception("Socket error processing request.") 72 else: 73 if e.args[0] == errno.ECONNRESET: 74 self.log.debug("Ignoring connection reset") 75 else: 76 self.log.debug("Ignoring EPIPE") 77 except Exception as e: 78 self.handle_error(req, client, addr, e) 79 finally: 80 util.close(client) 81 82 def handle_request(self, listener_name, req, sock, addr): 83 request_start = datetime.now() 84 environ = {} 85 resp = None 86 try: 87 self.cfg.pre_request(self, req) 88 resp, environ = wsgi.create(req, sock, addr, 89 listener_name, self.cfg) 90 environ["wsgi.multithread"] = True 91 self.nr += 1 92 if self.alive and self.nr >= self.max_requests: 93 self.log.info("Autorestarting worker after current request.") 94 resp.force_close() 95 self.alive = False 96 97 if not self.cfg.keepalive: 98 resp.force_close() 99 100 respiter = self.wsgi(environ, resp.start_response) 101 if respiter == ALREADY_HANDLED: 102 return False 103 try: 104 if isinstance(respiter, environ['wsgi.file_wrapper']): 105 resp.write_file(respiter) 106 else: 107 for item in respiter: 108 resp.write(item) 109 resp.close() 110 request_time = datetime.now() - request_start 111 self.log.access(resp, req, environ, request_time) 112 finally: 113 if hasattr(respiter, "close"): 114 respiter.close() 115 if resp.should_close(): 116 raise StopIteration() 117 except StopIteration: 118 raise 119 except Exception: 120 if resp and resp.headers_sent: 121 # If the requests have already been sent, we should close the 122 # connection to indicate the error. 123 self.log.exception("Error handling request") 124 try: 125 sock.shutdown(socket.SHUT_RDWR) 126 sock.close() 127 except socket.error: 128 pass 129 raise StopIteration() 130 raise 131 finally: 132 try: 133 self.cfg.post_request(self, req, environ, resp) 134 except Exception: 135 self.log.exception("Exception in post_request hook") 136 return True 137 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/gunicorn/workers/async.py b/gunicorn/workers/async.py --- a/gunicorn/workers/async.py +++ b/gunicorn/workers/async.py @@ -38,12 +38,14 @@ self.handle_request(listener_name, req, client, addr) else: # keepalive loop + proxy_protocol_info = req.proxy_protocol_info while True: req = None with self.timeout_ctx(): req = six.next(parser) if not req: break + req.proxy_protocol_info = proxy_protocol_info self.handle_request(listener_name, req, client, addr) except http.errors.NoMoreData as e: self.log.debug("Ignored premature client disconnection. %s", e)
{"golden_diff": "diff --git a/gunicorn/workers/async.py b/gunicorn/workers/async.py\n--- a/gunicorn/workers/async.py\n+++ b/gunicorn/workers/async.py\n@@ -38,12 +38,14 @@\n self.handle_request(listener_name, req, client, addr)\n else:\n # keepalive loop\n+ proxy_protocol_info = req.proxy_protocol_info\n while True:\n req = None\n with self.timeout_ctx():\n req = six.next(parser)\n if not req:\n break\n+ req.proxy_protocol_info = proxy_protocol_info\n self.handle_request(listener_name, req, client, addr)\n except http.errors.NoMoreData as e:\n self.log.debug(\"Ignored premature client disconnection. %s\", e)\n", "issue": "proxy protocol info lost after first keep-alive request\nenv:\n- python v2.7\n- gunicorn v19.1.1\n- gevent v1.0.1 worker type\n- keep-alive proxy-protcol'd lb (aws elb) in front of gunicorn\n\nfor multiple reqs during keep-alive session:\n- first parses `req.proxy_protocol_info`\n- subsequent reqs do not\n so workers see only proxy network info (`REMOTE_ADDR`, `REMOTE_PORT` ).\n\nprobably should save off from 1st and propagate to later requests.\n\nproxy protocol info lost after first keep-alive request\nenv:\n- python v2.7\n- gunicorn v19.1.1\n- gevent v1.0.1 worker type\n- keep-alive proxy-protcol'd lb (aws elb) in front of gunicorn\n\nfor multiple reqs during keep-alive session:\n- first parses `req.proxy_protocol_info`\n- subsequent reqs do not\n so workers see only proxy network info (`REMOTE_ADDR`, `REMOTE_PORT` ).\n\nprobably should save off from 1st and propagate to later requests.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -\n#\n# This file is part of gunicorn released under the MIT license.\n# See the NOTICE for more information.\n\nfrom datetime import datetime\nimport errno\nimport socket\nimport ssl\nimport sys\n\nimport gunicorn.http as http\nimport gunicorn.http.wsgi as wsgi\nimport gunicorn.util as util\nimport gunicorn.workers.base as base\nfrom gunicorn import six\n\nALREADY_HANDLED = object()\n\n\nclass AsyncWorker(base.Worker):\n\n def __init__(self, *args, **kwargs):\n super(AsyncWorker, self).__init__(*args, **kwargs)\n self.worker_connections = self.cfg.worker_connections\n\n def timeout_ctx(self):\n raise NotImplementedError()\n\n def handle(self, listener, client, addr):\n req = None\n try:\n parser = http.RequestParser(self.cfg, client)\n try:\n listener_name = listener.getsockname()\n if not self.cfg.keepalive:\n req = six.next(parser)\n self.handle_request(listener_name, req, client, addr)\n else:\n # keepalive loop\n while True:\n req = None\n with self.timeout_ctx():\n req = six.next(parser)\n if not req:\n break\n self.handle_request(listener_name, req, client, addr)\n except http.errors.NoMoreData as e:\n self.log.debug(\"Ignored premature client disconnection. %s\", e)\n except StopIteration as e:\n self.log.debug(\"Closing connection. 
%s\", e)\n except ssl.SSLError:\n exc_info = sys.exc_info()\n # pass to next try-except level\n six.reraise(exc_info[0], exc_info[1], exc_info[2])\n except socket.error:\n exc_info = sys.exc_info()\n # pass to next try-except level\n six.reraise(exc_info[0], exc_info[1], exc_info[2])\n except Exception as e:\n self.handle_error(req, client, addr, e)\n except ssl.SSLError as e:\n if e.args[0] == ssl.SSL_ERROR_EOF:\n self.log.debug(\"ssl connection closed\")\n client.close()\n else:\n self.log.debug(\"Error processing SSL request.\")\n self.handle_error(req, client, addr, e)\n except socket.error as e:\n if e.args[0] not in (errno.EPIPE, errno.ECONNRESET):\n self.log.exception(\"Socket error processing request.\")\n else:\n if e.args[0] == errno.ECONNRESET:\n self.log.debug(\"Ignoring connection reset\")\n else:\n self.log.debug(\"Ignoring EPIPE\")\n except Exception as e:\n self.handle_error(req, client, addr, e)\n finally:\n util.close(client)\n\n def handle_request(self, listener_name, req, sock, addr):\n request_start = datetime.now()\n environ = {}\n resp = None\n try:\n self.cfg.pre_request(self, req)\n resp, environ = wsgi.create(req, sock, addr,\n listener_name, self.cfg)\n environ[\"wsgi.multithread\"] = True\n self.nr += 1\n if self.alive and self.nr >= self.max_requests:\n self.log.info(\"Autorestarting worker after current request.\")\n resp.force_close()\n self.alive = False\n\n if not self.cfg.keepalive:\n resp.force_close()\n\n respiter = self.wsgi(environ, resp.start_response)\n if respiter == ALREADY_HANDLED:\n return False\n try:\n if isinstance(respiter, environ['wsgi.file_wrapper']):\n resp.write_file(respiter)\n else:\n for item in respiter:\n resp.write(item)\n resp.close()\n request_time = datetime.now() - request_start\n self.log.access(resp, req, environ, request_time)\n finally:\n if hasattr(respiter, \"close\"):\n respiter.close()\n if resp.should_close():\n raise StopIteration()\n except StopIteration:\n raise\n except Exception:\n if resp and resp.headers_sent:\n # If the requests have already been sent, we should close the\n # connection to indicate the error.\n self.log.exception(\"Error handling request\")\n try:\n sock.shutdown(socket.SHUT_RDWR)\n sock.close()\n except socket.error:\n pass\n raise StopIteration()\n raise\n finally:\n try:\n self.cfg.post_request(self, req, environ, resp)\n except Exception:\n self.log.exception(\"Exception in post_request hook\")\n return True\n", "path": "gunicorn/workers/async.py"}], "after_files": [{"content": "# -*- coding: utf-8 -\n#\n# This file is part of gunicorn released under the MIT license.\n# See the NOTICE for more information.\n\nfrom datetime import datetime\nimport errno\nimport socket\nimport ssl\nimport sys\n\nimport gunicorn.http as http\nimport gunicorn.http.wsgi as wsgi\nimport gunicorn.util as util\nimport gunicorn.workers.base as base\nfrom gunicorn import six\n\nALREADY_HANDLED = object()\n\n\nclass AsyncWorker(base.Worker):\n\n def __init__(self, *args, **kwargs):\n super(AsyncWorker, self).__init__(*args, **kwargs)\n self.worker_connections = self.cfg.worker_connections\n\n def timeout_ctx(self):\n raise NotImplementedError()\n\n def handle(self, listener, client, addr):\n req = None\n try:\n parser = http.RequestParser(self.cfg, client)\n try:\n listener_name = listener.getsockname()\n if not self.cfg.keepalive:\n req = six.next(parser)\n self.handle_request(listener_name, req, client, addr)\n else:\n # keepalive loop\n proxy_protocol_info = req.proxy_protocol_info\n while True:\n req = None\n with 
self.timeout_ctx():\n req = six.next(parser)\n if not req:\n break\n req.proxy_protocol_info = proxy_protocol_info\n self.handle_request(listener_name, req, client, addr)\n except http.errors.NoMoreData as e:\n self.log.debug(\"Ignored premature client disconnection. %s\", e)\n except StopIteration as e:\n self.log.debug(\"Closing connection. %s\", e)\n except ssl.SSLError:\n exc_info = sys.exc_info()\n # pass to next try-except level\n six.reraise(exc_info[0], exc_info[1], exc_info[2])\n except socket.error:\n exc_info = sys.exc_info()\n # pass to next try-except level\n six.reraise(exc_info[0], exc_info[1], exc_info[2])\n except Exception as e:\n self.handle_error(req, client, addr, e)\n except ssl.SSLError as e:\n if e.args[0] == ssl.SSL_ERROR_EOF:\n self.log.debug(\"ssl connection closed\")\n client.close()\n else:\n self.log.debug(\"Error processing SSL request.\")\n self.handle_error(req, client, addr, e)\n except socket.error as e:\n if e.args[0] not in (errno.EPIPE, errno.ECONNRESET):\n self.log.exception(\"Socket error processing request.\")\n else:\n if e.args[0] == errno.ECONNRESET:\n self.log.debug(\"Ignoring connection reset\")\n else:\n self.log.debug(\"Ignoring EPIPE\")\n except Exception as e:\n self.handle_error(req, client, addr, e)\n finally:\n util.close(client)\n\n def handle_request(self, listener_name, req, sock, addr):\n request_start = datetime.now()\n environ = {}\n resp = None\n try:\n self.cfg.pre_request(self, req)\n resp, environ = wsgi.create(req, sock, addr,\n listener_name, self.cfg)\n environ[\"wsgi.multithread\"] = True\n self.nr += 1\n if self.alive and self.nr >= self.max_requests:\n self.log.info(\"Autorestarting worker after current request.\")\n resp.force_close()\n self.alive = False\n\n if not self.cfg.keepalive:\n resp.force_close()\n\n respiter = self.wsgi(environ, resp.start_response)\n if respiter == ALREADY_HANDLED:\n return False\n try:\n if isinstance(respiter, environ['wsgi.file_wrapper']):\n resp.write_file(respiter)\n else:\n for item in respiter:\n resp.write(item)\n resp.close()\n request_time = datetime.now() - request_start\n self.log.access(resp, req, environ, request_time)\n finally:\n if hasattr(respiter, \"close\"):\n respiter.close()\n if resp.should_close():\n raise StopIteration()\n except StopIteration:\n raise\n except Exception:\n if resp and resp.headers_sent:\n # If the requests have already been sent, we should close the\n # connection to indicate the error.\n self.log.exception(\"Error handling request\")\n try:\n sock.shutdown(socket.SHUT_RDWR)\n sock.close()\n except socket.error:\n pass\n raise StopIteration()\n raise\n finally:\n try:\n self.cfg.post_request(self, req, environ, resp)\n except Exception:\n self.log.exception(\"Exception in post_request hook\")\n return True\n", "path": "gunicorn/workers/async.py"}]}
1,778
168
gh_patches_debug_32610
rasdani/github-patches
git_diff
conan-io__conan-center-index-3830
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- [package] libuuid/1.0.3: apple_clang 12 build broken by patch build of libuuid 1.0.3 is broken on macOS11 / apple-clang 12 due to applied patch the define HAVE_SYS_TIME_H seems to be not defined but the include is valid ``` +#if defined(HAVE_SYS_TIME_H) #include <sys/time.h> +#endif ``` this can be fixed by ``` +#if defined(HAVE_SYS_TIME_H) || defined(__APPLE__) #include <sys/time.h> +#endif ``` but I guess there is a better way to ensure the define is set for apple platform --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `recipes/libuuid/all/conanfile.py` Content: ``` 1 from conans import ConanFile, AutoToolsBuildEnvironment, tools 2 from conans.errors import ConanInvalidConfiguration 3 import os 4 5 6 class LibuuidConan(ConanFile): 7 name = "libuuid" 8 description = "Portable uuid C library" 9 url = "https://github.com/conan-io/conan-center-index" 10 homepage = "https://sourceforge.net/projects/libuuid/" 11 license = "BSD-3-Clause" 12 topics = ("conan", "libuuid", "uuid", "unique-id", "unique-identifier") 13 settings = "os", "arch", "compiler", "build_type" 14 exports_sources = "patches/**" 15 options = {"shared": [True, False], "fPIC": [True, False]} 16 default_options = {"shared": False, "fPIC": True} 17 _source_subfolder = "source_subfolder" 18 _autotools = None 19 20 def source(self): 21 tools.get(**self.conan_data["sources"][self.version]) 22 os.rename(self.name + "-" + self.version, self._source_subfolder) 23 24 def _patch_sources(self): 25 for patch in self.conan_data["patches"][self.version]: 26 tools.patch(**patch) 27 28 def config_options(self): 29 if self.settings.os == 'Windows': 30 del self.options.fPIC 31 32 def configure(self): 33 if self.settings.os == "Windows": 34 raise ConanInvalidConfiguration("libuuid is not supported on Windows") 35 del self.settings.compiler.libcxx 36 del self.settings.compiler.cppstd 37 38 def _configure_autotools(self): 39 if not self._autotools: 40 configure_args = [ 41 "--enable-shared=%s" % ("yes" if self.options.shared else "no"), 42 "--enable-static=%s" % ("no" if self.options.shared else "yes") 43 ] 44 self._autotools = AutoToolsBuildEnvironment(self) 45 if "x86" in self.settings.arch: 46 self._autotools.flags.append('-mstackrealign') 47 self._autotools.configure(args=configure_args) 48 return self._autotools 49 50 def build(self): 51 self._patch_sources() 52 with tools.chdir(self._source_subfolder): 53 autotools = self._configure_autotools() 54 autotools.make() 55 56 def package(self): 57 self.copy("COPYING", dst="licenses", src=self._source_subfolder) 58 with tools.chdir(self._source_subfolder): 59 autotools = self._configure_autotools() 60 autotools.install() 61 la_file = os.path.join(self.package_folder, "lib", "libuuid.la") 62 if os.path.isfile(la_file): 63 os.unlink(la_file) 64 tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig")) 65 66 def package_info(self): 67 self.cpp_info.libs = tools.collect_libs(self) 68 self.cpp_info.includedirs.append(os.path.join("include", "uuid")) 69 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/recipes/libuuid/all/conanfile.py b/recipes/libuuid/all/conanfile.py --- a/recipes/libuuid/all/conanfile.py +++ b/recipes/libuuid/all/conanfile.py @@ -35,29 +35,34 @@ del self.settings.compiler.libcxx del self.settings.compiler.cppstd + def build_requirements(self): + self.build_requires("libtool/2.4.6") + def _configure_autotools(self): - if not self._autotools: - configure_args = [ - "--enable-shared=%s" % ("yes" if self.options.shared else "no"), - "--enable-static=%s" % ("no" if self.options.shared else "yes") - ] - self._autotools = AutoToolsBuildEnvironment(self) - if "x86" in self.settings.arch: - self._autotools.flags.append('-mstackrealign') - self._autotools.configure(args=configure_args) + if self._autotools: + return self._autotools + self._autotools = AutoToolsBuildEnvironment(self) + yes_no = lambda v: "yes" if v else "no" + configure_args = [ + "--enable-shared={}".format(yes_no(self.options.shared)), + "--enable-static={}".format(yes_no(not self.options.shared)), + ] + if "x86" in self.settings.arch: + self._autotools.flags.append('-mstackrealign') + self._autotools.configure(args=configure_args, configure_dir=self._source_subfolder) return self._autotools def build(self): self._patch_sources() with tools.chdir(self._source_subfolder): - autotools = self._configure_autotools() - autotools.make() + self.run("autoreconf -fiv", run_environment=True) + autotools = self._configure_autotools() + autotools.make() def package(self): self.copy("COPYING", dst="licenses", src=self._source_subfolder) - with tools.chdir(self._source_subfolder): - autotools = self._configure_autotools() - autotools.install() + autotools = self._configure_autotools() + autotools.install() la_file = os.path.join(self.package_folder, "lib", "libuuid.la") if os.path.isfile(la_file): os.unlink(la_file)
{"golden_diff": "diff --git a/recipes/libuuid/all/conanfile.py b/recipes/libuuid/all/conanfile.py\n--- a/recipes/libuuid/all/conanfile.py\n+++ b/recipes/libuuid/all/conanfile.py\n@@ -35,29 +35,34 @@\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n \n+ def build_requirements(self):\n+ self.build_requires(\"libtool/2.4.6\")\n+\n def _configure_autotools(self):\n- if not self._autotools:\n- configure_args = [\n- \"--enable-shared=%s\" % (\"yes\" if self.options.shared else \"no\"),\n- \"--enable-static=%s\" % (\"no\" if self.options.shared else \"yes\")\n- ]\n- self._autotools = AutoToolsBuildEnvironment(self)\n- if \"x86\" in self.settings.arch:\n- self._autotools.flags.append('-mstackrealign')\n- self._autotools.configure(args=configure_args)\n+ if self._autotools:\n+ return self._autotools\n+ self._autotools = AutoToolsBuildEnvironment(self)\n+ yes_no = lambda v: \"yes\" if v else \"no\"\n+ configure_args = [\n+ \"--enable-shared={}\".format(yes_no(self.options.shared)),\n+ \"--enable-static={}\".format(yes_no(not self.options.shared)),\n+ ]\n+ if \"x86\" in self.settings.arch:\n+ self._autotools.flags.append('-mstackrealign')\n+ self._autotools.configure(args=configure_args, configure_dir=self._source_subfolder)\n return self._autotools\n \n def build(self):\n self._patch_sources()\n with tools.chdir(self._source_subfolder):\n- autotools = self._configure_autotools()\n- autotools.make()\n+ self.run(\"autoreconf -fiv\", run_environment=True)\n+ autotools = self._configure_autotools()\n+ autotools.make()\n \n def package(self):\n self.copy(\"COPYING\", dst=\"licenses\", src=self._source_subfolder)\n- with tools.chdir(self._source_subfolder):\n- autotools = self._configure_autotools()\n- autotools.install()\n+ autotools = self._configure_autotools()\n+ autotools.install()\n la_file = os.path.join(self.package_folder, \"lib\", \"libuuid.la\")\n if os.path.isfile(la_file):\n os.unlink(la_file)\n", "issue": "[package] libuuid/1.0.3: apple_clang 12 build broken by patch\nbuild of libuuid 1.0.3 is broken on macOS11 / apple-clang 12 due to applied patch\r\nthe define HAVE_SYS_TIME_H seems to be not defined but the include is valid\r\n\r\n```\r\n+#if defined(HAVE_SYS_TIME_H)\r\n #include <sys/time.h>\r\n+#endif\r\n```\r\n\r\nthis can be fixed by \r\n\r\n```\r\n+#if defined(HAVE_SYS_TIME_H) || defined(__APPLE__)\r\n #include <sys/time.h>\r\n+#endif\r\n```\r\n\r\nbut I guess there is a better way to ensure the define is set for apple platform\r\n\n", "before_files": [{"content": "from conans import ConanFile, AutoToolsBuildEnvironment, tools\nfrom conans.errors import ConanInvalidConfiguration\nimport os\n\n\nclass LibuuidConan(ConanFile):\n name = \"libuuid\"\n description = \"Portable uuid C library\"\n url = \"https://github.com/conan-io/conan-center-index\"\n homepage = \"https://sourceforge.net/projects/libuuid/\"\n license = \"BSD-3-Clause\"\n topics = (\"conan\", \"libuuid\", \"uuid\", \"unique-id\", \"unique-identifier\")\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n exports_sources = \"patches/**\"\n options = {\"shared\": [True, False], \"fPIC\": [True, False]}\n default_options = {\"shared\": False, \"fPIC\": True}\n _source_subfolder = \"source_subfolder\"\n _autotools = None\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n os.rename(self.name + \"-\" + self.version, self._source_subfolder)\n\n def _patch_sources(self):\n for patch in self.conan_data[\"patches\"][self.version]:\n tools.patch(**patch)\n\n def 
config_options(self):\n if self.settings.os == 'Windows':\n del self.options.fPIC\n\n def configure(self):\n if self.settings.os == \"Windows\":\n raise ConanInvalidConfiguration(\"libuuid is not supported on Windows\")\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n\n def _configure_autotools(self):\n if not self._autotools:\n configure_args = [\n \"--enable-shared=%s\" % (\"yes\" if self.options.shared else \"no\"),\n \"--enable-static=%s\" % (\"no\" if self.options.shared else \"yes\")\n ]\n self._autotools = AutoToolsBuildEnvironment(self)\n if \"x86\" in self.settings.arch:\n self._autotools.flags.append('-mstackrealign')\n self._autotools.configure(args=configure_args)\n return self._autotools\n\n def build(self):\n self._patch_sources()\n with tools.chdir(self._source_subfolder):\n autotools = self._configure_autotools()\n autotools.make()\n\n def package(self):\n self.copy(\"COPYING\", dst=\"licenses\", src=self._source_subfolder)\n with tools.chdir(self._source_subfolder):\n autotools = self._configure_autotools()\n autotools.install()\n la_file = os.path.join(self.package_folder, \"lib\", \"libuuid.la\")\n if os.path.isfile(la_file):\n os.unlink(la_file)\n tools.rmdir(os.path.join(self.package_folder, \"lib\", \"pkgconfig\"))\n\n def package_info(self):\n self.cpp_info.libs = tools.collect_libs(self)\n self.cpp_info.includedirs.append(os.path.join(\"include\", \"uuid\"))\n", "path": "recipes/libuuid/all/conanfile.py"}], "after_files": [{"content": "from conans import ConanFile, AutoToolsBuildEnvironment, tools\nfrom conans.errors import ConanInvalidConfiguration\nimport os\n\n\nclass LibuuidConan(ConanFile):\n name = \"libuuid\"\n description = \"Portable uuid C library\"\n url = \"https://github.com/conan-io/conan-center-index\"\n homepage = \"https://sourceforge.net/projects/libuuid/\"\n license = \"BSD-3-Clause\"\n topics = (\"conan\", \"libuuid\", \"uuid\", \"unique-id\", \"unique-identifier\")\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n exports_sources = \"patches/**\"\n options = {\"shared\": [True, False], \"fPIC\": [True, False]}\n default_options = {\"shared\": False, \"fPIC\": True}\n _source_subfolder = \"source_subfolder\"\n _autotools = None\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n os.rename(self.name + \"-\" + self.version, self._source_subfolder)\n\n def _patch_sources(self):\n for patch in self.conan_data[\"patches\"][self.version]:\n tools.patch(**patch)\n\n def config_options(self):\n if self.settings.os == 'Windows':\n del self.options.fPIC\n\n def configure(self):\n if self.settings.os == \"Windows\":\n raise ConanInvalidConfiguration(\"libuuid is not supported on Windows\")\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n\n def build_requirements(self):\n self.build_requires(\"libtool/2.4.6\")\n\n def _configure_autotools(self):\n if self._autotools:\n return self._autotools\n self._autotools = AutoToolsBuildEnvironment(self)\n yes_no = lambda v: \"yes\" if v else \"no\"\n configure_args = [\n \"--enable-shared={}\".format(yes_no(self.options.shared)),\n \"--enable-static={}\".format(yes_no(not self.options.shared)),\n ]\n if \"x86\" in self.settings.arch:\n self._autotools.flags.append('-mstackrealign')\n self._autotools.configure(args=configure_args, configure_dir=self._source_subfolder)\n return self._autotools\n\n def build(self):\n self._patch_sources()\n with tools.chdir(self._source_subfolder):\n self.run(\"autoreconf -fiv\", 
run_environment=True)\n autotools = self._configure_autotools()\n autotools.make()\n\n def package(self):\n self.copy(\"COPYING\", dst=\"licenses\", src=self._source_subfolder)\n autotools = self._configure_autotools()\n autotools.install()\n la_file = os.path.join(self.package_folder, \"lib\", \"libuuid.la\")\n if os.path.isfile(la_file):\n os.unlink(la_file)\n tools.rmdir(os.path.join(self.package_folder, \"lib\", \"pkgconfig\"))\n\n def package_info(self):\n self.cpp_info.libs = tools.collect_libs(self)\n self.cpp_info.includedirs.append(os.path.join(\"include\", \"uuid\"))\n", "path": "recipes/libuuid/all/conanfile.py"}]}
1,167
558
gh_patches_debug_61268
rasdani/github-patches
git_diff
lk-geimfari__mimesis-433
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Fix mypy issues There are several things to consider: 1. Fixing bugs like this one: https://travis-ci.org/lk-geimfari/mimesis/jobs/361128185#L600 2. Adding new options to `mypy` to make it stricter: https://github.com/wemake-services/wemake-django-template/blob/master/%7B%7Bcookiecutter.project_name%7D%7D/setup.cfg#L67 3. Add `tests` folder to be checked by `mypy` (not only `mimesis/` folder is checked) I can do it, if @lk-geimfari does not have anything to add/comment. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `mimesis/providers/payment.py` Content: ``` 1 """Provides data related to payment.""" 2 3 import re 4 import string 5 from typing import Optional 6 7 from mimesis.data import CREDIT_CARD_NETWORKS 8 from mimesis.enums import CardType, Gender 9 from mimesis.exceptions import NonEnumerableError 10 from mimesis.helpers import get_random_item 11 from mimesis.providers.base import BaseDataProvider 12 from mimesis.providers.person import Person 13 from mimesis.utils import luhn_checksum 14 15 __all__ = ['Payment'] 16 17 18 class Payment(BaseDataProvider): 19 """Class that provides data related to payments.""" 20 21 def __init__(self, *args, **kwargs) -> None: 22 """Initialize attributes. 23 24 :param args: Arguments. 25 :param kwargs: Keyword arguments. 26 """ 27 super().__init__(*args, **kwargs) 28 self.__person = Person('en', seed=self.seed) 29 30 def cid(self) -> int: 31 """Generate a random CID. 32 33 :return: CID code. 34 35 :Example: 36 7452 37 """ 38 return self.random.randint(1000, 9999) 39 40 def paypal(self) -> str: 41 """Generate a random PayPal account. 42 43 :return: Email of PapPal user. 44 45 :Example: 46 [email protected] 47 """ 48 return self.__person.email() 49 50 def bitcoin_address(self) -> str: 51 """Generate a random bitcoin address. 52 53 :return: Bitcoin address. 54 55 :Example: 56 3EktnHQD7RiAE6uzMj2ZifT9YgRrkSgzQX 57 """ 58 type_ = self.random.choice(['1', '3']) 59 letters = string.ascii_letters + string.digits 60 return type_ + ''.join( 61 self.random.choice(letters) for _ in range(33)) 62 63 def ethereum_address(self) -> str: 64 """Generate a random Ethereum address. 65 66 .. Note: The address will look like Ethereum address, 67 but keep in mind that it is not the valid address. 68 69 :return: Ethereum address. 70 71 :Example: 72 0xe8ece9e6ff7dba52d4c07d37418036a89af9698d 73 """ 74 bits = self.random.getrandbits(160) 75 address = bits.to_bytes(20, byteorder='big') 76 return '0x' + address.hex() 77 78 def credit_card_network(self) -> str: 79 """Generate a random credit card network. 80 81 :return: Credit card network 82 83 :Example: 84 MasterCard 85 """ 86 return self.random.choice(CREDIT_CARD_NETWORKS) 87 88 def credit_card_number(self, card_type: Optional[CardType] = None) -> str: 89 """Generate a random credit card number. 90 91 :param card_type: Issuing Network. Default is Visa. 92 :return: Credit card number. 93 :raises NotImplementedError: if cart_type is not supported. 94 95 :Example: 96 4455 5299 1152 2450 97 """ 98 length = 16 99 regex = re.compile('(\d{4})(\d{4})(\d{4})(\d{4})') 100 101 if card_type is None: 102 card_type = get_random_item(CardType, rnd=self.random) 103 104 if card_type == CardType.VISA: 105 number = self.random.randint(4000, 4999) 106 elif card_type == CardType.MASTER_CARD: 107 number = self.random.choice([ 108 self.random.randint(2221, 2720), 109 self.random.randint(5100, 5500), 110 ]) 111 elif card_type == CardType.AMERICAN_EXPRESS: 112 number = self.random.choice([34, 37]) 113 length = 15 114 regex = re.compile('(\d{4})(\d{6})(\d{5})') 115 else: 116 raise NonEnumerableError(CardType) 117 118 str_num = str(number) 119 while len(str_num) < length - 1: 120 str_num += self.random.choice(string.digits) 121 122 groups = regex.search(str_num + luhn_checksum(str_num)).groups() 123 card = ' '.join(groups) 124 return card 125 126 def credit_card_expiration_date(self, minimum: int = 16, 127 maximum: int = 25) -> str: 128 """Generate a random expiration date for credit card. 129 130 :param minimum: Date of issue. 131 :param maximum: Maximum of expiration_date. 132 :return: Expiration date of credit card. 133 134 :Example: 135 03/19. 136 """ 137 month = self.random.randint(1, 12) 138 year = self.random.randint(minimum, maximum) 139 return '{0:02d}/{1}'.format(month, year) 140 141 def cvv(self) -> int: 142 """Generate a random CVV. 143 144 :return: CVV code. 145 146 :Example: 147 324 148 """ 149 return self.random.randint(100, 999) 150 151 def credit_card_owner(self, gender: Optional[Gender] = None) -> dict: 152 """Generate credit card owner. 153 154 :param gender: Gender of credit card owner. 155 :type gender: Gender's enum object. 156 :return: 157 """ 158 owner = { 159 'credit_card': self.credit_card_number(), 160 'expiration_date': self.credit_card_expiration_date(), 161 'owner': self.__person.full_name(gender=gender).upper(), 162 } 163 return owner 164 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/mimesis/providers/payment.py b/mimesis/providers/payment.py --- a/mimesis/providers/payment.py +++ b/mimesis/providers/payment.py @@ -119,7 +119,9 @@ while len(str_num) < length - 1: str_num += self.random.choice(string.digits) - groups = regex.search(str_num + luhn_checksum(str_num)).groups() + groups = regex.search( # type: ignore + str_num + luhn_checksum(str_num), + ).groups() card = ' '.join(groups) return card
{"golden_diff": "diff --git a/mimesis/providers/payment.py b/mimesis/providers/payment.py\n--- a/mimesis/providers/payment.py\n+++ b/mimesis/providers/payment.py\n@@ -119,7 +119,9 @@\n while len(str_num) < length - 1:\n str_num += self.random.choice(string.digits)\n \n- groups = regex.search(str_num + luhn_checksum(str_num)).groups()\n+ groups = regex.search( # type: ignore\n+ str_num + luhn_checksum(str_num),\n+ ).groups()\n card = ' '.join(groups)\n return card\n", "issue": "Fix mypy issues\nThere are several things to consider:\r\n\r\n1. Fixing bugs like this one: https://travis-ci.org/lk-geimfari/mimesis/jobs/361128185#L600\r\n2. Adding new options to `mypy` to make it stricter: https://github.com/wemake-services/wemake-django-template/blob/master/%7B%7Bcookiecutter.project_name%7D%7D/setup.cfg#L67\r\n3. Add `tests` folder to be checked by `mypy` (not only `mimesis/` folder is checked)\r\n\r\nI can do it, if @lk-geimfari does not have anything to add/comment.\n", "before_files": [{"content": "\"\"\"Provides data related to payment.\"\"\"\n\nimport re\nimport string\nfrom typing import Optional\n\nfrom mimesis.data import CREDIT_CARD_NETWORKS\nfrom mimesis.enums import CardType, Gender\nfrom mimesis.exceptions import NonEnumerableError\nfrom mimesis.helpers import get_random_item\nfrom mimesis.providers.base import BaseDataProvider\nfrom mimesis.providers.person import Person\nfrom mimesis.utils import luhn_checksum\n\n__all__ = ['Payment']\n\n\nclass Payment(BaseDataProvider):\n \"\"\"Class that provides data related to payments.\"\"\"\n\n def __init__(self, *args, **kwargs) -> None:\n \"\"\"Initialize attributes.\n\n :param args: Arguments.\n :param kwargs: Keyword arguments.\n \"\"\"\n super().__init__(*args, **kwargs)\n self.__person = Person('en', seed=self.seed)\n\n def cid(self) -> int:\n \"\"\"Generate a random CID.\n\n :return: CID code.\n\n :Example:\n 7452\n \"\"\"\n return self.random.randint(1000, 9999)\n\n def paypal(self) -> str:\n \"\"\"Generate a random PayPal account.\n\n :return: Email of PapPal user.\n\n :Example:\n [email protected]\n \"\"\"\n return self.__person.email()\n\n def bitcoin_address(self) -> str:\n \"\"\"Generate a random bitcoin address.\n\n :return: Bitcoin address.\n\n :Example:\n 3EktnHQD7RiAE6uzMj2ZifT9YgRrkSgzQX\n \"\"\"\n type_ = self.random.choice(['1', '3'])\n letters = string.ascii_letters + string.digits\n return type_ + ''.join(\n self.random.choice(letters) for _ in range(33))\n\n def ethereum_address(self) -> str:\n \"\"\"Generate a random Ethereum address.\n\n .. Note: The address will look like Ethereum address,\n but keep in mind that it is not the valid address.\n\n :return: Ethereum address.\n\n :Example:\n 0xe8ece9e6ff7dba52d4c07d37418036a89af9698d\n \"\"\"\n bits = self.random.getrandbits(160)\n address = bits.to_bytes(20, byteorder='big')\n return '0x' + address.hex()\n\n def credit_card_network(self) -> str:\n \"\"\"Generate a random credit card network.\n\n :return: Credit card network\n\n :Example:\n MasterCard\n \"\"\"\n return self.random.choice(CREDIT_CARD_NETWORKS)\n\n def credit_card_number(self, card_type: Optional[CardType] = None) -> str:\n \"\"\"Generate a random credit card number.\n\n :param card_type: Issuing Network. 
Default is Visa.\n :return: Credit card number.\n :raises NotImplementedError: if cart_type is not supported.\n\n :Example:\n 4455 5299 1152 2450\n \"\"\"\n length = 16\n regex = re.compile('(\\d{4})(\\d{4})(\\d{4})(\\d{4})')\n\n if card_type is None:\n card_type = get_random_item(CardType, rnd=self.random)\n\n if card_type == CardType.VISA:\n number = self.random.randint(4000, 4999)\n elif card_type == CardType.MASTER_CARD:\n number = self.random.choice([\n self.random.randint(2221, 2720),\n self.random.randint(5100, 5500),\n ])\n elif card_type == CardType.AMERICAN_EXPRESS:\n number = self.random.choice([34, 37])\n length = 15\n regex = re.compile('(\\d{4})(\\d{6})(\\d{5})')\n else:\n raise NonEnumerableError(CardType)\n\n str_num = str(number)\n while len(str_num) < length - 1:\n str_num += self.random.choice(string.digits)\n\n groups = regex.search(str_num + luhn_checksum(str_num)).groups()\n card = ' '.join(groups)\n return card\n\n def credit_card_expiration_date(self, minimum: int = 16,\n maximum: int = 25) -> str:\n \"\"\"Generate a random expiration date for credit card.\n\n :param minimum: Date of issue.\n :param maximum: Maximum of expiration_date.\n :return: Expiration date of credit card.\n\n :Example:\n 03/19.\n \"\"\"\n month = self.random.randint(1, 12)\n year = self.random.randint(minimum, maximum)\n return '{0:02d}/{1}'.format(month, year)\n\n def cvv(self) -> int:\n \"\"\"Generate a random CVV.\n\n :return: CVV code.\n\n :Example:\n 324\n \"\"\"\n return self.random.randint(100, 999)\n\n def credit_card_owner(self, gender: Optional[Gender] = None) -> dict:\n \"\"\"Generate credit card owner.\n\n :param gender: Gender of credit card owner.\n :type gender: Gender's enum object.\n :return:\n \"\"\"\n owner = {\n 'credit_card': self.credit_card_number(),\n 'expiration_date': self.credit_card_expiration_date(),\n 'owner': self.__person.full_name(gender=gender).upper(),\n }\n return owner\n", "path": "mimesis/providers/payment.py"}], "after_files": [{"content": "\"\"\"Provides data related to payment.\"\"\"\n\nimport re\nimport string\nfrom typing import Optional\n\nfrom mimesis.data import CREDIT_CARD_NETWORKS\nfrom mimesis.enums import CardType, Gender\nfrom mimesis.exceptions import NonEnumerableError\nfrom mimesis.helpers import get_random_item\nfrom mimesis.providers.base import BaseDataProvider\nfrom mimesis.providers.person import Person\nfrom mimesis.utils import luhn_checksum\n\n__all__ = ['Payment']\n\n\nclass Payment(BaseDataProvider):\n \"\"\"Class that provides data related to payments.\"\"\"\n\n def __init__(self, *args, **kwargs) -> None:\n \"\"\"Initialize attributes.\n\n :param args: Arguments.\n :param kwargs: Keyword arguments.\n \"\"\"\n super().__init__(*args, **kwargs)\n self.__person = Person('en', seed=self.seed)\n\n def cid(self) -> int:\n \"\"\"Generate a random CID.\n\n :return: CID code.\n\n :Example:\n 7452\n \"\"\"\n return self.random.randint(1000, 9999)\n\n def paypal(self) -> str:\n \"\"\"Generate a random PayPal account.\n\n :return: Email of PapPal user.\n\n :Example:\n [email protected]\n \"\"\"\n return self.__person.email()\n\n def bitcoin_address(self) -> str:\n \"\"\"Generate a random bitcoin address.\n\n :return: Bitcoin address.\n\n :Example:\n 3EktnHQD7RiAE6uzMj2ZifT9YgRrkSgzQX\n \"\"\"\n type_ = self.random.choice(['1', '3'])\n letters = string.ascii_letters + string.digits\n return type_ + ''.join(\n self.random.choice(letters) for _ in range(33))\n\n def ethereum_address(self) -> str:\n \"\"\"Generate a random Ethereum address.\n\n 
.. Note: The address will look like Ethereum address,\n but keep in mind that it is not the valid address.\n\n :return: Ethereum address.\n\n :Example:\n 0xe8ece9e6ff7dba52d4c07d37418036a89af9698d\n \"\"\"\n bits = self.random.getrandbits(160)\n address = bits.to_bytes(20, byteorder='big')\n return '0x' + address.hex()\n\n def credit_card_network(self) -> str:\n \"\"\"Generate a random credit card network.\n\n :return: Credit card network\n\n :Example:\n MasterCard\n \"\"\"\n return self.random.choice(CREDIT_CARD_NETWORKS)\n\n def credit_card_number(self, card_type: Optional[CardType] = None) -> str:\n \"\"\"Generate a random credit card number.\n\n :param card_type: Issuing Network. Default is Visa.\n :return: Credit card number.\n :raises NotImplementedError: if cart_type is not supported.\n\n :Example:\n 4455 5299 1152 2450\n \"\"\"\n length = 16\n regex = re.compile('(\\d{4})(\\d{4})(\\d{4})(\\d{4})')\n\n if card_type is None:\n card_type = get_random_item(CardType, rnd=self.random)\n\n if card_type == CardType.VISA:\n number = self.random.randint(4000, 4999)\n elif card_type == CardType.MASTER_CARD:\n number = self.random.choice([\n self.random.randint(2221, 2720),\n self.random.randint(5100, 5500),\n ])\n elif card_type == CardType.AMERICAN_EXPRESS:\n number = self.random.choice([34, 37])\n length = 15\n regex = re.compile('(\\d{4})(\\d{6})(\\d{5})')\n else:\n raise NonEnumerableError(CardType)\n\n str_num = str(number)\n while len(str_num) < length - 1:\n str_num += self.random.choice(string.digits)\n\n groups = regex.search( # type: ignore\n str_num + luhn_checksum(str_num),\n ).groups()\n card = ' '.join(groups)\n return card\n\n def credit_card_expiration_date(self, minimum: int = 16,\n maximum: int = 25) -> str:\n \"\"\"Generate a random expiration date for credit card.\n\n :param minimum: Date of issue.\n :param maximum: Maximum of expiration_date.\n :return: Expiration date of credit card.\n\n :Example:\n 03/19.\n \"\"\"\n month = self.random.randint(1, 12)\n year = self.random.randint(minimum, maximum)\n return '{0:02d}/{1}'.format(month, year)\n\n def cvv(self) -> int:\n \"\"\"Generate a random CVV.\n\n :return: CVV code.\n\n :Example:\n 324\n \"\"\"\n return self.random.randint(100, 999)\n\n def credit_card_owner(self, gender: Optional[Gender] = None) -> dict:\n \"\"\"Generate credit card owner.\n\n :param gender: Gender of credit card owner.\n :type gender: Gender's enum object.\n :return:\n \"\"\"\n owner = {\n 'credit_card': self.credit_card_number(),\n 'expiration_date': self.credit_card_expiration_date(),\n 'owner': self.__person.full_name(gender=gender).upper(),\n }\n return owner\n", "path": "mimesis/providers/payment.py"}]}
2,027
132
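The mimesis fix above appeases mypy with a `# type: ignore`, since `re.Pattern.search` returns `Optional[Match]` and calling `.groups()` on it is flagged. Below is a small sketch of the same situation resolved by narrowing instead of ignoring; the function name and sample number are illustrative, not mimesis code:

```python
import re

CARD_RE = re.compile(r"(\d{4})(\d{4})(\d{4})(\d{4})")

def format_card_number(digits: str) -> str:
    match = CARD_RE.search(digits)
    # search() returns Optional[Match]; an explicit None check narrows the
    # type for mypy without a blanket "type: ignore" comment.
    if match is None:
        raise ValueError(f"expected 16 digits, got {digits!r}")
    return " ".join(match.groups())

print(format_card_number("4455529911522450"))  # 4455 5299 1152 2450
```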
gh_patches_debug_4018
rasdani/github-patches
git_diff
ethereum__web3.py-2360
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Upgrade jsonschema to the latest 4.4 ### What was wrong? https://github.com/ethereum/web3.py/blob/16aff7e80a6bc089051c154b5079213317c27da8/setup.py#L86 I cannot combine `web3.py` with the latest `jsonschema` in a single app (without relying on ugly hacks) ### How can it be fixed? Maybe just expand the range to `jsonschema>=3.2.0,<4.5` and let users test compatibility? https://github.com/ethereum/web3.py/blob/16aff7e80a6bc089051c154b5079213317c27da8/docs/web3.pm.rst#L12 so the web3.py dev team shouldn't have to worry too much about maintaining compatibility --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 #!/usr/bin/env python 2 from setuptools import ( 3 find_packages, 4 setup, 5 ) 6 7 extras_require = { 8 'tester': [ 9 "eth-tester[py-evm]==v0.6.0-beta.6", 10 "py-geth>=3.7.0,<4", 11 ], 12 'linter': [ 13 "flake8==3.8.3", 14 "isort>=4.2.15,<4.3.5", 15 "mypy==0.910", 16 "types-setuptools>=57.4.4,<58", 17 "types-requests>=2.26.1,<3", 18 "types-protobuf>=3.18.2,<4", 19 ], 20 'docs': [ 21 "mock", 22 "sphinx-better-theme>=0.1.4", 23 "click>=5.1", 24 "configparser==3.5.0", 25 "contextlib2>=0.5.4", 26 "py-geth>=3.6.0,<4", 27 "py-solc>=0.4.0", 28 "pytest>=4.4.0,<5.0.0", 29 "sphinx>=3.0,<4", 30 "sphinx_rtd_theme>=0.1.9", 31 "toposort>=1.4", 32 "towncrier==18.5.0", 33 "urllib3", 34 "wheel" 35 ], 36 'dev': [ 37 "bumpversion", 38 "flaky>=3.7.0,<4", 39 "hypothesis>=3.31.2,<6", 40 "pytest>=4.4.0,<5.0.0", 41 "pytest-asyncio>=0.10.0,<0.11", 42 "pytest-mock>=1.10,<2", 43 "pytest-pythonpath>=0.3", 44 "pytest-watch>=4.2,<5", 45 "pytest-xdist>=1.29,<2", 46 "setuptools>=38.6.0", 47 "tox>=1.8.0", 48 "tqdm>4.32,<5", 49 "twine>=1.13,<2", 50 "pluggy==0.13.1", 51 "when-changed>=0.3.0,<0.4" 52 ] 53 } 54 55 extras_require['dev'] = ( 56 extras_require['tester'] 57 + extras_require['linter'] 58 + extras_require['docs'] 59 + extras_require['dev'] 60 ) 61 62 with open('./README.md') as readme: 63 long_description = readme.read() 64 65 setup( 66 name='web3', 67 # *IMPORTANT*: Don't manually change the version here. Use the 'bumpversion' utility. 
68 version='5.28.0', 69 description="""Web3.py""", 70 long_description_content_type='text/markdown', 71 long_description=long_description, 72 author='Piper Merriam', 73 author_email='[email protected]', 74 url='https://github.com/ethereum/web3.py', 75 include_package_data=True, 76 install_requires=[ 77 "aiohttp>=3.7.4.post0,<4", 78 "eth-abi>=2.0.0b6,<3.0.0", 79 "eth-account>=0.5.7,<0.6.0", 80 "eth-hash[pycryptodome]>=0.2.0,<1.0.0", 81 "eth-typing>=2.0.0,<3.0.0", 82 "eth-utils>=1.9.5,<2.0.0", 83 "hexbytes>=0.1.0,<1.0.0", 84 "ipfshttpclient==0.8.0a2", 85 "jsonschema>=3.2.0,<4.0.0", 86 "lru-dict>=1.1.6,<2.0.0", 87 "protobuf>=3.10.0,<4", 88 "pywin32>=223;platform_system=='Windows'", 89 "requests>=2.16.0,<3.0.0", 90 # remove typing_extensions after python_requires>=3.8, see web3._utils.compat 91 "typing-extensions>=3.7.4.1,<5;python_version<'3.8'", 92 "websockets>=9.1,<10", 93 ], 94 python_requires='>=3.6,<4', 95 extras_require=extras_require, 96 py_modules=['web3', 'ens', 'ethpm'], 97 entry_points={"pytest11": ["pytest_ethereum = web3.tools.pytest_ethereum.plugins"]}, 98 license="MIT", 99 zip_safe=False, 100 keywords='ethereum', 101 packages=find_packages(exclude=["tests", "tests.*"]), 102 package_data={"web3": ["py.typed"]}, 103 classifiers=[ 104 'Development Status :: 5 - Production/Stable', 105 'Intended Audience :: Developers', 106 'License :: OSI Approved :: MIT License', 107 'Natural Language :: English', 108 'Programming Language :: Python :: 3', 109 'Programming Language :: Python :: 3.6', 110 'Programming Language :: Python :: 3.7', 111 'Programming Language :: Python :: 3.8', 112 'Programming Language :: Python :: 3.9', 113 ], 114 ) 115 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -82,7 +82,7 @@ "eth-utils>=1.9.5,<2.0.0", "hexbytes>=0.1.0,<1.0.0", "ipfshttpclient==0.8.0a2", - "jsonschema>=3.2.0,<4.0.0", + "jsonschema>=3.2.0,<5", "lru-dict>=1.1.6,<2.0.0", "protobuf>=3.10.0,<4", "pywin32>=223;platform_system=='Windows'",
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -82,7 +82,7 @@\n \"eth-utils>=1.9.5,<2.0.0\",\n \"hexbytes>=0.1.0,<1.0.0\",\n \"ipfshttpclient==0.8.0a2\",\n- \"jsonschema>=3.2.0,<4.0.0\",\n+ \"jsonschema>=3.2.0,<5\",\n \"lru-dict>=1.1.6,<2.0.0\",\n \"protobuf>=3.10.0,<4\",\n \"pywin32>=223;platform_system=='Windows'\",\n", "issue": "Upgrade jsonschema to the latest 4.4\n### What was wrong?\r\n\r\nhttps://github.com/ethereum/web3.py/blob/16aff7e80a6bc089051c154b5079213317c27da8/setup.py#L86\r\n\r\nI cannot combine `web3.py` with the latest `jsonschema` in a single app (without relying on ugly hacks)\r\n\r\n### How can it be fixed?\r\n\r\nMaybe just expand the range to `jsonschema>=3.2.0,<4.5` and let users test compatibility?\r\n\r\nhttps://github.com/ethereum/web3.py/blob/16aff7e80a6bc089051c154b5079213317c27da8/docs/web3.pm.rst#L12\r\n\r\nso the web3.py dev team shouldn't have to worry too much about maintaining compatibility \n", "before_files": [{"content": "#!/usr/bin/env python\nfrom setuptools import (\n find_packages,\n setup,\n)\n\nextras_require = {\n 'tester': [\n \"eth-tester[py-evm]==v0.6.0-beta.6\",\n \"py-geth>=3.7.0,<4\",\n ],\n 'linter': [\n \"flake8==3.8.3\",\n \"isort>=4.2.15,<4.3.5\",\n \"mypy==0.910\",\n \"types-setuptools>=57.4.4,<58\",\n \"types-requests>=2.26.1,<3\",\n \"types-protobuf>=3.18.2,<4\",\n ],\n 'docs': [\n \"mock\",\n \"sphinx-better-theme>=0.1.4\",\n \"click>=5.1\",\n \"configparser==3.5.0\",\n \"contextlib2>=0.5.4\",\n \"py-geth>=3.6.0,<4\",\n \"py-solc>=0.4.0\",\n \"pytest>=4.4.0,<5.0.0\",\n \"sphinx>=3.0,<4\",\n \"sphinx_rtd_theme>=0.1.9\",\n \"toposort>=1.4\",\n \"towncrier==18.5.0\",\n \"urllib3\",\n \"wheel\"\n ],\n 'dev': [\n \"bumpversion\",\n \"flaky>=3.7.0,<4\",\n \"hypothesis>=3.31.2,<6\",\n \"pytest>=4.4.0,<5.0.0\",\n \"pytest-asyncio>=0.10.0,<0.11\",\n \"pytest-mock>=1.10,<2\",\n \"pytest-pythonpath>=0.3\",\n \"pytest-watch>=4.2,<5\",\n \"pytest-xdist>=1.29,<2\",\n \"setuptools>=38.6.0\",\n \"tox>=1.8.0\",\n \"tqdm>4.32,<5\",\n \"twine>=1.13,<2\",\n \"pluggy==0.13.1\",\n \"when-changed>=0.3.0,<0.4\"\n ]\n}\n\nextras_require['dev'] = (\n extras_require['tester']\n + extras_require['linter']\n + extras_require['docs']\n + extras_require['dev']\n)\n\nwith open('./README.md') as readme:\n long_description = readme.read()\n\nsetup(\n name='web3',\n # *IMPORTANT*: Don't manually change the version here. 
Use the 'bumpversion' utility.\n version='5.28.0',\n description=\"\"\"Web3.py\"\"\",\n long_description_content_type='text/markdown',\n long_description=long_description,\n author='Piper Merriam',\n author_email='[email protected]',\n url='https://github.com/ethereum/web3.py',\n include_package_data=True,\n install_requires=[\n \"aiohttp>=3.7.4.post0,<4\",\n \"eth-abi>=2.0.0b6,<3.0.0\",\n \"eth-account>=0.5.7,<0.6.0\",\n \"eth-hash[pycryptodome]>=0.2.0,<1.0.0\",\n \"eth-typing>=2.0.0,<3.0.0\",\n \"eth-utils>=1.9.5,<2.0.0\",\n \"hexbytes>=0.1.0,<1.0.0\",\n \"ipfshttpclient==0.8.0a2\",\n \"jsonschema>=3.2.0,<4.0.0\",\n \"lru-dict>=1.1.6,<2.0.0\",\n \"protobuf>=3.10.0,<4\",\n \"pywin32>=223;platform_system=='Windows'\",\n \"requests>=2.16.0,<3.0.0\",\n # remove typing_extensions after python_requires>=3.8, see web3._utils.compat\n \"typing-extensions>=3.7.4.1,<5;python_version<'3.8'\",\n \"websockets>=9.1,<10\",\n ],\n python_requires='>=3.6,<4',\n extras_require=extras_require,\n py_modules=['web3', 'ens', 'ethpm'],\n entry_points={\"pytest11\": [\"pytest_ethereum = web3.tools.pytest_ethereum.plugins\"]},\n license=\"MIT\",\n zip_safe=False,\n keywords='ethereum',\n packages=find_packages(exclude=[\"tests\", \"tests.*\"]),\n package_data={\"web3\": [\"py.typed\"]},\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Natural Language :: English',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nfrom setuptools import (\n find_packages,\n setup,\n)\n\nextras_require = {\n 'tester': [\n \"eth-tester[py-evm]==v0.6.0-beta.6\",\n \"py-geth>=3.7.0,<4\",\n ],\n 'linter': [\n \"flake8==3.8.3\",\n \"isort>=4.2.15,<4.3.5\",\n \"mypy==0.910\",\n \"types-setuptools>=57.4.4,<58\",\n \"types-requests>=2.26.1,<3\",\n \"types-protobuf>=3.18.2,<4\",\n ],\n 'docs': [\n \"mock\",\n \"sphinx-better-theme>=0.1.4\",\n \"click>=5.1\",\n \"configparser==3.5.0\",\n \"contextlib2>=0.5.4\",\n \"py-geth>=3.6.0,<4\",\n \"py-solc>=0.4.0\",\n \"pytest>=4.4.0,<5.0.0\",\n \"sphinx>=3.0,<4\",\n \"sphinx_rtd_theme>=0.1.9\",\n \"toposort>=1.4\",\n \"towncrier==18.5.0\",\n \"urllib3\",\n \"wheel\"\n ],\n 'dev': [\n \"bumpversion\",\n \"flaky>=3.7.0,<4\",\n \"hypothesis>=3.31.2,<6\",\n \"pytest>=4.4.0,<5.0.0\",\n \"pytest-asyncio>=0.10.0,<0.11\",\n \"pytest-mock>=1.10,<2\",\n \"pytest-pythonpath>=0.3\",\n \"pytest-watch>=4.2,<5\",\n \"pytest-xdist>=1.29,<2\",\n \"setuptools>=38.6.0\",\n \"tox>=1.8.0\",\n \"tqdm>4.32,<5\",\n \"twine>=1.13,<2\",\n \"pluggy==0.13.1\",\n \"when-changed>=0.3.0,<0.4\"\n ]\n}\n\nextras_require['dev'] = (\n extras_require['tester']\n + extras_require['linter']\n + extras_require['docs']\n + extras_require['dev']\n)\n\nwith open('./README.md') as readme:\n long_description = readme.read()\n\nsetup(\n name='web3',\n # *IMPORTANT*: Don't manually change the version here. 
Use the 'bumpversion' utility.\n version='5.28.0',\n description=\"\"\"Web3.py\"\"\",\n long_description_content_type='text/markdown',\n long_description=long_description,\n author='Piper Merriam',\n author_email='[email protected]',\n url='https://github.com/ethereum/web3.py',\n include_package_data=True,\n install_requires=[\n \"aiohttp>=3.7.4.post0,<4\",\n \"eth-abi>=2.0.0b6,<3.0.0\",\n \"eth-account>=0.5.7,<0.6.0\",\n \"eth-hash[pycryptodome]>=0.2.0,<1.0.0\",\n \"eth-typing>=2.0.0,<3.0.0\",\n \"eth-utils>=1.9.5,<2.0.0\",\n \"hexbytes>=0.1.0,<1.0.0\",\n \"ipfshttpclient==0.8.0a2\",\n \"jsonschema>=3.2.0,<5\",\n \"lru-dict>=1.1.6,<2.0.0\",\n \"protobuf>=3.10.0,<4\",\n \"pywin32>=223;platform_system=='Windows'\",\n \"requests>=2.16.0,<3.0.0\",\n # remove typing_extensions after python_requires>=3.8, see web3._utils.compat\n \"typing-extensions>=3.7.4.1,<5;python_version<'3.8'\",\n \"websockets>=9.1,<10\",\n ],\n python_requires='>=3.6,<4',\n extras_require=extras_require,\n py_modules=['web3', 'ens', 'ethpm'],\n entry_points={\"pytest11\": [\"pytest_ethereum = web3.tools.pytest_ethereum.plugins\"]},\n license=\"MIT\",\n zip_safe=False,\n keywords='ethereum',\n packages=find_packages(exclude=[\"tests\", \"tests.*\"]),\n package_data={\"web3\": [\"py.typed\"]},\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Natural Language :: English',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n ],\n)\n", "path": "setup.py"}]}
1,859
161
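The web3.py patch above is a one-line pin change, from `jsonschema>=3.2.0,<4.0.0` to `jsonschema>=3.2.0,<5`. Below is a sketch of checking such constraints programmatically with the third-party `packaging` library, which is an assumption here, not something the patch itself uses:

```python
from packaging.requirements import Requirement
from packaging.version import Version

old_pin = Requirement("jsonschema>=3.2.0,<4.0.0")
new_pin = Requirement("jsonschema>=3.2.0,<5")

for candidate in ("3.2.0", "4.4.0"):
    version = Version(candidate)
    # SpecifierSet.contains() evaluates the whole >=/< range at once.
    print(candidate,
          old_pin.specifier.contains(version),
          new_pin.specifier.contains(version))
# 3.2.0 True True
# 4.4.0 False True  <- only the widened pin admits jsonschema 4.4
```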
gh_patches_debug_28103
rasdani/github-patches
git_diff
bridgecrewio__checkov-39
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- security_groups in aws_security_group rule not supported **Describe the bug** referencing a `security_group` instead of `cidr_block` in a security group rule causes an exception **To Reproduce** Steps to reproduce the behavior: 1. try to run checkov on the following resource: ```tf resource "aws_security_group" "bar-sg" { name = "sg-bar" vpc_id = aws_vpc.main.id ingress { from_port = 22 to_port = 22 protocol = "tcp" security_groups = [aws_security_group.foo-sg.id] description = "foo" } egress { from_port = 0 to_port = 0 protocol = "-1" cidr_blocks = ["0.0.0.0/0"] } } ``` result: ``` Traceback (most recent call last): File "/path/tf-checks/bin/checkov", line 34, in <module> report = Runner().run(root_folder, external_checks_dir=args.external_checks_dir) File "/path/tf-checks/lib/python3.7/site-packages/checkov/terraform/runner.py", line 38, in run results = resource_registry.scan(resource, scanned_file, skipped_checks) File "/pathtf-checks/lib/python3.7/site-packages/checkov/terraform/checks/resource/registry.py", line 38, in scan resource_name=resource_name, resource_type=resource, skip_info=skip_info) File "/path/tf-checks/lib/python3.7/site-packages/checkov/terraform/checks/resource/base_check.py", line 31, in run check_result['result'] = self.scan_resource_conf(resource_configuration) File "/path/tf-checks/lib/python3.7/site-packages/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py", line 25, in scan_resource_conf if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [[ KeyError: 'cidr_blocks' ``` **Expected behavior** such resource definition is perfectly valid **Desktop (please complete the following information):** - OS: Ubuntu `19.10` - Python: `3.7.5` - Checkov Version `1.0.99` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. 
--- BEGIN FILES --- Path: `checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py` Content: ``` 1 from checkov.terraform.models.enums import CheckResult, CheckCategories 2 from checkov.terraform.checks.resource.base_check import BaseResourceCheck 3 4 PORT = 3389 5 6 7 class SecurityGroupUnrestrictedIngress3389(BaseResourceCheck): 8 def __init__(self): 9 name = "Ensure no security groups allow ingress from 0.0.0.0:0 to port %d" % PORT 10 id = "CKV_AWS_25" 11 supported_resources = ['aws_security_group'] 12 categories = [CheckCategories.NETWORKING] 13 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) 14 15 def scan_resource_conf(self, conf): 16 """ 17 Looks for configuration at security group ingress rules : 18 https://www.terraform.io/docs/providers/aws/r/security_group.html 19 :param conf: aws_security_group configuration 20 :return: <CheckResult> 21 """ 22 if 'ingress' in conf.keys(): 23 ingress_conf = conf['ingress'] 24 for rule in ingress_conf: 25 if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [ 26 ["0.0.0.0/0"]] and 'self' not in rule.keys() and 'security_groups' not in rule.keys(): 27 return CheckResult.FAILED 28 29 return CheckResult.PASSED 30 31 32 check = SecurityGroupUnrestrictedIngress3389() 33 ``` Path: `checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py` Content: ``` 1 from checkov.terraform.models.enums import CheckResult, CheckCategories 2 from checkov.terraform.checks.resource.base_check import BaseResourceCheck 3 4 PORT = 22 5 6 7 class SecurityGroupUnrestrictedIngress22(BaseResourceCheck): 8 def __init__(self): 9 name = "Ensure no security groups allow ingress from 0.0.0.0:0 to port %d" % PORT 10 id = "CKV_AWS_24" 11 supported_resources = ['aws_security_group'] 12 categories = [CheckCategories.NETWORKING] 13 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources) 14 15 def scan_resource_conf(self, conf): 16 """ 17 Looks for configuration at security group ingress rules : 18 https://www.terraform.io/docs/providers/aws/r/security_group.html 19 :param conf: aws_security_group configuration 20 :return: <CheckResult> 21 """ 22 if 'ingress' in conf.keys(): 23 ingress_conf = conf['ingress'] 24 for rule in ingress_conf: 25 if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [[ 26 "0.0.0.0/0"]] and 'self' not in rule.keys() and 'security_groups' not in rule.keys(): 27 return CheckResult.FAILED 28 29 return CheckResult.PASSED 30 31 32 check = SecurityGroupUnrestrictedIngress22() 33 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py b/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py --- a/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py +++ b/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py @@ -22,9 +22,10 @@ if 'ingress' in conf.keys(): ingress_conf = conf['ingress'] for rule in ingress_conf: - if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [[ - "0.0.0.0/0"]] and 'self' not in rule.keys() and 'security_groups' not in rule.keys(): - return CheckResult.FAILED + if rule['from_port'] == [PORT] and rule['to_port'] == [PORT]: + if 'cidr_blocks' in rule.keys(): + if rule['cidr_blocks'] == [["0.0.0.0/0"]] and 'security_groups' not in rule.keys(): + return CheckResult.FAILED return CheckResult.PASSED diff --git a/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py b/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py --- a/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py +++ b/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py @@ -22,9 +22,11 @@ if 'ingress' in conf.keys(): ingress_conf = conf['ingress'] for rule in ingress_conf: - if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [ - ["0.0.0.0/0"]] and 'self' not in rule.keys() and 'security_groups' not in rule.keys(): - return CheckResult.FAILED + if rule['from_port'] == [PORT] and rule['to_port'] == [PORT]: + if 'cidr_blocks' in rule.keys(): + if rule['cidr_blocks'] == [["0.0.0.0/0"]] and 'security_groups' not in rule.keys(): + return CheckResult.FAILED + return CheckResult.PASSED
{"golden_diff": "diff --git a/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py b/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py\n--- a/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py\n+++ b/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py\n@@ -22,9 +22,10 @@\n if 'ingress' in conf.keys():\n ingress_conf = conf['ingress']\n for rule in ingress_conf:\n- if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [[\n- \"0.0.0.0/0\"]] and 'self' not in rule.keys() and 'security_groups' not in rule.keys():\n- return CheckResult.FAILED\n+ if rule['from_port'] == [PORT] and rule['to_port'] == [PORT]:\n+ if 'cidr_blocks' in rule.keys():\n+ if rule['cidr_blocks'] == [[\"0.0.0.0/0\"]] and 'security_groups' not in rule.keys():\n+ return CheckResult.FAILED\n \n return CheckResult.PASSED\n \ndiff --git a/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py b/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py\n--- a/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py\n+++ b/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py\n@@ -22,9 +22,11 @@\n if 'ingress' in conf.keys():\n ingress_conf = conf['ingress']\n for rule in ingress_conf:\n- if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [\n- [\"0.0.0.0/0\"]] and 'self' not in rule.keys() and 'security_groups' not in rule.keys():\n- return CheckResult.FAILED\n+ if rule['from_port'] == [PORT] and rule['to_port'] == [PORT]:\n+ if 'cidr_blocks' in rule.keys():\n+ if rule['cidr_blocks'] == [[\"0.0.0.0/0\"]] and 'security_groups' not in rule.keys():\n+ return CheckResult.FAILED\n+\n \n return CheckResult.PASSED\n", "issue": "security_groups in aws_security_group rule not supported\n**Describe the bug**\r\nreferencing a `security_group` instead of `cidr_block` in a security group rule causes an exception\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. 
try to run checkov on the following resource:\r\n```tf\r\nresource \"aws_security_group\" \"bar-sg\" {\r\n name = \"sg-bar\"\r\n vpc_id = aws_vpc.main.id\r\n\r\n ingress {\r\n from_port = 22\r\n to_port = 22\r\n protocol = \"tcp\"\r\n security_groups = [aws_security_group.foo-sg.id]\r\n description = \"foo\"\r\n }\r\n\r\n egress {\r\n from_port = 0\r\n to_port = 0\r\n protocol = \"-1\"\r\n cidr_blocks = [\"0.0.0.0/0\"]\r\n }\r\n\r\n}\r\n\r\n```\r\nresult:\r\n```\r\nTraceback (most recent call last):\r\n File \"/path/tf-checks/bin/checkov\", line 34, in <module>\r\n report = Runner().run(root_folder, external_checks_dir=args.external_checks_dir)\r\n File \"/path/tf-checks/lib/python3.7/site-packages/checkov/terraform/runner.py\", line 38, in run\r\n results = resource_registry.scan(resource, scanned_file, skipped_checks)\r\n File \"/pathtf-checks/lib/python3.7/site-packages/checkov/terraform/checks/resource/registry.py\", line 38, in scan\r\n resource_name=resource_name, resource_type=resource, skip_info=skip_info)\r\n File \"/path/tf-checks/lib/python3.7/site-packages/checkov/terraform/checks/resource/base_check.py\", line 31, in run\r\n check_result['result'] = self.scan_resource_conf(resource_configuration)\r\n File \"/path/tf-checks/lib/python3.7/site-packages/checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py\", line 25, in scan_resource_conf\r\n if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [[\r\nKeyError: 'cidr_blocks'\r\n```\r\n\r\n**Expected behavior**\r\nsuch resource definition is perfectly valid\r\n\r\n\r\n**Desktop (please complete the following information):**\r\n - OS: Ubuntu `19.10`\r\n - Python: `3.7.5`\r\n - Checkov Version `1.0.99`\r\n\r\n\r\n\n", "before_files": [{"content": "from checkov.terraform.models.enums import CheckResult, CheckCategories\nfrom checkov.terraform.checks.resource.base_check import BaseResourceCheck\n\nPORT = 3389\n\n\nclass SecurityGroupUnrestrictedIngress3389(BaseResourceCheck):\n def __init__(self):\n name = \"Ensure no security groups allow ingress from 0.0.0.0:0 to port %d\" % PORT\n id = \"CKV_AWS_25\"\n supported_resources = ['aws_security_group']\n categories = [CheckCategories.NETWORKING]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf):\n \"\"\"\n Looks for configuration at security group ingress rules :\n https://www.terraform.io/docs/providers/aws/r/security_group.html\n :param conf: aws_security_group configuration\n :return: <CheckResult>\n \"\"\"\n if 'ingress' in conf.keys():\n ingress_conf = conf['ingress']\n for rule in ingress_conf:\n if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [\n [\"0.0.0.0/0\"]] and 'self' not in rule.keys() and 'security_groups' not in rule.keys():\n return CheckResult.FAILED\n\n return CheckResult.PASSED\n\n\ncheck = SecurityGroupUnrestrictedIngress3389()\n", "path": "checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py"}, {"content": "from checkov.terraform.models.enums import CheckResult, CheckCategories\nfrom checkov.terraform.checks.resource.base_check import BaseResourceCheck\n\nPORT = 22\n\n\nclass SecurityGroupUnrestrictedIngress22(BaseResourceCheck):\n def __init__(self):\n name = \"Ensure no security groups allow ingress from 0.0.0.0:0 to port %d\" % PORT\n id = \"CKV_AWS_24\"\n supported_resources = ['aws_security_group']\n categories = [CheckCategories.NETWORKING]\n 
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf):\n \"\"\"\n Looks for configuration at security group ingress rules :\n https://www.terraform.io/docs/providers/aws/r/security_group.html\n :param conf: aws_security_group configuration\n :return: <CheckResult>\n \"\"\"\n if 'ingress' in conf.keys():\n ingress_conf = conf['ingress']\n for rule in ingress_conf:\n if rule['from_port'] == [PORT] and rule['to_port'] == [PORT] and rule['cidr_blocks'] == [[\n \"0.0.0.0/0\"]] and 'self' not in rule.keys() and 'security_groups' not in rule.keys():\n return CheckResult.FAILED\n\n return CheckResult.PASSED\n\n\ncheck = SecurityGroupUnrestrictedIngress22()\n", "path": "checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py"}], "after_files": [{"content": "from checkov.terraform.models.enums import CheckResult, CheckCategories\nfrom checkov.terraform.checks.resource.base_check import BaseResourceCheck\n\nPORT = 3389\n\n\nclass SecurityGroupUnrestrictedIngress3389(BaseResourceCheck):\n def __init__(self):\n name = \"Ensure no security groups allow ingress from 0.0.0.0:0 to port %d\" % PORT\n id = \"CKV_AWS_25\"\n supported_resources = ['aws_security_group']\n categories = [CheckCategories.NETWORKING]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf):\n \"\"\"\n Looks for configuration at security group ingress rules :\n https://www.terraform.io/docs/providers/aws/r/security_group.html\n :param conf: aws_security_group configuration\n :return: <CheckResult>\n \"\"\"\n if 'ingress' in conf.keys():\n ingress_conf = conf['ingress']\n for rule in ingress_conf:\n if rule['from_port'] == [PORT] and rule['to_port'] == [PORT]:\n if 'cidr_blocks' in rule.keys():\n if rule['cidr_blocks'] == [[\"0.0.0.0/0\"]] and 'security_groups' not in rule.keys():\n return CheckResult.FAILED\n\n\n return CheckResult.PASSED\n\n\ncheck = SecurityGroupUnrestrictedIngress3389()\n", "path": "checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress3389.py"}, {"content": "from checkov.terraform.models.enums import CheckResult, CheckCategories\nfrom checkov.terraform.checks.resource.base_check import BaseResourceCheck\n\nPORT = 22\n\n\nclass SecurityGroupUnrestrictedIngress22(BaseResourceCheck):\n def __init__(self):\n name = \"Ensure no security groups allow ingress from 0.0.0.0:0 to port %d\" % PORT\n id = \"CKV_AWS_24\"\n supported_resources = ['aws_security_group']\n categories = [CheckCategories.NETWORKING]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf):\n \"\"\"\n Looks for configuration at security group ingress rules :\n https://www.terraform.io/docs/providers/aws/r/security_group.html\n :param conf: aws_security_group configuration\n :return: <CheckResult>\n \"\"\"\n if 'ingress' in conf.keys():\n ingress_conf = conf['ingress']\n for rule in ingress_conf:\n if rule['from_port'] == [PORT] and rule['to_port'] == [PORT]:\n if 'cidr_blocks' in rule.keys():\n if rule['cidr_blocks'] == [[\"0.0.0.0/0\"]] and 'security_groups' not in rule.keys():\n return CheckResult.FAILED\n\n return CheckResult.PASSED\n\n\ncheck = SecurityGroupUnrestrictedIngress22()\n", "path": "checkov/terraform/checks/resource/aws/SecurityGroupUnrestrictedIngress22.py"}]}
1,582
565
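The checkov fix above stops indexing `rule['cidr_blocks']` unconditionally, because Terraform ingress rules that reference other security groups carry no `cidr_blocks` key at all. Below is a self-contained sketch of the guarded check; the rule dictionaries mimic checkov's parsed structure and are illustrative only:

```python
PORT = 22

def open_to_world(rule: dict) -> bool:
    """True only for rules exposing PORT to 0.0.0.0/0 via cidr_blocks."""
    if rule.get("from_port") != [PORT] or rule.get("to_port") != [PORT]:
        return False
    # Look the key up defensively; sg-to-sg rules would raise KeyError here
    # under the original direct indexing.
    return (rule.get("cidr_blocks") == [["0.0.0.0/0"]]
            and "security_groups" not in rule)

cidr_rule = {"from_port": [22], "to_port": [22], "cidr_blocks": [["0.0.0.0/0"]]}
sg_rule = {"from_port": [22], "to_port": [22], "security_groups": [["sg-foo"]]}
print(open_to_world(cidr_rule))  # True
print(open_to_world(sg_rule))    # False, and no KeyError
```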
gh_patches_debug_32754
rasdani/github-patches
git_diff
Mailu__Mailu-1349
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Support for SRS See https://en.wikipedia.org/wiki/Sender_Rewriting_Scheme and https://github.com/roehling/postsrsd --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `core/admin/mailu/internal/views/postfix.py` Content: ``` 1 from mailu import models 2 from mailu.internal import internal 3 4 import flask 5 import re 6 7 8 @internal.route("/postfix/domain/<domain_name>") 9 def postfix_mailbox_domain(domain_name): 10 if re.match("^\[.*\]$", domain_name): 11 return flask.abort(404) 12 domain = models.Domain.query.get(domain_name) or \ 13 models.Alternative.query.get(domain_name) or \ 14 flask.abort(404) 15 return flask.jsonify(domain.name) 16 17 18 @internal.route("/postfix/mailbox/<path:email>") 19 def postfix_mailbox_map(email): 20 user = models.User.query.get(email) or flask.abort(404) 21 return flask.jsonify(user.email) 22 23 24 @internal.route("/postfix/alias/<path:alias>") 25 def postfix_alias_map(alias): 26 localpart, domain_name = models.Email.resolve_domain(alias) 27 if localpart is None: 28 return flask.jsonify(domain_name) 29 destination = models.Email.resolve_destination(localpart, domain_name) 30 return flask.jsonify(",".join(destination)) if destination else flask.abort(404) 31 32 33 @internal.route("/postfix/transport/<path:email>") 34 def postfix_transport(email): 35 if email == '*' or re.match("(^|.*@)\[.*\]$", email): 36 return flask.abort(404) 37 localpart, domain_name = models.Email.resolve_domain(email) 38 relay = models.Relay.query.get(domain_name) or flask.abort(404) 39 return flask.jsonify("smtp:[{}]".format(relay.smtp)) 40 41 42 @internal.route("/postfix/sender/login/<path:sender>") 43 def postfix_sender_login(sender): 44 localpart, domain_name = models.Email.resolve_domain(sender) 45 if localpart is None: 46 return flask.abort(404) 47 destination = models.Email.resolve_destination(localpart, domain_name, True) 48 return flask.jsonify(",".join(destination)) if destination else flask.abort(404) 49 50 51 @internal.route("/postfix/sender/access/<path:sender>") 52 def postfix_sender_access(sender): 53 """ Simply reject any sender that pretends to be from a local domain 54 """ 55 if not is_void_address(sender): 56 localpart, domain_name = models.Email.resolve_domain(sender) 57 return flask.jsonify("REJECT") if models.Domain.query.get(domain_name) else flask.abort(404) 58 else: 59 return flask.abort(404) 60 61 62 def is_void_address(email): 63 '''True if the email is void (null) email address. 64 ''' 65 if email.startswith('<') and email.endswith('>'): 66 email = email[1:-1] 67 # Some MTAs use things like '<MAILER-DAEMON>' instead of '<>'; so let's 68 # consider void any such thing. 69 return '@' not in email 70 ``` Path: `core/postfix/start.py` Content: ``` 1 #!/usr/bin/python3 2 3 import os 4 import glob 5 import shutil 6 import multiprocessing 7 import logging as log 8 import sys 9 10 from podop import run_server 11 from socrate import system, conf 12 13 log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING")) 14 15 def start_podop(): 16 os.setuid(100) 17 url = "http://" + os.environ["ADMIN_ADDRESS"] + "/internal/postfix/" 18 # TODO: Remove verbosity setting from Podop? 
19 run_server(0, "postfix", "/tmp/podop.socket", [ 20 ("transport", "url", url + "transport/§"), 21 ("alias", "url", url + "alias/§"), 22 ("domain", "url", url + "domain/§"), 23 ("mailbox", "url", url + "mailbox/§"), 24 ("senderaccess", "url", url + "sender/access/§"), 25 ("senderlogin", "url", url + "sender/login/§") 26 ]) 27 28 # Actual startup script 29 os.environ["FRONT_ADDRESS"] = system.get_host_address_from_environment("FRONT", "front") 30 os.environ["ADMIN_ADDRESS"] = system.get_host_address_from_environment("ADMIN", "admin") 31 os.environ["ANTISPAM_MILTER_ADDRESS"] = system.get_host_address_from_environment("ANTISPAM_MILTER", "antispam:11332") 32 os.environ["LMTP_ADDRESS"] = system.get_host_address_from_environment("LMTP", "imap:2525") 33 34 for postfix_file in glob.glob("/conf/*.cf"): 35 conf.jinja(postfix_file, os.environ, os.path.join("/etc/postfix", os.path.basename(postfix_file))) 36 37 if os.path.exists("/overrides/postfix.cf"): 38 for line in open("/overrides/postfix.cf").read().strip().split("\n"): 39 os.system('postconf -e "{}"'.format(line)) 40 41 if os.path.exists("/overrides/postfix.master"): 42 for line in open("/overrides/postfix.master").read().strip().split("\n"): 43 os.system('postconf -Me "{}"'.format(line)) 44 45 for map_file in glob.glob("/overrides/*.map"): 46 destination = os.path.join("/etc/postfix", os.path.basename(map_file)) 47 shutil.copyfile(map_file, destination) 48 os.system("postmap {}".format(destination)) 49 os.remove(destination) 50 51 if "RELAYUSER" in os.environ: 52 path = "/etc/postfix/sasl_passwd" 53 conf.jinja("/conf/sasl_passwd", os.environ, path) 54 os.system("postmap {}".format(path)) 55 56 # Run Podop and Postfix 57 multiprocessing.Process(target=start_podop).start() 58 os.system("/usr/libexec/postfix/post-install meta_directory=/etc/postfix create-missing") 59 os.system("postfix start-fg") 60 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/core/admin/mailu/internal/views/postfix.py b/core/admin/mailu/internal/views/postfix.py --- a/core/admin/mailu/internal/views/postfix.py +++ b/core/admin/mailu/internal/views/postfix.py @@ -3,6 +3,7 @@ import flask import re +import srslib @internal.route("/postfix/domain/<domain_name>") @@ -39,6 +40,38 @@ return flask.jsonify("smtp:[{}]".format(relay.smtp)) [email protected]("/postfix/recipient/map/<path:recipient>") +def postfix_recipient_map(recipient): + """ Rewrite the envelope recipient if it is a valid SRS address. + + This is meant for bounces to go back to the original sender. + """ + srs = srslib.SRS(flask.current_app.config["SECRET_KEY"]) + if srslib.SRS.is_srs_address(recipient): + try: + return flask.jsonify(srs.reverse(recipient)) + except srslib.Error as error: + return flask.abort(404) + return flask.abort(404) + + [email protected]("/postfix/sender/map/<path:sender>") +def postfix_sender_map(sender): + """ Rewrite the envelope sender in case the mail was not emitted by us. + + This is for bounces to come back the reverse path properly. + """ + srs = srslib.SRS(flask.current_app.config["SECRET_KEY"]) + domain = flask.current_app.config["DOMAIN"] + try: + localpart, domain_name = models.Email.resolve_domain(sender) + except Exception as error: + return flask.abort(404) + if models.Domain.query.get(domain_name): + return flask.abort(404) + return flask.jsonify(srs.forward(sender, domain)) + + @internal.route("/postfix/sender/login/<path:sender>") def postfix_sender_login(sender): localpart, domain_name = models.Email.resolve_domain(sender) diff --git a/core/postfix/start.py b/core/postfix/start.py --- a/core/postfix/start.py +++ b/core/postfix/start.py @@ -21,6 +21,8 @@ ("alias", "url", url + "alias/§"), ("domain", "url", url + "domain/§"), ("mailbox", "url", url + "mailbox/§"), + ("recipientmap", "url", url + "recipient/map/§"), + ("sendermap", "url", url + "sender/map/§"), ("senderaccess", "url", url + "sender/access/§"), ("senderlogin", "url", url + "sender/login/§") ])
{"golden_diff": "diff --git a/core/admin/mailu/internal/views/postfix.py b/core/admin/mailu/internal/views/postfix.py\n--- a/core/admin/mailu/internal/views/postfix.py\n+++ b/core/admin/mailu/internal/views/postfix.py\n@@ -3,6 +3,7 @@\n \n import flask\n import re\n+import srslib\n \n \n @internal.route(\"/postfix/domain/<domain_name>\")\n@@ -39,6 +40,38 @@\n return flask.jsonify(\"smtp:[{}]\".format(relay.smtp))\n \n \[email protected](\"/postfix/recipient/map/<path:recipient>\")\n+def postfix_recipient_map(recipient):\n+ \"\"\" Rewrite the envelope recipient if it is a valid SRS address.\n+\n+ This is meant for bounces to go back to the original sender.\n+ \"\"\"\n+ srs = srslib.SRS(flask.current_app.config[\"SECRET_KEY\"])\n+ if srslib.SRS.is_srs_address(recipient):\n+ try:\n+ return flask.jsonify(srs.reverse(recipient))\n+ except srslib.Error as error:\n+ return flask.abort(404)\n+ return flask.abort(404)\n+\n+\[email protected](\"/postfix/sender/map/<path:sender>\")\n+def postfix_sender_map(sender):\n+ \"\"\" Rewrite the envelope sender in case the mail was not emitted by us.\n+\n+ This is for bounces to come back the reverse path properly.\n+ \"\"\"\n+ srs = srslib.SRS(flask.current_app.config[\"SECRET_KEY\"])\n+ domain = flask.current_app.config[\"DOMAIN\"]\n+ try:\n+ localpart, domain_name = models.Email.resolve_domain(sender)\n+ except Exception as error:\n+ return flask.abort(404)\n+ if models.Domain.query.get(domain_name):\n+ return flask.abort(404)\n+ return flask.jsonify(srs.forward(sender, domain))\n+\n+\n @internal.route(\"/postfix/sender/login/<path:sender>\")\n def postfix_sender_login(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\ndiff --git a/core/postfix/start.py b/core/postfix/start.py\n--- a/core/postfix/start.py\n+++ b/core/postfix/start.py\n@@ -21,6 +21,8 @@\n \t\t(\"alias\", \"url\", url + \"alias/\u00a7\"),\n \t\t(\"domain\", \"url\", url + \"domain/\u00a7\"),\n (\"mailbox\", \"url\", url + \"mailbox/\u00a7\"),\n+ (\"recipientmap\", \"url\", url + \"recipient/map/\u00a7\"),\n+ (\"sendermap\", \"url\", url + \"sender/map/\u00a7\"),\n (\"senderaccess\", \"url\", url + \"sender/access/\u00a7\"),\n (\"senderlogin\", \"url\", url + \"sender/login/\u00a7\")\n ])\n", "issue": "Support for SRS\nSee https://en.wikipedia.org/wiki/Sender_Rewriting_Scheme and https://github.com/roehling/postsrsd\n", "before_files": [{"content": "from mailu import models\nfrom mailu.internal import internal\n\nimport flask\nimport re\n\n\[email protected](\"/postfix/domain/<domain_name>\")\ndef postfix_mailbox_domain(domain_name):\n if re.match(\"^\\[.*\\]$\", domain_name):\n return flask.abort(404)\n domain = models.Domain.query.get(domain_name) or \\\n models.Alternative.query.get(domain_name) or \\\n flask.abort(404)\n return flask.jsonify(domain.name)\n\n\[email protected](\"/postfix/mailbox/<path:email>\")\ndef postfix_mailbox_map(email):\n user = models.User.query.get(email) or flask.abort(404)\n return flask.jsonify(user.email)\n\n\[email protected](\"/postfix/alias/<path:alias>\")\ndef postfix_alias_map(alias):\n localpart, domain_name = models.Email.resolve_domain(alias)\n if localpart is None:\n return flask.jsonify(domain_name)\n destination = models.Email.resolve_destination(localpart, domain_name)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/transport/<path:email>\")\ndef postfix_transport(email):\n if email == '*' or re.match(\"(^|.*@)\\[.*\\]$\", email):\n return flask.abort(404)\n 
localpart, domain_name = models.Email.resolve_domain(email)\n relay = models.Relay.query.get(domain_name) or flask.abort(404)\n return flask.jsonify(\"smtp:[{}]\".format(relay.smtp))\n\n\[email protected](\"/postfix/sender/login/<path:sender>\")\ndef postfix_sender_login(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\n if localpart is None:\n return flask.abort(404)\n destination = models.Email.resolve_destination(localpart, domain_name, True)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/sender/access/<path:sender>\")\ndef postfix_sender_access(sender):\n \"\"\" Simply reject any sender that pretends to be from a local domain\n \"\"\"\n if not is_void_address(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\n return flask.jsonify(\"REJECT\") if models.Domain.query.get(domain_name) else flask.abort(404)\n else:\n return flask.abort(404)\n\n\ndef is_void_address(email):\n '''True if the email is void (null) email address.\n '''\n if email.startswith('<') and email.endswith('>'):\n email = email[1:-1]\n # Some MTAs use things like '<MAILER-DAEMON>' instead of '<>'; so let's\n # consider void any such thing.\n return '@' not in email\n", "path": "core/admin/mailu/internal/views/postfix.py"}, {"content": "#!/usr/bin/python3\n\nimport os\nimport glob\nimport shutil\nimport multiprocessing\nimport logging as log\nimport sys\n\nfrom podop import run_server\nfrom socrate import system, conf\n\nlog.basicConfig(stream=sys.stderr, level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n\ndef start_podop():\n os.setuid(100)\n url = \"http://\" + os.environ[\"ADMIN_ADDRESS\"] + \"/internal/postfix/\"\n # TODO: Remove verbosity setting from Podop?\n run_server(0, \"postfix\", \"/tmp/podop.socket\", [\n\t\t(\"transport\", \"url\", url + \"transport/\u00a7\"),\n\t\t(\"alias\", \"url\", url + \"alias/\u00a7\"),\n\t\t(\"domain\", \"url\", url + \"domain/\u00a7\"),\n (\"mailbox\", \"url\", url + \"mailbox/\u00a7\"),\n (\"senderaccess\", \"url\", url + \"sender/access/\u00a7\"),\n (\"senderlogin\", \"url\", url + \"sender/login/\u00a7\")\n ])\n\n# Actual startup script\nos.environ[\"FRONT_ADDRESS\"] = system.get_host_address_from_environment(\"FRONT\", \"front\")\nos.environ[\"ADMIN_ADDRESS\"] = system.get_host_address_from_environment(\"ADMIN\", \"admin\")\nos.environ[\"ANTISPAM_MILTER_ADDRESS\"] = system.get_host_address_from_environment(\"ANTISPAM_MILTER\", \"antispam:11332\")\nos.environ[\"LMTP_ADDRESS\"] = system.get_host_address_from_environment(\"LMTP\", \"imap:2525\")\n\nfor postfix_file in glob.glob(\"/conf/*.cf\"):\n conf.jinja(postfix_file, os.environ, os.path.join(\"/etc/postfix\", os.path.basename(postfix_file)))\n\nif os.path.exists(\"/overrides/postfix.cf\"):\n for line in open(\"/overrides/postfix.cf\").read().strip().split(\"\\n\"):\n os.system('postconf -e \"{}\"'.format(line))\n\nif os.path.exists(\"/overrides/postfix.master\"):\n for line in open(\"/overrides/postfix.master\").read().strip().split(\"\\n\"):\n os.system('postconf -Me \"{}\"'.format(line))\n\nfor map_file in glob.glob(\"/overrides/*.map\"):\n destination = os.path.join(\"/etc/postfix\", os.path.basename(map_file))\n shutil.copyfile(map_file, destination)\n os.system(\"postmap {}\".format(destination))\n os.remove(destination)\n\nif \"RELAYUSER\" in os.environ:\n path = \"/etc/postfix/sasl_passwd\"\n conf.jinja(\"/conf/sasl_passwd\", os.environ, path)\n os.system(\"postmap {}\".format(path))\n\n# Run Podop and 
Postfix\nmultiprocessing.Process(target=start_podop).start()\nos.system(\"/usr/libexec/postfix/post-install meta_directory=/etc/postfix create-missing\")\nos.system(\"postfix start-fg\")\n", "path": "core/postfix/start.py"}], "after_files": [{"content": "from mailu import models\nfrom mailu.internal import internal\n\nimport flask\nimport re\nimport srslib\n\n\[email protected](\"/postfix/domain/<domain_name>\")\ndef postfix_mailbox_domain(domain_name):\n if re.match(\"^\\[.*\\]$\", domain_name):\n return flask.abort(404)\n domain = models.Domain.query.get(domain_name) or \\\n models.Alternative.query.get(domain_name) or \\\n flask.abort(404)\n return flask.jsonify(domain.name)\n\n\[email protected](\"/postfix/mailbox/<path:email>\")\ndef postfix_mailbox_map(email):\n user = models.User.query.get(email) or flask.abort(404)\n return flask.jsonify(user.email)\n\n\[email protected](\"/postfix/alias/<path:alias>\")\ndef postfix_alias_map(alias):\n localpart, domain_name = models.Email.resolve_domain(alias)\n if localpart is None:\n return flask.jsonify(domain_name)\n destination = models.Email.resolve_destination(localpart, domain_name)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/transport/<path:email>\")\ndef postfix_transport(email):\n if email == '*' or re.match(\"(^|.*@)\\[.*\\]$\", email):\n return flask.abort(404)\n localpart, domain_name = models.Email.resolve_domain(email)\n relay = models.Relay.query.get(domain_name) or flask.abort(404)\n return flask.jsonify(\"smtp:[{}]\".format(relay.smtp))\n\n\[email protected](\"/postfix/recipient/map/<path:recipient>\")\ndef postfix_recipient_map(recipient):\n \"\"\" Rewrite the envelope recipient if it is a valid SRS address.\n\n This is meant for bounces to go back to the original sender.\n \"\"\"\n srs = srslib.SRS(flask.current_app.config[\"SECRET_KEY\"])\n if srslib.SRS.is_srs_address(recipient):\n try:\n return flask.jsonify(srs.reverse(recipient))\n except srslib.Error as error:\n return flask.abort(404)\n return flask.abort(404)\n\n\[email protected](\"/postfix/sender/map/<path:sender>\")\ndef postfix_sender_map(sender):\n \"\"\" Rewrite the envelope sender in case the mail was not emitted by us.\n\n This is for bounces to come back the reverse path properly.\n \"\"\"\n srs = srslib.SRS(flask.current_app.config[\"SECRET_KEY\"])\n domain = flask.current_app.config[\"DOMAIN\"]\n try:\n localpart, domain_name = models.Email.resolve_domain(sender)\n except Exception as error:\n return flask.abort(404)\n if models.Domain.query.get(domain_name):\n return flask.abort(404)\n return flask.jsonify(srs.forward(sender, domain))\n\n\[email protected](\"/postfix/sender/login/<path:sender>\")\ndef postfix_sender_login(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\n if localpart is None:\n return flask.abort(404)\n destination = models.Email.resolve_destination(localpart, domain_name, True)\n return flask.jsonify(\",\".join(destination)) if destination else flask.abort(404)\n\n\[email protected](\"/postfix/sender/access/<path:sender>\")\ndef postfix_sender_access(sender):\n \"\"\" Simply reject any sender that pretends to be from a local domain\n \"\"\"\n if not is_void_address(sender):\n localpart, domain_name = models.Email.resolve_domain(sender)\n return flask.jsonify(\"REJECT\") if models.Domain.query.get(domain_name) else flask.abort(404)\n else:\n return flask.abort(404)\n\n\ndef is_void_address(email):\n '''True if the email is void (null) email 
address.\n '''\n if email.startswith('<') and email.endswith('>'):\n email = email[1:-1]\n # Some MTAs use things like '<MAILER-DAEMON>' instead of '<>'; so let's\n # consider void any such thing.\n return '@' not in email\n", "path": "core/admin/mailu/internal/views/postfix.py"}, {"content": "#!/usr/bin/python3\n\nimport os\nimport glob\nimport shutil\nimport multiprocessing\nimport logging as log\nimport sys\n\nfrom podop import run_server\nfrom socrate import system, conf\n\nlog.basicConfig(stream=sys.stderr, level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n\ndef start_podop():\n os.setuid(100)\n url = \"http://\" + os.environ[\"ADMIN_ADDRESS\"] + \"/internal/postfix/\"\n # TODO: Remove verbosity setting from Podop?\n run_server(0, \"postfix\", \"/tmp/podop.socket\", [\n\t\t(\"transport\", \"url\", url + \"transport/\u00a7\"),\n\t\t(\"alias\", \"url\", url + \"alias/\u00a7\"),\n\t\t(\"domain\", \"url\", url + \"domain/\u00a7\"),\n (\"mailbox\", \"url\", url + \"mailbox/\u00a7\"),\n (\"recipientmap\", \"url\", url + \"recipient/map/\u00a7\"),\n (\"sendermap\", \"url\", url + \"sender/map/\u00a7\"),\n (\"senderaccess\", \"url\", url + \"sender/access/\u00a7\"),\n (\"senderlogin\", \"url\", url + \"sender/login/\u00a7\")\n ])\n\n# Actual startup script\nos.environ[\"FRONT_ADDRESS\"] = system.get_host_address_from_environment(\"FRONT\", \"front\")\nos.environ[\"ADMIN_ADDRESS\"] = system.get_host_address_from_environment(\"ADMIN\", \"admin\")\nos.environ[\"ANTISPAM_MILTER_ADDRESS\"] = system.get_host_address_from_environment(\"ANTISPAM_MILTER\", \"antispam:11332\")\nos.environ[\"LMTP_ADDRESS\"] = system.get_host_address_from_environment(\"LMTP\", \"imap:2525\")\n\nfor postfix_file in glob.glob(\"/conf/*.cf\"):\n conf.jinja(postfix_file, os.environ, os.path.join(\"/etc/postfix\", os.path.basename(postfix_file)))\n\nif os.path.exists(\"/overrides/postfix.cf\"):\n for line in open(\"/overrides/postfix.cf\").read().strip().split(\"\\n\"):\n os.system('postconf -e \"{}\"'.format(line))\n\nif os.path.exists(\"/overrides/postfix.master\"):\n for line in open(\"/overrides/postfix.master\").read().strip().split(\"\\n\"):\n os.system('postconf -Me \"{}\"'.format(line))\n\nfor map_file in glob.glob(\"/overrides/*.map\"):\n destination = os.path.join(\"/etc/postfix\", os.path.basename(map_file))\n shutil.copyfile(map_file, destination)\n os.system(\"postmap {}\".format(destination))\n os.remove(destination)\n\nif \"RELAYUSER\" in os.environ:\n path = \"/etc/postfix/sasl_passwd\"\n conf.jinja(\"/conf/sasl_passwd\", os.environ, path)\n os.system(\"postmap {}\".format(path))\n\n# Run Podop and Postfix\nmultiprocessing.Process(target=start_podop).start()\nos.system(\"/usr/libexec/postfix/post-install meta_directory=/etc/postfix create-missing\")\nos.system(\"postfix start-fg\")\n", "path": "core/postfix/start.py"}]}
1,750
601
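The fix in this record hinges entirely on `srslib` for the SRS rewrite/bounce path. A minimal sketch of that round-trip, restricted to the calls that actually appear in the patched views above (the secret key, domain, and addresses are placeholders, not values from the record):

```python
# Sketch of the SRS round-trip behind postfix_sender_map / postfix_recipient_map.
import srslib

srs = srslib.SRS("placeholder-secret-key")  # stands in for Flask's SECRET_KEY

# Outbound (sender map): rewrite the envelope sender of forwarded mail so
# bounces are routed back through our domain.
srs_address = srs.forward("[email protected]", "mailu.example.net")

# Inbound (recipient map): on the bounce path, recover the original sender.
if srslib.SRS.is_srs_address(srs_address):
    try:
        original = srs.reverse(srs_address)  # should recover [email protected]
    except srslib.Error:
        original = None  # invalid or tampered SRS address
```

The guard-plus-`except srslib.Error` shape mirrors the views: anything that is not a verifiable SRS address falls through to `flask.abort(404)`, so Postfix treats it as an ordinary recipient.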
gh_patches_debug_32359
rasdani/github-patches
git_diff
saulpw__visidata-2257
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Fixed width saver and loader don't round trip. (Columns expand with increasing number of spaces) **Small description** Open `test.csv`: ``` csv colours,counts red,3 green,5 blue,8 ``` Then save it as `test.fixed`: ``` colours counts red 3 green 5 blue 8 ``` Each column is separated by three spaces. Now, open the newly saved `test.fixed`, and save it as `test.fixed.csv`. Upon inspection, you will see that the three spaces have been included as column data, rather than being discarded as a separator: ``` colours,counts red ,3 green ,5 blue ,8 ``` If you repeat this process, three spaces get appended to each column every time you repeat a round trip. **Expected result** I expect to be able to round trip from CSV to Fixed and back without extra spaces being added to the data. **Steps to reproduce with sample data and a .vd** [test-fixed-saver.zip](https://github.com/saulpw/visidata/files/13938788/test-fixed-saver.zip) **Additional context** - saul.pw/VisiData v3.0.1 - Python 3.10.12 --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `visidata/loaders/fixed_width.py` Content: ``` 1 2 from visidata import VisiData, vd, Sheet, Column, Progress, SequenceSheet 3 4 5 vd.option('fixed_rows', 1000, 'number of rows to check for fixed width columns') 6 vd.option('fixed_maxcols', 0, 'max number of fixed-width columns to create (0 is no max)') 7 8 @VisiData.api 9 def open_fixed(vd, p): 10 return FixedWidthColumnsSheet(p.base_stem, source=p, headerlines=[]) 11 12 class FixedWidthColumn(Column): 13 def __init__(self, name, i, j, **kwargs): 14 super().__init__(name, **kwargs) 15 self.i, self.j = i, j 16 17 def calcValue(self, row): 18 return row[0][self.i:self.j] 19 20 def putValue(self, row, value): 21 value = str(value)[:self.j-self.i] 22 j = self.j or len(row) 23 row[0] = row[0][:self.i] + '%-*s' % (j-self.i, value) + row[0][self.j:] 24 25 def columnize(rows): 26 'Generate (i,j) indexes for fixed-width columns found in rows' 27 28 ## find all character columns that are not spaces ever 29 allNonspaces = set() 30 for r in rows: 31 for i, ch in enumerate(r): 32 if not ch.isspace(): 33 allNonspaces.add(i) 34 35 colstart = 0 36 prev = 0 37 38 # collapse fields 39 for i in allNonspaces: 40 if i > prev+1: 41 yield colstart, i 42 colstart = i 43 prev = i 44 45 yield colstart, prev+1 # final column gets rest of line 46 47 48 class FixedWidthColumnsSheet(SequenceSheet): 49 rowtype = 'lines' # rowdef: [line] (wrapping in list makes it unique and modifiable) 50 def addRow(self, row, index=None): 51 Sheet.addRow(self, row, index=index) 52 53 def iterload(self): 54 itsource = iter(self.source) 55 56 # compute fixed width columns from first fixed_rows lines 57 maxcols = self.options.fixed_maxcols 58 self.columns = [] 59 fixedRows = list([x] for x in self.optlines(itsource, 'fixed_rows')) 60 for i, j in columnize(list(r[0] for r in fixedRows)): 61 if maxcols and self.nCols >= maxcols-1: 62 self.addColumn(FixedWidthColumn('', i, None)) 63 break 64 else: 65 self.addColumn(FixedWidthColumn('', i, j)) 66 67 yield from fixedRows 68 69 self.setColNames(self.headerlines) 70 71 yield from ([line] for line in itsource) 72 73 def setCols(self, headerlines): 74 self.headerlines = headerlines 75 76 77 @VisiData.api 78 def save_fixed(vd, p, *vsheets): 79 with p.open(mode='w', encoding=vsheets[0].options.save_encoding) as fp: 80 
for sheet in vsheets: 81 if len(vsheets) > 1: 82 fp.write('%s\n\n' % sheet.name) 83 84 widths = {} # Column -> width:int 85 # headers 86 for col in Progress(sheet.visibleCols, gerund='sizing'): 87 widths[col] = col.getMaxWidth(sheet.rows) #1849 88 fp.write(('{0:%s} ' % widths[col]).format(col.name)) 89 fp.write('\n') 90 91 # rows 92 with Progress(gerund='saving'): 93 for dispvals in sheet.iterdispvals(format=True): 94 for col, val in dispvals.items(): 95 fp.write(('{0:%s%s.%s} ' % ('>' if vd.isNumeric(col) else '<', widths[col], widths[col])).format(val)) 96 fp.write('\n') 97 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/visidata/loaders/fixed_width.py b/visidata/loaders/fixed_width.py --- a/visidata/loaders/fixed_width.py +++ b/visidata/loaders/fixed_width.py @@ -1,5 +1,5 @@ -from visidata import VisiData, vd, Sheet, Column, Progress, SequenceSheet +from visidata import VisiData, vd, Sheet, Column, Progress, SequenceSheet, Column, dispwidth vd.option('fixed_rows', 1000, 'number of rows to check for fixed width columns') @@ -9,6 +9,22 @@ def open_fixed(vd, p): return FixedWidthColumnsSheet(p.base_stem, source=p, headerlines=[]) [email protected] +def getMaxDataWidth(col, rows): #2255 need real max width for fixed width saver + '''Return the maximum length of any cell in column or its header, + even if wider than window. (Slow for large cells!)''' + + w = 0 + nlen = dispwidth(col.name) + if len(rows) > 0: + w_max = 0 + for r in rows: + row_w = dispwidth(col.getDisplayValue(r)) + if w_max < row_w: + w_max = row_w + w = w_max + return max(w, nlen) + class FixedWidthColumn(Column): def __init__(self, name, i, j, **kwargs): super().__init__(name, **kwargs) @@ -38,7 +54,7 @@ # collapse fields for i in allNonspaces: if i > prev+1: - yield colstart, i + yield colstart, prev+1 #2255 colstart = i prev = i @@ -84,7 +100,7 @@ widths = {} # Column -> width:int # headers for col in Progress(sheet.visibleCols, gerund='sizing'): - widths[col] = col.getMaxWidth(sheet.rows) #1849 + widths[col] = col.getMaxDataWidth(sheet.rows) #1849 #2255 fp.write(('{0:%s} ' % widths[col]).format(col.name)) fp.write('\n')
{"golden_diff": "diff --git a/visidata/loaders/fixed_width.py b/visidata/loaders/fixed_width.py\n--- a/visidata/loaders/fixed_width.py\n+++ b/visidata/loaders/fixed_width.py\n@@ -1,5 +1,5 @@\n \n-from visidata import VisiData, vd, Sheet, Column, Progress, SequenceSheet\n+from visidata import VisiData, vd, Sheet, Column, Progress, SequenceSheet, Column, dispwidth\n \n \n vd.option('fixed_rows', 1000, 'number of rows to check for fixed width columns')\n@@ -9,6 +9,22 @@\n def open_fixed(vd, p):\n return FixedWidthColumnsSheet(p.base_stem, source=p, headerlines=[])\n \[email protected]\n+def getMaxDataWidth(col, rows): #2255 need real max width for fixed width saver\n+ '''Return the maximum length of any cell in column or its header,\n+ even if wider than window. (Slow for large cells!)'''\n+\n+ w = 0\n+ nlen = dispwidth(col.name)\n+ if len(rows) > 0:\n+ w_max = 0\n+ for r in rows:\n+ row_w = dispwidth(col.getDisplayValue(r))\n+ if w_max < row_w:\n+ w_max = row_w\n+ w = w_max\n+ return max(w, nlen)\n+\n class FixedWidthColumn(Column):\n def __init__(self, name, i, j, **kwargs):\n super().__init__(name, **kwargs)\n@@ -38,7 +54,7 @@\n # collapse fields\n for i in allNonspaces:\n if i > prev+1:\n- yield colstart, i\n+ yield colstart, prev+1 #2255\n colstart = i\n prev = i\n \n@@ -84,7 +100,7 @@\n widths = {} # Column -> width:int\n # headers\n for col in Progress(sheet.visibleCols, gerund='sizing'):\n- widths[col] = col.getMaxWidth(sheet.rows) #1849\n+ widths[col] = col.getMaxDataWidth(sheet.rows) #1849 #2255\n fp.write(('{0:%s} ' % widths[col]).format(col.name))\n fp.write('\\n')\n", "issue": "Fixed width saver and loader don't round trip. (Columns expand with increasing number of spaces)\n**Small description**\r\n\r\nOpen `test.csv`:\r\n\r\n``` csv\r\ncolours,counts\r\nred,3\r\ngreen,5\r\nblue,8\r\n```\r\n\r\nThen save it as `test.fixed`:\r\n\r\n```\r\ncolours counts \r\nred 3 \r\ngreen 5 \r\nblue 8 \r\n```\r\n\r\nEach column is separated by three spaces.\r\n\r\nNow, open the newly saved `test.fixed`, and save it as `test.fixed.csv`.\r\nUpon inspection, you will see that the three spaces have been included as column data, rather than being discarded as a separator:\r\n\r\n```\r\ncolours,counts\r\nred ,3 \r\ngreen ,5 \r\nblue ,8 \r\n```\r\n\r\nIf you repeat this process, three spaces get appended to each column every time you repeat a round trip.\r\n\r\n**Expected result**\r\n\r\nI expect to be able to round trip from CSV to Fixed and back without extra spaces being added to the data.\r\n\r\n\r\n**Steps to reproduce with sample data and a .vd**\r\n\r\n[test-fixed-saver.zip](https://github.com/saulpw/visidata/files/13938788/test-fixed-saver.zip)\r\n\r\n**Additional context**\r\n\r\n- saul.pw/VisiData v3.0.1\r\n- Python 3.10.12\r\n\n", "before_files": [{"content": "\nfrom visidata import VisiData, vd, Sheet, Column, Progress, SequenceSheet\n\n\nvd.option('fixed_rows', 1000, 'number of rows to check for fixed width columns')\nvd.option('fixed_maxcols', 0, 'max number of fixed-width columns to create (0 is no max)')\n\[email protected]\ndef open_fixed(vd, p):\n return FixedWidthColumnsSheet(p.base_stem, source=p, headerlines=[])\n\nclass FixedWidthColumn(Column):\n def __init__(self, name, i, j, **kwargs):\n super().__init__(name, **kwargs)\n self.i, self.j = i, j\n\n def calcValue(self, row):\n return row[0][self.i:self.j]\n\n def putValue(self, row, value):\n value = str(value)[:self.j-self.i]\n j = self.j or len(row)\n row[0] = row[0][:self.i] + '%-*s' % (j-self.i, value) + row[0][self.j:]\n\ndef 
columnize(rows):\n 'Generate (i,j) indexes for fixed-width columns found in rows'\n\n ## find all character columns that are not spaces ever\n allNonspaces = set()\n for r in rows:\n for i, ch in enumerate(r):\n if not ch.isspace():\n allNonspaces.add(i)\n\n colstart = 0\n prev = 0\n\n # collapse fields\n for i in allNonspaces:\n if i > prev+1:\n yield colstart, i\n colstart = i\n prev = i\n\n yield colstart, prev+1 # final column gets rest of line\n\n\nclass FixedWidthColumnsSheet(SequenceSheet):\n rowtype = 'lines' # rowdef: [line] (wrapping in list makes it unique and modifiable)\n def addRow(self, row, index=None):\n Sheet.addRow(self, row, index=index)\n\n def iterload(self):\n itsource = iter(self.source)\n\n # compute fixed width columns from first fixed_rows lines\n maxcols = self.options.fixed_maxcols\n self.columns = []\n fixedRows = list([x] for x in self.optlines(itsource, 'fixed_rows'))\n for i, j in columnize(list(r[0] for r in fixedRows)):\n if maxcols and self.nCols >= maxcols-1:\n self.addColumn(FixedWidthColumn('', i, None))\n break\n else:\n self.addColumn(FixedWidthColumn('', i, j))\n\n yield from fixedRows\n\n self.setColNames(self.headerlines)\n\n yield from ([line] for line in itsource)\n\n def setCols(self, headerlines):\n self.headerlines = headerlines\n\n\[email protected]\ndef save_fixed(vd, p, *vsheets):\n with p.open(mode='w', encoding=vsheets[0].options.save_encoding) as fp:\n for sheet in vsheets:\n if len(vsheets) > 1:\n fp.write('%s\\n\\n' % sheet.name)\n\n widths = {} # Column -> width:int\n # headers\n for col in Progress(sheet.visibleCols, gerund='sizing'):\n widths[col] = col.getMaxWidth(sheet.rows) #1849\n fp.write(('{0:%s} ' % widths[col]).format(col.name))\n fp.write('\\n')\n\n # rows\n with Progress(gerund='saving'):\n for dispvals in sheet.iterdispvals(format=True):\n for col, val in dispvals.items():\n fp.write(('{0:%s%s.%s} ' % ('>' if vd.isNumeric(col) else '<', widths[col], widths[col])).format(val))\n fp.write('\\n')\n", "path": "visidata/loaders/fixed_width.py"}], "after_files": [{"content": "\nfrom visidata import VisiData, vd, Sheet, Column, Progress, SequenceSheet, Column, dispwidth\n\n\nvd.option('fixed_rows', 1000, 'number of rows to check for fixed width columns')\nvd.option('fixed_maxcols', 0, 'max number of fixed-width columns to create (0 is no max)')\n\[email protected]\ndef open_fixed(vd, p):\n return FixedWidthColumnsSheet(p.base_stem, source=p, headerlines=[])\n\[email protected]\ndef getMaxDataWidth(col, rows): #2255 need real max width for fixed width saver\n '''Return the maximum length of any cell in column or its header,\n even if wider than window. 
(Slow for large cells!)'''\n\n w = 0\n nlen = dispwidth(col.name)\n if len(rows) > 0:\n w_max = 0\n for r in rows:\n row_w = dispwidth(col.getDisplayValue(r))\n if w_max < row_w:\n w_max = row_w\n w = w_max\n return max(w, nlen)\n\nclass FixedWidthColumn(Column):\n def __init__(self, name, i, j, **kwargs):\n super().__init__(name, **kwargs)\n self.i, self.j = i, j\n\n def calcValue(self, row):\n return row[0][self.i:self.j]\n\n def putValue(self, row, value):\n value = str(value)[:self.j-self.i]\n j = self.j or len(row)\n row[0] = row[0][:self.i] + '%-*s' % (j-self.i, value) + row[0][self.j:]\n\ndef columnize(rows):\n 'Generate (i,j) indexes for fixed-width columns found in rows'\n\n ## find all character columns that are not spaces ever\n allNonspaces = set()\n for r in rows:\n for i, ch in enumerate(r):\n if not ch.isspace():\n allNonspaces.add(i)\n\n colstart = 0\n prev = 0\n\n # collapse fields\n for i in allNonspaces:\n if i > prev+1:\n yield colstart, prev+1 #2255\n colstart = i\n prev = i\n\n yield colstart, prev+1 # final column gets rest of line\n\n\nclass FixedWidthColumnsSheet(SequenceSheet):\n rowtype = 'lines' # rowdef: [line] (wrapping in list makes it unique and modifiable)\n def addRow(self, row, index=None):\n Sheet.addRow(self, row, index=index)\n\n def iterload(self):\n itsource = iter(self.source)\n\n # compute fixed width columns from first fixed_rows lines\n maxcols = self.options.fixed_maxcols\n self.columns = []\n fixedRows = list([x] for x in self.optlines(itsource, 'fixed_rows'))\n for i, j in columnize(list(r[0] for r in fixedRows)):\n if maxcols and self.nCols >= maxcols-1:\n self.addColumn(FixedWidthColumn('', i, None))\n break\n else:\n self.addColumn(FixedWidthColumn('', i, j))\n\n yield from fixedRows\n\n self.setColNames(self.headerlines)\n\n yield from ([line] for line in itsource)\n\n def setCols(self, headerlines):\n self.headerlines = headerlines\n\n\[email protected]\ndef save_fixed(vd, p, *vsheets):\n with p.open(mode='w', encoding=vsheets[0].options.save_encoding) as fp:\n for sheet in vsheets:\n if len(vsheets) > 1:\n fp.write('%s\\n\\n' % sheet.name)\n\n widths = {} # Column -> width:int\n # headers\n for col in Progress(sheet.visibleCols, gerund='sizing'):\n widths[col] = col.getMaxDataWidth(sheet.rows) #1849 #2255\n fp.write(('{0:%s} ' % widths[col]).format(col.name))\n fp.write('\\n')\n\n # rows\n with Progress(gerund='saving'):\n for dispvals in sheet.iterdispvals(format=True):\n for col, val in dispvals.items():\n fp.write(('{0:%s%s.%s} ' % ('>' if vd.isNumeric(col) else '<', widths[col], widths[col])).format(val))\n fp.write('\\n')\n", "path": "visidata/loaders/fixed_width.py"}]}
1,567
525
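The one-token change in `columnize` (`yield colstart, i` becomes `yield colstart, prev+1`) is easiest to see on a concrete row. A self-contained sketch of the fixed generator follows; iteration is `sorted()` here for determinism, whereas the original walks a set of indices:

```python
def columnize(rows):
    """Yield (i, j) slices for fixed-width columns found in rows."""
    all_nonspaces = set()
    for r in rows:
        for i, ch in enumerate(r):
            if not ch.isspace():
                all_nonspaces.add(i)

    colstart = 0
    prev = 0
    for i in sorted(all_nonspaces):
        if i > prev + 1:
            yield colstart, prev + 1  # fixed: column ends at its last non-space
            colstart = i
        prev = i
    yield colstart, prev + 1  # final column gets the rest of the line

line = "colours   counts"
print([line[i:j] for i, j in columnize([line])])
# fixed:  ['colours', 'counts']
# buggy:  ['colours   ', 'counts']  -- separator spaces leak into the cell
```

Together with `getMaxDataWidth` (which measures the data itself rather than the window-clamped display width used by `getMaxWidth`), this is what stops the three-space separator from being re-saved as cell content on every round trip.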
gh_patches_debug_13773
rasdani/github-patches
git_diff
googleapis__python-bigquery-164
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Remove redundant dependencies I think we can remove the following from the dependencies list: https://github.com/googleapis/python-bigquery/blob/dbaf3bdc35656fdfef2d4380befdcc2392c1ca83/setup.py#L31-L43 * `google-auth` is already required via [google-api-core](https://github.com/googleapis/python-api-core/blob/master/setup.py#L34) * `protobuf` is already required via [google-api-core](https://github.com/googleapis/python-api-core/blob/master/setup.py#L33) * `rsa` [google-auth](https://github.com/googleapis/google-auth-library-python/blob/master/setup.py#L25-L26) is pinning compatible versions now --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `setup.py` Content: ``` 1 # Copyright 2018 Google LLC 2 # 3 # Licensed under the Apache License, Version 2.0 (the "License"); 4 # you may not use this file except in compliance with the License. 5 # You may obtain a copy of the License at 6 # 7 # http://www.apache.org/licenses/LICENSE-2.0 8 # 9 # Unless required by applicable law or agreed to in writing, software 10 # distributed under the License is distributed on an "AS IS" BASIS, 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 # See the License for the specific language governing permissions and 13 # limitations under the License. 14 15 import io 16 import os 17 18 import setuptools 19 20 21 # Package metadata. 22 23 name = "google-cloud-bigquery" 24 description = "Google BigQuery API client library" 25 version = "1.25.0" 26 # Should be one of: 27 # 'Development Status :: 3 - Alpha' 28 # 'Development Status :: 4 - Beta' 29 # 'Development Status :: 5 - Production/Stable' 30 release_status = "Development Status :: 5 - Production/Stable" 31 dependencies = [ 32 'enum34; python_version < "3.4"', 33 "google-auth >= 1.9.0, < 2.0dev", 34 "google-api-core >= 1.15.0, < 2.0dev", 35 "google-cloud-core >= 1.1.0, < 2.0dev", 36 "google-resumable-media >= 0.5.0, < 0.6dev", 37 "protobuf >= 3.6.0", 38 "six >=1.13.0,< 2.0.0dev", 39 # rsa >= 4.1 is not compatible with Python 2 40 # https://github.com/sybrenstuvel/python-rsa/issues/152 41 'rsa <4.1; python_version < "3"', 42 'rsa >=3.1.4, <5; python_version >= "3"', 43 ] 44 extras = { 45 "bqstorage": [ 46 "google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev", 47 # Due to an issue in pip's dependency resolver, the `grpc` extra is not 48 # installed, even though `google-cloud-bigquery-storage` specifies it 49 # as `google-api-core[grpc]`. We thus need to explicitly specify it here. 50 # See: https://github.com/googleapis/python-bigquery/issues/83 51 "grpcio >= 1.8.2, < 2.0dev", 52 "pyarrow>=0.16.0, < 2.0dev", 53 ], 54 "pandas": ["pandas>=0.17.1"], 55 # Exclude PyArrow dependency from Windows Python 2.7. 56 'pyarrow: platform_system != "Windows" or python_version >= "3.4"': [ 57 # Bad Linux release for 0.14.0. 58 # https://issues.apache.org/jira/browse/ARROW-5868 59 "pyarrow>=0.4.1, != 0.14.0" 60 ], 61 "tqdm": ["tqdm >= 4.0.0, <5.0.0dev"], 62 "fastparquet": [ 63 "fastparquet", 64 "python-snappy", 65 # llvmlite >= 0.32.0 cannot be installed on Python 3.5 and below 66 # (building the wheel fails), thus needs to be restricted. 
67 # See: https://github.com/googleapis/python-bigquery/issues/78 68 "llvmlite <= 0.31.0", 69 ], 70 } 71 72 all_extras = [] 73 74 for extra in extras: 75 if extra == "fastparquet": 76 # Skip fastparquet from "all" because it is redundant with pyarrow and 77 # creates a dependency on pre-release versions of numpy. See: 78 # https://github.com/googleapis/google-cloud-python/issues/8549 79 continue 80 all_extras.extend(extras[extra]) 81 82 extras["all"] = all_extras 83 84 # Setup boilerplate below this line. 85 86 package_root = os.path.abspath(os.path.dirname(__file__)) 87 88 readme_filename = os.path.join(package_root, "README.rst") 89 with io.open(readme_filename, encoding="utf-8") as readme_file: 90 readme = readme_file.read() 91 92 # Only include packages under the 'google' namespace. Do not include tests, 93 # benchmarks, etc. 94 packages = [ 95 package for package in setuptools.find_packages() if package.startswith("google") 96 ] 97 98 # Determine which namespaces are needed. 99 namespaces = ["google"] 100 if "google.cloud" in packages: 101 namespaces.append("google.cloud") 102 103 104 setuptools.setup( 105 name=name, 106 version=version, 107 description=description, 108 long_description=readme, 109 author="Google LLC", 110 author_email="[email protected]", 111 license="Apache 2.0", 112 url="https://github.com/googleapis/python-bigquery", 113 classifiers=[ 114 release_status, 115 "Intended Audience :: Developers", 116 "License :: OSI Approved :: Apache Software License", 117 "Programming Language :: Python", 118 "Programming Language :: Python :: 2", 119 "Programming Language :: Python :: 2.7", 120 "Programming Language :: Python :: 3", 121 "Programming Language :: Python :: 3.5", 122 "Programming Language :: Python :: 3.6", 123 "Programming Language :: Python :: 3.7", 124 "Programming Language :: Python :: 3.8", 125 "Operating System :: OS Independent", 126 "Topic :: Internet", 127 ], 128 platforms="Posix; MacOS X; Windows", 129 packages=packages, 130 namespace_packages=namespaces, 131 install_requires=dependencies, 132 extras_require=extras, 133 python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", 134 include_package_data=True, 135 zip_safe=False, 136 ) 137 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -30,16 +30,10 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ 'enum34; python_version < "3.4"', - "google-auth >= 1.9.0, < 2.0dev", - "google-api-core >= 1.15.0, < 2.0dev", + "google-api-core >= 1.21.0, < 2.0dev", "google-cloud-core >= 1.1.0, < 2.0dev", "google-resumable-media >= 0.5.0, < 0.6dev", - "protobuf >= 3.6.0", "six >=1.13.0,< 2.0.0dev", - # rsa >= 4.1 is not compatible with Python 2 - # https://github.com/sybrenstuvel/python-rsa/issues/152 - 'rsa <4.1; python_version < "3"', - 'rsa >=3.1.4, <5; python_version >= "3"', ] extras = { "bqstorage": [
{"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -30,16 +30,10 @@\n release_status = \"Development Status :: 5 - Production/Stable\"\n dependencies = [\n 'enum34; python_version < \"3.4\"',\n- \"google-auth >= 1.9.0, < 2.0dev\",\n- \"google-api-core >= 1.15.0, < 2.0dev\",\n+ \"google-api-core >= 1.21.0, < 2.0dev\",\n \"google-cloud-core >= 1.1.0, < 2.0dev\",\n \"google-resumable-media >= 0.5.0, < 0.6dev\",\n- \"protobuf >= 3.6.0\",\n \"six >=1.13.0,< 2.0.0dev\",\n- # rsa >= 4.1 is not compatible with Python 2\n- # https://github.com/sybrenstuvel/python-rsa/issues/152\n- 'rsa <4.1; python_version < \"3\"',\n- 'rsa >=3.1.4, <5; python_version >= \"3\"',\n ]\n extras = {\n \"bqstorage\": [\n", "issue": "Remove redundant dependencies\nI think we can remove the following from the dependencies list:\r\n\r\nhttps://github.com/googleapis/python-bigquery/blob/dbaf3bdc35656fdfef2d4380befdcc2392c1ca83/setup.py#L31-L43\r\n\r\n* `google-auth` is already required via [google-api-core](https://github.com/googleapis/python-api-core/blob/master/setup.py#L34)\r\n* `protobuf` is already required via [google-api-core](https://github.com/googleapis/python-api-core/blob/master/setup.py#L33)\r\n* `rsa` [google-auth](https://github.com/googleapis/google-auth-library-python/blob/master/setup.py#L25-L26) is pinning compatible versions now \n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package metadata.\n\nname = \"google-cloud-bigquery\"\ndescription = \"Google BigQuery API client library\"\nversion = \"1.25.0\"\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = \"Development Status :: 5 - Production/Stable\"\ndependencies = [\n 'enum34; python_version < \"3.4\"',\n \"google-auth >= 1.9.0, < 2.0dev\",\n \"google-api-core >= 1.15.0, < 2.0dev\",\n \"google-cloud-core >= 1.1.0, < 2.0dev\",\n \"google-resumable-media >= 0.5.0, < 0.6dev\",\n \"protobuf >= 3.6.0\",\n \"six >=1.13.0,< 2.0.0dev\",\n # rsa >= 4.1 is not compatible with Python 2\n # https://github.com/sybrenstuvel/python-rsa/issues/152\n 'rsa <4.1; python_version < \"3\"',\n 'rsa >=3.1.4, <5; python_version >= \"3\"',\n]\nextras = {\n \"bqstorage\": [\n \"google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev\",\n # Due to an issue in pip's dependency resolver, the `grpc` extra is not\n # installed, even though `google-cloud-bigquery-storage` specifies it\n # as `google-api-core[grpc]`. 
We thus need to explicitly specify it here.\n # See: https://github.com/googleapis/python-bigquery/issues/83\n \"grpcio >= 1.8.2, < 2.0dev\",\n \"pyarrow>=0.16.0, < 2.0dev\",\n ],\n \"pandas\": [\"pandas>=0.17.1\"],\n # Exclude PyArrow dependency from Windows Python 2.7.\n 'pyarrow: platform_system != \"Windows\" or python_version >= \"3.4\"': [\n # Bad Linux release for 0.14.0.\n # https://issues.apache.org/jira/browse/ARROW-5868\n \"pyarrow>=0.4.1, != 0.14.0\"\n ],\n \"tqdm\": [\"tqdm >= 4.0.0, <5.0.0dev\"],\n \"fastparquet\": [\n \"fastparquet\",\n \"python-snappy\",\n # llvmlite >= 0.32.0 cannot be installed on Python 3.5 and below\n # (building the wheel fails), thus needs to be restricted.\n # See: https://github.com/googleapis/python-bigquery/issues/78\n \"llvmlite <= 0.31.0\",\n ],\n}\n\nall_extras = []\n\nfor extra in extras:\n if extra == \"fastparquet\":\n # Skip fastparquet from \"all\" because it is redundant with pyarrow and\n # creates a dependency on pre-release versions of numpy. See:\n # https://github.com/googleapis/google-cloud-python/issues/8549\n continue\n all_extras.extend(extras[extra])\n\nextras[\"all\"] = all_extras\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, \"README.rst\")\nwith io.open(readme_filename, encoding=\"utf-8\") as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages() if package.startswith(\"google\")\n]\n\n# Determine which namespaces are needed.\nnamespaces = [\"google\"]\nif \"google.cloud\" in packages:\n namespaces.append(\"google.cloud\")\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author=\"Google LLC\",\n author_email=\"[email protected]\",\n license=\"Apache 2.0\",\n url=\"https://github.com/googleapis/python-bigquery\",\n classifiers=[\n release_status,\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet\",\n ],\n platforms=\"Posix; MacOS X; Windows\",\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n python_requires=\">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*\",\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport io\nimport os\n\nimport setuptools\n\n\n# Package 
metadata.\n\nname = \"google-cloud-bigquery\"\ndescription = \"Google BigQuery API client library\"\nversion = \"1.25.0\"\n# Should be one of:\n# 'Development Status :: 3 - Alpha'\n# 'Development Status :: 4 - Beta'\n# 'Development Status :: 5 - Production/Stable'\nrelease_status = \"Development Status :: 5 - Production/Stable\"\ndependencies = [\n 'enum34; python_version < \"3.4\"',\n \"google-api-core >= 1.21.0, < 2.0dev\",\n \"google-cloud-core >= 1.1.0, < 2.0dev\",\n \"google-resumable-media >= 0.5.0, < 0.6dev\",\n \"six >=1.13.0,< 2.0.0dev\",\n]\nextras = {\n \"bqstorage\": [\n \"google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev\",\n # Due to an issue in pip's dependency resolver, the `grpc` extra is not\n # installed, even though `google-cloud-bigquery-storage` specifies it\n # as `google-api-core[grpc]`. We thus need to explicitly specify it here.\n # See: https://github.com/googleapis/python-bigquery/issues/83\n \"grpcio >= 1.8.2, < 2.0dev\",\n \"pyarrow>=0.16.0, < 2.0dev\",\n ],\n \"pandas\": [\"pandas>=0.17.1\"],\n # Exclude PyArrow dependency from Windows Python 2.7.\n 'pyarrow: platform_system != \"Windows\" or python_version >= \"3.4\"': [\n # Bad Linux release for 0.14.0.\n # https://issues.apache.org/jira/browse/ARROW-5868\n \"pyarrow>=0.4.1, != 0.14.0\"\n ],\n \"tqdm\": [\"tqdm >= 4.0.0, <5.0.0dev\"],\n \"fastparquet\": [\n \"fastparquet\",\n \"python-snappy\",\n # llvmlite >= 0.32.0 cannot be installed on Python 3.5 and below\n # (building the wheel fails), thus needs to be restricted.\n # See: https://github.com/googleapis/python-bigquery/issues/78\n \"llvmlite <= 0.31.0\",\n ],\n}\n\nall_extras = []\n\nfor extra in extras:\n if extra == \"fastparquet\":\n # Skip fastparquet from \"all\" because it is redundant with pyarrow and\n # creates a dependency on pre-release versions of numpy. See:\n # https://github.com/googleapis/google-cloud-python/issues/8549\n continue\n all_extras.extend(extras[extra])\n\nextras[\"all\"] = all_extras\n\n# Setup boilerplate below this line.\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, \"README.rst\")\nwith io.open(readme_filename, encoding=\"utf-8\") as readme_file:\n readme = readme_file.read()\n\n# Only include packages under the 'google' namespace. 
Do not include tests,\n# benchmarks, etc.\npackages = [\n package for package in setuptools.find_packages() if package.startswith(\"google\")\n]\n\n# Determine which namespaces are needed.\nnamespaces = [\"google\"]\nif \"google.cloud\" in packages:\n namespaces.append(\"google.cloud\")\n\n\nsetuptools.setup(\n name=name,\n version=version,\n description=description,\n long_description=readme,\n author=\"Google LLC\",\n author_email=\"[email protected]\",\n license=\"Apache 2.0\",\n url=\"https://github.com/googleapis/python-bigquery\",\n classifiers=[\n release_status,\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet\",\n ],\n platforms=\"Posix; MacOS X; Windows\",\n packages=packages,\n namespace_packages=namespaces,\n install_requires=dependencies,\n extras_require=extras,\n python_requires=\">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*\",\n include_package_data=True,\n zip_safe=False,\n)\n", "path": "setup.py"}]}
2,021
286
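The redundancy claim in this issue can be verified locally with the standard library alone. A small illustrative sketch (output depends on the installed `google-api-core` version, and `requires` raises `PackageNotFoundError` if the package is absent):

```python
# List what google-api-core already declares, to confirm that pinning
# google-auth and protobuf again in google-cloud-bigquery is redundant.
from importlib.metadata import requires

for req in requires("google-api-core") or []:
    if req.startswith(("google-auth", "protobuf")):
        print(req)
```

If both names are printed, compatible version constraints are already enforced transitively, which is why the golden diff can drop them (and the `rsa` pins) while only raising the `google-api-core` floor to `1.21.0`.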
gh_patches_debug_38641
rasdani/github-patches
git_diff
abey79__vpype-507
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Remove deprecated APIs --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `vpype/_deprecated.py` Content: ``` 1 from __future__ import annotations 2 3 import logging 4 5 from .config import config_manager 6 7 # deprecated 8 CONFIG_MANAGER = config_manager 9 10 11 def block_processor(*args, **kwargs): # pragma: no cover 12 import vpype_cli 13 14 logging.warning( 15 "!!! `@vpype.block_processor` is deprecated, " 16 "use `@vpype_cli.block_processor` instead." 17 ) 18 return vpype_cli.block_processor(*args, **kwargs) 19 20 21 def generator(*args, **kwargs): # pragma: no cover 22 import vpype_cli 23 24 logging.warning( 25 "!!! `@vpype.generator` is deprecated, use `@vpype_cli.generator` instead." 26 ) 27 return vpype_cli.generator(*args, **kwargs) 28 29 30 def global_processor(*args, **kwargs): # pragma: no cover 31 import vpype_cli 32 33 logging.warning( 34 "!!! `@vpype.global_processor` is deprecated, " 35 "use `@vpype_cli.global_processor` instead." 36 ) 37 return vpype_cli.global_processor(*args, **kwargs) 38 39 40 def layer_processor(*args, **kwargs): # pragma: no cover 41 import vpype_cli 42 43 logging.warning( 44 "!!! `@vpype.layer_processor` is deprecated, use `@vpype_cli.layer_processor` instead." 45 ) 46 return vpype_cli.layer_processor(*args, **kwargs) 47 48 49 def pass_state(*args, **kwargs): # pragma: no cover 50 import vpype_cli 51 52 logging.warning( 53 "!!! `@vpype.pass_state` is deprecated, use `@vpype_cli.pass_state` instead." 54 ) 55 return vpype_cli.pass_state(*args, **kwargs) 56 57 58 class AngleType: # pragma: no cover 59 def __new__(cls): 60 import vpype_cli 61 62 logging.warning( 63 "!!! `vpype.AngleType` is deprecated, use `vpype_cli.AngleType` instead." 64 ) 65 return vpype_cli.AngleType() 66 67 68 class LayerType: # pragma: no cover 69 def __new__(cls, *args, **kwargs): 70 import vpype_cli 71 72 logging.warning( 73 "!!! `vpype.LayerType` is deprecated, use `vpype_cli.LayerType` instead." 74 ) 75 return vpype_cli.LayerType(*args, **kwargs) 76 77 78 class LengthType: # pragma: no cover 79 def __new__(cls): 80 import vpype_cli 81 82 logging.warning( 83 "!!! `vpype.LengthType` is deprecated, use `vpype_cli.LengthType` instead." 84 ) 85 return vpype_cli.LengthType() 86 87 88 class PageSizeType: # pragma: no cover 89 def __new__(cls): 90 import vpype_cli 91 92 logging.warning( 93 "!!! `vpype.PageSizeType` is deprecated, use `vpype_cli.PageSizeType` instead." 94 ) 95 return vpype_cli.PageSizeType() 96 97 98 def multiple_to_layer_ids(*args, **kwargs): # pragma: no cover 99 import vpype_cli 100 101 logging.warning( 102 "!!! `vpype.multiple_to_layer_ids` is deprecated, " 103 "use `vpype_cli.multiple_to_layer_ids` instead." 104 ) 105 return vpype_cli.multiple_to_layer_ids(*args, **kwargs) 106 107 108 def single_to_layer_id(*args, **kwargs): # pragma: no cover 109 import vpype_cli 110 111 logging.warning( 112 "!!! `vpype.single_to_layer_id` is deprecated, " 113 "use `vpype_cli.single_to_layer_id` instead." 
114 ) 115 return vpype_cli.single_to_layer_id(*args, **kwargs) 116 ``` Path: `vpype/__init__.py` Content: ``` 1 """This module contains vpype core and its API.""" 2 3 from ._deprecated import * 4 from .config import * 5 from .filters import * 6 from .geometry import * 7 from .io import * 8 from .line_index import * 9 from .metadata import * 10 from .model import * 11 from .primitives import * 12 from .text import * 13 from .utils import * 14 15 16 def _get_version() -> str: 17 from importlib.metadata import version 18 19 return version(__name__) 20 21 22 __version__ = _get_version() 23 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/vpype/__init__.py b/vpype/__init__.py --- a/vpype/__init__.py +++ b/vpype/__init__.py @@ -1,6 +1,5 @@ """This module contains vpype core and its API.""" -from ._deprecated import * from .config import * from .filters import * from .geometry import * diff --git a/vpype/_deprecated.py b/vpype/_deprecated.py deleted file mode 100644 --- a/vpype/_deprecated.py +++ /dev/null @@ -1,115 +0,0 @@ -from __future__ import annotations - -import logging - -from .config import config_manager - -# deprecated -CONFIG_MANAGER = config_manager - - -def block_processor(*args, **kwargs): # pragma: no cover - import vpype_cli - - logging.warning( - "!!! `@vpype.block_processor` is deprecated, " - "use `@vpype_cli.block_processor` instead." - ) - return vpype_cli.block_processor(*args, **kwargs) - - -def generator(*args, **kwargs): # pragma: no cover - import vpype_cli - - logging.warning( - "!!! `@vpype.generator` is deprecated, use `@vpype_cli.generator` instead." - ) - return vpype_cli.generator(*args, **kwargs) - - -def global_processor(*args, **kwargs): # pragma: no cover - import vpype_cli - - logging.warning( - "!!! `@vpype.global_processor` is deprecated, " - "use `@vpype_cli.global_processor` instead." - ) - return vpype_cli.global_processor(*args, **kwargs) - - -def layer_processor(*args, **kwargs): # pragma: no cover - import vpype_cli - - logging.warning( - "!!! `@vpype.layer_processor` is deprecated, use `@vpype_cli.layer_processor` instead." - ) - return vpype_cli.layer_processor(*args, **kwargs) - - -def pass_state(*args, **kwargs): # pragma: no cover - import vpype_cli - - logging.warning( - "!!! `@vpype.pass_state` is deprecated, use `@vpype_cli.pass_state` instead." - ) - return vpype_cli.pass_state(*args, **kwargs) - - -class AngleType: # pragma: no cover - def __new__(cls): - import vpype_cli - - logging.warning( - "!!! `vpype.AngleType` is deprecated, use `vpype_cli.AngleType` instead." - ) - return vpype_cli.AngleType() - - -class LayerType: # pragma: no cover - def __new__(cls, *args, **kwargs): - import vpype_cli - - logging.warning( - "!!! `vpype.LayerType` is deprecated, use `vpype_cli.LayerType` instead." - ) - return vpype_cli.LayerType(*args, **kwargs) - - -class LengthType: # pragma: no cover - def __new__(cls): - import vpype_cli - - logging.warning( - "!!! `vpype.LengthType` is deprecated, use `vpype_cli.LengthType` instead." - ) - return vpype_cli.LengthType() - - -class PageSizeType: # pragma: no cover - def __new__(cls): - import vpype_cli - - logging.warning( - "!!! `vpype.PageSizeType` is deprecated, use `vpype_cli.PageSizeType` instead." - ) - return vpype_cli.PageSizeType() - - -def multiple_to_layer_ids(*args, **kwargs): # pragma: no cover - import vpype_cli - - logging.warning( - "!!! `vpype.multiple_to_layer_ids` is deprecated, " - "use `vpype_cli.multiple_to_layer_ids` instead." - ) - return vpype_cli.multiple_to_layer_ids(*args, **kwargs) - - -def single_to_layer_id(*args, **kwargs): # pragma: no cover - import vpype_cli - - logging.warning( - "!!! `vpype.single_to_layer_id` is deprecated, " - "use `vpype_cli.single_to_layer_id` instead." - ) - return vpype_cli.single_to_layer_id(*args, **kwargs)
{"golden_diff": "diff --git a/vpype/__init__.py b/vpype/__init__.py\n--- a/vpype/__init__.py\n+++ b/vpype/__init__.py\n@@ -1,6 +1,5 @@\n \"\"\"This module contains vpype core and its API.\"\"\"\n \n-from ._deprecated import *\n from .config import *\n from .filters import *\n from .geometry import *\ndiff --git a/vpype/_deprecated.py b/vpype/_deprecated.py\ndeleted file mode 100644\n--- a/vpype/_deprecated.py\n+++ /dev/null\n@@ -1,115 +0,0 @@\n-from __future__ import annotations\n-\n-import logging\n-\n-from .config import config_manager\n-\n-# deprecated\n-CONFIG_MANAGER = config_manager\n-\n-\n-def block_processor(*args, **kwargs): # pragma: no cover\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `@vpype.block_processor` is deprecated, \"\n- \"use `@vpype_cli.block_processor` instead.\"\n- )\n- return vpype_cli.block_processor(*args, **kwargs)\n-\n-\n-def generator(*args, **kwargs): # pragma: no cover\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `@vpype.generator` is deprecated, use `@vpype_cli.generator` instead.\"\n- )\n- return vpype_cli.generator(*args, **kwargs)\n-\n-\n-def global_processor(*args, **kwargs): # pragma: no cover\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `@vpype.global_processor` is deprecated, \"\n- \"use `@vpype_cli.global_processor` instead.\"\n- )\n- return vpype_cli.global_processor(*args, **kwargs)\n-\n-\n-def layer_processor(*args, **kwargs): # pragma: no cover\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `@vpype.layer_processor` is deprecated, use `@vpype_cli.layer_processor` instead.\"\n- )\n- return vpype_cli.layer_processor(*args, **kwargs)\n-\n-\n-def pass_state(*args, **kwargs): # pragma: no cover\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `@vpype.pass_state` is deprecated, use `@vpype_cli.pass_state` instead.\"\n- )\n- return vpype_cli.pass_state(*args, **kwargs)\n-\n-\n-class AngleType: # pragma: no cover\n- def __new__(cls):\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `vpype.AngleType` is deprecated, use `vpype_cli.AngleType` instead.\"\n- )\n- return vpype_cli.AngleType()\n-\n-\n-class LayerType: # pragma: no cover\n- def __new__(cls, *args, **kwargs):\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `vpype.LayerType` is deprecated, use `vpype_cli.LayerType` instead.\"\n- )\n- return vpype_cli.LayerType(*args, **kwargs)\n-\n-\n-class LengthType: # pragma: no cover\n- def __new__(cls):\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `vpype.LengthType` is deprecated, use `vpype_cli.LengthType` instead.\"\n- )\n- return vpype_cli.LengthType()\n-\n-\n-class PageSizeType: # pragma: no cover\n- def __new__(cls):\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `vpype.PageSizeType` is deprecated, use `vpype_cli.PageSizeType` instead.\"\n- )\n- return vpype_cli.PageSizeType()\n-\n-\n-def multiple_to_layer_ids(*args, **kwargs): # pragma: no cover\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! `vpype.multiple_to_layer_ids` is deprecated, \"\n- \"use `vpype_cli.multiple_to_layer_ids` instead.\"\n- )\n- return vpype_cli.multiple_to_layer_ids(*args, **kwargs)\n-\n-\n-def single_to_layer_id(*args, **kwargs): # pragma: no cover\n- import vpype_cli\n-\n- logging.warning(\n- \"!!! 
`vpype.single_to_layer_id` is deprecated, \"\n- \"use `vpype_cli.single_to_layer_id` instead.\"\n- )\n- return vpype_cli.single_to_layer_id(*args, **kwargs)\n", "issue": "Remove deprecated APIs\n\n", "before_files": [{"content": "from __future__ import annotations\n\nimport logging\n\nfrom .config import config_manager\n\n# deprecated\nCONFIG_MANAGER = config_manager\n\n\ndef block_processor(*args, **kwargs): # pragma: no cover\n import vpype_cli\n\n logging.warning(\n \"!!! `@vpype.block_processor` is deprecated, \"\n \"use `@vpype_cli.block_processor` instead.\"\n )\n return vpype_cli.block_processor(*args, **kwargs)\n\n\ndef generator(*args, **kwargs): # pragma: no cover\n import vpype_cli\n\n logging.warning(\n \"!!! `@vpype.generator` is deprecated, use `@vpype_cli.generator` instead.\"\n )\n return vpype_cli.generator(*args, **kwargs)\n\n\ndef global_processor(*args, **kwargs): # pragma: no cover\n import vpype_cli\n\n logging.warning(\n \"!!! `@vpype.global_processor` is deprecated, \"\n \"use `@vpype_cli.global_processor` instead.\"\n )\n return vpype_cli.global_processor(*args, **kwargs)\n\n\ndef layer_processor(*args, **kwargs): # pragma: no cover\n import vpype_cli\n\n logging.warning(\n \"!!! `@vpype.layer_processor` is deprecated, use `@vpype_cli.layer_processor` instead.\"\n )\n return vpype_cli.layer_processor(*args, **kwargs)\n\n\ndef pass_state(*args, **kwargs): # pragma: no cover\n import vpype_cli\n\n logging.warning(\n \"!!! `@vpype.pass_state` is deprecated, use `@vpype_cli.pass_state` instead.\"\n )\n return vpype_cli.pass_state(*args, **kwargs)\n\n\nclass AngleType: # pragma: no cover\n def __new__(cls):\n import vpype_cli\n\n logging.warning(\n \"!!! `vpype.AngleType` is deprecated, use `vpype_cli.AngleType` instead.\"\n )\n return vpype_cli.AngleType()\n\n\nclass LayerType: # pragma: no cover\n def __new__(cls, *args, **kwargs):\n import vpype_cli\n\n logging.warning(\n \"!!! `vpype.LayerType` is deprecated, use `vpype_cli.LayerType` instead.\"\n )\n return vpype_cli.LayerType(*args, **kwargs)\n\n\nclass LengthType: # pragma: no cover\n def __new__(cls):\n import vpype_cli\n\n logging.warning(\n \"!!! `vpype.LengthType` is deprecated, use `vpype_cli.LengthType` instead.\"\n )\n return vpype_cli.LengthType()\n\n\nclass PageSizeType: # pragma: no cover\n def __new__(cls):\n import vpype_cli\n\n logging.warning(\n \"!!! `vpype.PageSizeType` is deprecated, use `vpype_cli.PageSizeType` instead.\"\n )\n return vpype_cli.PageSizeType()\n\n\ndef multiple_to_layer_ids(*args, **kwargs): # pragma: no cover\n import vpype_cli\n\n logging.warning(\n \"!!! `vpype.multiple_to_layer_ids` is deprecated, \"\n \"use `vpype_cli.multiple_to_layer_ids` instead.\"\n )\n return vpype_cli.multiple_to_layer_ids(*args, **kwargs)\n\n\ndef single_to_layer_id(*args, **kwargs): # pragma: no cover\n import vpype_cli\n\n logging.warning(\n \"!!! 
`vpype.single_to_layer_id` is deprecated, \"\n \"use `vpype_cli.single_to_layer_id` instead.\"\n )\n return vpype_cli.single_to_layer_id(*args, **kwargs)\n", "path": "vpype/_deprecated.py"}, {"content": "\"\"\"This module contains vpype core and its API.\"\"\"\n\nfrom ._deprecated import *\nfrom .config import *\nfrom .filters import *\nfrom .geometry import *\nfrom .io import *\nfrom .line_index import *\nfrom .metadata import *\nfrom .model import *\nfrom .primitives import *\nfrom .text import *\nfrom .utils import *\n\n\ndef _get_version() -> str:\n from importlib.metadata import version\n\n return version(__name__)\n\n\n__version__ = _get_version()\n", "path": "vpype/__init__.py"}], "after_files": [{"content": null, "path": "vpype/_deprecated.py"}, {"content": "\"\"\"This module contains vpype core and its API.\"\"\"\n\nfrom .config import *\nfrom .filters import *\nfrom .geometry import *\nfrom .io import *\nfrom .line_index import *\nfrom .metadata import *\nfrom .model import *\nfrom .primitives import *\nfrom .text import *\nfrom .utils import *\n\n\ndef _get_version() -> str:\n from importlib.metadata import version\n\n return version(__name__)\n\n\n__version__ = _get_version()\n", "path": "vpype/__init__.py"}]}
1,452
993
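Because each deleted shim only emitted a `logging.warning` and delegated to `vpype_cli`, downstream migration is a mechanical rename. A hedged before/after sketch for one decorator (the command body is a placeholder, and the bare-decorator usage is assumed to match how the shims were invoked):

```python
# before (worked, but logged "!!! `@vpype.generator` is deprecated, ..."):
#   import vpype
#   @vpype.generator
#   def lines(): ...

# after: import the symbol from vpype_cli directly.
import vpype_cli

@vpype_cli.generator
def lines():
    ...

# The same 1:1 mapping applies to the other removed names, e.g.
# vpype.LayerType -> vpype_cli.LayerType and
# vpype.multiple_to_layer_ids -> vpype_cli.multiple_to_layer_ids.
```

Note the diff also drops the `CONFIG_MANAGER` alias, which has no `vpype_cli` counterpart; callers can import `config_manager` from `vpype.config`, the module the deleted file aliased it from.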
gh_patches_debug_22779
rasdani/github-patches
git_diff
open-mmlab__mmcv-1905
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- UnicodeDecodeError: 'utf-8' codec can't decode byte 0xd3 in position 0: invalid continuation byte On Windows 11, I installed `mmcv-full 1.5.0` using `pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu113/torch1.11.0/index.html`. Afterwards I used the [test script](https://github.com/open-mmlab/mmcv/blob/master/.dev_scripts/check_installation.py) to check whether the installation had succeeded, and the following error appeared: ```bash C:\Users\ZhangQu\.conda\envs\mmlab\python.exe D:/masterResearch/envtest/check_mmcv.py Start checking the installation of mmcv-full ... CPU ops were compiled successfully. CUDA ops were compiled successfully. mmcv-full has been installed successfully. Traceback (most recent call last): File "D:\masterResearch\envtest\check_mmcv.py", line 40, in <module> env_info_dict = collect_env() File "C:\Users\ZhangQu\.conda\envs\mmlab\lib\site-packages\mmcv\utils\env.py", line 91, in collect_env env_info['MSVC'] = cc.decode(encoding).partition('\n')[0].strip() UnicodeDecodeError: 'utf-8' codec can't decode byte 0xd3 in position 0: invalid continuation byte Process finished with exit code 1 ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `mmcv/utils/env.py` Content: ``` 1 # Copyright (c) OpenMMLab. All rights reserved. 2 """This file holding some environment constant for sharing by other files.""" 3 4 import os.path as osp 5 import subprocess 6 import sys 7 from collections import defaultdict 8 9 import cv2 10 import torch 11 12 import mmcv 13 from .parrots_wrapper import get_build_config 14 15 16 def collect_env(): 17 """Collect the information of the running environments. 18 19 Returns: 20 dict: The environment information. The following fields are contained. 21 22 - sys.platform: The variable of ``sys.platform``. 23 - Python: Python version. 24 - CUDA available: Bool, indicating if CUDA is available. 25 - GPU devices: Device type of each GPU. 26 - CUDA_HOME (optional): The env var ``CUDA_HOME``. 27 - NVCC (optional): NVCC version. 28 - GCC: GCC version, "n/a" if GCC is not installed. 29 - MSVC: Microsoft Virtual C++ Compiler version, Windows only. 30 - PyTorch: PyTorch version. 31 - PyTorch compiling details: The output of \ 32 ``torch.__config__.show()``. 33 - TorchVision (optional): TorchVision version. 34 - OpenCV: OpenCV version. 35 - MMCV: MMCV version. 36 - MMCV Compiler: The GCC version for compiling MMCV ops. 37 - MMCV CUDA Compiler: The CUDA version for compiling MMCV ops. 
38 """ 39 env_info = {} 40 env_info['sys.platform'] = sys.platform 41 env_info['Python'] = sys.version.replace('\n', '') 42 43 cuda_available = torch.cuda.is_available() 44 env_info['CUDA available'] = cuda_available 45 46 if cuda_available: 47 devices = defaultdict(list) 48 for k in range(torch.cuda.device_count()): 49 devices[torch.cuda.get_device_name(k)].append(str(k)) 50 for name, device_ids in devices.items(): 51 env_info['GPU ' + ','.join(device_ids)] = name 52 53 from mmcv.utils.parrots_wrapper import _get_cuda_home 54 CUDA_HOME = _get_cuda_home() 55 env_info['CUDA_HOME'] = CUDA_HOME 56 57 if CUDA_HOME is not None and osp.isdir(CUDA_HOME): 58 try: 59 nvcc = osp.join(CUDA_HOME, 'bin/nvcc') 60 nvcc = subprocess.check_output(f'"{nvcc}" -V', shell=True) 61 nvcc = nvcc.decode('utf-8').strip() 62 release = nvcc.rfind('Cuda compilation tools') 63 build = nvcc.rfind('Build ') 64 nvcc = nvcc[release:build].strip() 65 except subprocess.SubprocessError: 66 nvcc = 'Not Available' 67 env_info['NVCC'] = nvcc 68 69 try: 70 # Check C++ Compiler. 71 # For Unix-like, sysconfig has 'CC' variable like 'gcc -pthread ...', 72 # indicating the compiler used, we use this to get the compiler name 73 import sysconfig 74 cc = sysconfig.get_config_var('CC') 75 if cc: 76 cc = osp.basename(cc.split()[0]) 77 cc_info = subprocess.check_output(f'{cc} --version', shell=True) 78 env_info['GCC'] = cc_info.decode('utf-8').partition( 79 '\n')[0].strip() 80 else: 81 # on Windows, cl.exe is not in PATH. We need to find the path. 82 # distutils.ccompiler.new_compiler() returns a msvccompiler 83 # object and after initialization, path to cl.exe is found. 84 import os 85 from distutils.ccompiler import new_compiler 86 ccompiler = new_compiler() 87 ccompiler.initialize() 88 cc = subprocess.check_output( 89 f'{ccompiler.cc}', stderr=subprocess.STDOUT, shell=True) 90 encoding = os.device_encoding(sys.stdout.fileno()) or 'utf-8' 91 env_info['MSVC'] = cc.decode(encoding).partition('\n')[0].strip() 92 env_info['GCC'] = 'n/a' 93 except subprocess.CalledProcessError: 94 env_info['GCC'] = 'n/a' 95 96 env_info['PyTorch'] = torch.__version__ 97 env_info['PyTorch compiling details'] = get_build_config() 98 99 try: 100 import torchvision 101 env_info['TorchVision'] = torchvision.__version__ 102 except ModuleNotFoundError: 103 pass 104 105 env_info['OpenCV'] = cv2.__version__ 106 107 env_info['MMCV'] = mmcv.__version__ 108 109 try: 110 from mmcv.ops import get_compiler_version, get_compiling_cuda_version 111 except ModuleNotFoundError: 112 env_info['MMCV Compiler'] = 'n/a' 113 env_info['MMCV CUDA Compiler'] = 'n/a' 114 else: 115 env_info['MMCV Compiler'] = get_compiler_version() 116 env_info['MMCV CUDA Compiler'] = get_compiling_cuda_version() 117 118 return env_info 119 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/mmcv/utils/env.py b/mmcv/utils/env.py --- a/mmcv/utils/env.py +++ b/mmcv/utils/env.py @@ -81,13 +81,15 @@ # on Windows, cl.exe is not in PATH. We need to find the path. # distutils.ccompiler.new_compiler() returns a msvccompiler # object and after initialization, path to cl.exe is found. + import locale import os from distutils.ccompiler import new_compiler ccompiler = new_compiler() ccompiler.initialize() cc = subprocess.check_output( f'{ccompiler.cc}', stderr=subprocess.STDOUT, shell=True) - encoding = os.device_encoding(sys.stdout.fileno()) or 'utf-8' + encoding = os.device_encoding( + sys.stdout.fileno()) or locale.getpreferredencoding() env_info['MSVC'] = cc.decode(encoding).partition('\n')[0].strip() env_info['GCC'] = 'n/a' except subprocess.CalledProcessError:
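The crux of this fix is the decode fallback chain. A minimal sketch of the strategy as patched, using the same stdlib calls as the diff (the byte `0xd3` from the report is consistent with a localized `cl.exe` banner in a non-UTF-8 ANSI code page such as cp936, though that is an inference, not something the record states):

```python
import locale
import os
import sys

def decode_console_output(raw: bytes) -> str:
    # Prefer the console device encoding; when stdout reports none
    # (redirected output, some IDEs), fall back to the locale's
    # preferred encoding instead of hard-coding 'utf-8'.
    encoding = (os.device_encoding(sys.stdout.fileno())
                or locale.getpreferredencoding())
    return raw.decode(encoding).partition('\n')[0].strip()
```

On an English-locale Windows box this behaves like the old code; on a Chinese-locale one it should decode the compiler banner with the ANSI code page, avoiding the `UnicodeDecodeError`.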
{"golden_diff": "diff --git a/mmcv/utils/env.py b/mmcv/utils/env.py\n--- a/mmcv/utils/env.py\n+++ b/mmcv/utils/env.py\n@@ -81,13 +81,15 @@\n # on Windows, cl.exe is not in PATH. We need to find the path.\n # distutils.ccompiler.new_compiler() returns a msvccompiler\n # object and after initialization, path to cl.exe is found.\n+ import locale\n import os\n from distutils.ccompiler import new_compiler\n ccompiler = new_compiler()\n ccompiler.initialize()\n cc = subprocess.check_output(\n f'{ccompiler.cc}', stderr=subprocess.STDOUT, shell=True)\n- encoding = os.device_encoding(sys.stdout.fileno()) or 'utf-8'\n+ encoding = os.device_encoding(\n+ sys.stdout.fileno()) or locale.getpreferredencoding()\n env_info['MSVC'] = cc.decode(encoding).partition('\\n')[0].strip()\n env_info['GCC'] = 'n/a'\n except subprocess.CalledProcessError:\n", "issue": "UnicodeDecodeError: 'utf-8' codec can't decode byte 0xd3 in position 0: invalid continuation byte\n\u5728windows11\u4e0a\u9762\uff0c\u6211\u4f7f\u7528`pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu113/torch1.11.0/index.html`\u5b89\u88c5\u4e86`mmcv-full 1.5.0`\u3002\r\n\u4e4b\u540e\u4f7f\u7528[\u6d4b\u8bd5\u811a\u672c](https://github.com/open-mmlab/mmcv/blob/master/.dev_scripts/check_installation.py)\u6d4b\u8bd5\u662f\u5426\u5b89\u88c5\u6210\u529f\uff0c\u51fa\u73b0\u4e86\u5982\u4e0b\u9519\u8bef:\r\n```bash\r\nC:\\Users\\ZhangQu\\.conda\\envs\\mmlab\\python.exe D:/masterResearch/envtest/check_mmcv.py\r\nStart checking the installation of mmcv-full ...\r\nCPU ops were compiled successfully.\r\nCUDA ops were compiled successfully.\r\nmmcv-full has been installed successfully.\r\n\r\nTraceback (most recent call last):\r\n File \"D:\\masterResearch\\envtest\\check_mmcv.py\", line 40, in <module>\r\n env_info_dict = collect_env()\r\n File \"C:\\Users\\ZhangQu\\.conda\\envs\\mmlab\\lib\\site-packages\\mmcv\\utils\\env.py\", line 91, in collect_env\r\n env_info['MSVC'] = cc.decode(encoding).partition('\\n')[0].strip()\r\nUnicodeDecodeError: 'utf-8' codec can't decode byte 0xd3 in position 0: invalid continuation byte\r\n\r\nProcess finished with exit code 1\r\n```\n", "before_files": [{"content": "# Copyright (c) OpenMMLab. All rights reserved.\n\"\"\"This file holding some environment constant for sharing by other files.\"\"\"\n\nimport os.path as osp\nimport subprocess\nimport sys\nfrom collections import defaultdict\n\nimport cv2\nimport torch\n\nimport mmcv\nfrom .parrots_wrapper import get_build_config\n\n\ndef collect_env():\n \"\"\"Collect the information of the running environments.\n\n Returns:\n dict: The environment information. 
The following fields are contained.\n\n - sys.platform: The variable of ``sys.platform``.\n - Python: Python version.\n - CUDA available: Bool, indicating if CUDA is available.\n - GPU devices: Device type of each GPU.\n - CUDA_HOME (optional): The env var ``CUDA_HOME``.\n - NVCC (optional): NVCC version.\n - GCC: GCC version, \"n/a\" if GCC is not installed.\n - MSVC: Microsoft Virtual C++ Compiler version, Windows only.\n - PyTorch: PyTorch version.\n - PyTorch compiling details: The output of \\\n ``torch.__config__.show()``.\n - TorchVision (optional): TorchVision version.\n - OpenCV: OpenCV version.\n - MMCV: MMCV version.\n - MMCV Compiler: The GCC version for compiling MMCV ops.\n - MMCV CUDA Compiler: The CUDA version for compiling MMCV ops.\n \"\"\"\n env_info = {}\n env_info['sys.platform'] = sys.platform\n env_info['Python'] = sys.version.replace('\\n', '')\n\n cuda_available = torch.cuda.is_available()\n env_info['CUDA available'] = cuda_available\n\n if cuda_available:\n devices = defaultdict(list)\n for k in range(torch.cuda.device_count()):\n devices[torch.cuda.get_device_name(k)].append(str(k))\n for name, device_ids in devices.items():\n env_info['GPU ' + ','.join(device_ids)] = name\n\n from mmcv.utils.parrots_wrapper import _get_cuda_home\n CUDA_HOME = _get_cuda_home()\n env_info['CUDA_HOME'] = CUDA_HOME\n\n if CUDA_HOME is not None and osp.isdir(CUDA_HOME):\n try:\n nvcc = osp.join(CUDA_HOME, 'bin/nvcc')\n nvcc = subprocess.check_output(f'\"{nvcc}\" -V', shell=True)\n nvcc = nvcc.decode('utf-8').strip()\n release = nvcc.rfind('Cuda compilation tools')\n build = nvcc.rfind('Build ')\n nvcc = nvcc[release:build].strip()\n except subprocess.SubprocessError:\n nvcc = 'Not Available'\n env_info['NVCC'] = nvcc\n\n try:\n # Check C++ Compiler.\n # For Unix-like, sysconfig has 'CC' variable like 'gcc -pthread ...',\n # indicating the compiler used, we use this to get the compiler name\n import sysconfig\n cc = sysconfig.get_config_var('CC')\n if cc:\n cc = osp.basename(cc.split()[0])\n cc_info = subprocess.check_output(f'{cc} --version', shell=True)\n env_info['GCC'] = cc_info.decode('utf-8').partition(\n '\\n')[0].strip()\n else:\n # on Windows, cl.exe is not in PATH. We need to find the path.\n # distutils.ccompiler.new_compiler() returns a msvccompiler\n # object and after initialization, path to cl.exe is found.\n import os\n from distutils.ccompiler import new_compiler\n ccompiler = new_compiler()\n ccompiler.initialize()\n cc = subprocess.check_output(\n f'{ccompiler.cc}', stderr=subprocess.STDOUT, shell=True)\n encoding = os.device_encoding(sys.stdout.fileno()) or 'utf-8'\n env_info['MSVC'] = cc.decode(encoding).partition('\\n')[0].strip()\n env_info['GCC'] = 'n/a'\n except subprocess.CalledProcessError:\n env_info['GCC'] = 'n/a'\n\n env_info['PyTorch'] = torch.__version__\n env_info['PyTorch compiling details'] = get_build_config()\n\n try:\n import torchvision\n env_info['TorchVision'] = torchvision.__version__\n except ModuleNotFoundError:\n pass\n\n env_info['OpenCV'] = cv2.__version__\n\n env_info['MMCV'] = mmcv.__version__\n\n try:\n from mmcv.ops import get_compiler_version, get_compiling_cuda_version\n except ModuleNotFoundError:\n env_info['MMCV Compiler'] = 'n/a'\n env_info['MMCV CUDA Compiler'] = 'n/a'\n else:\n env_info['MMCV Compiler'] = get_compiler_version()\n env_info['MMCV CUDA Compiler'] = get_compiling_cuda_version()\n\n return env_info\n", "path": "mmcv/utils/env.py"}], "after_files": [{"content": "# Copyright (c) OpenMMLab. 
All rights reserved.\n\"\"\"This file holding some environment constant for sharing by other files.\"\"\"\n\nimport os.path as osp\nimport subprocess\nimport sys\nfrom collections import defaultdict\n\nimport cv2\nimport torch\n\nimport mmcv\nfrom .parrots_wrapper import get_build_config\n\n\ndef collect_env():\n \"\"\"Collect the information of the running environments.\n\n Returns:\n dict: The environment information. The following fields are contained.\n\n - sys.platform: The variable of ``sys.platform``.\n - Python: Python version.\n - CUDA available: Bool, indicating if CUDA is available.\n - GPU devices: Device type of each GPU.\n - CUDA_HOME (optional): The env var ``CUDA_HOME``.\n - NVCC (optional): NVCC version.\n - GCC: GCC version, \"n/a\" if GCC is not installed.\n - MSVC: Microsoft Virtual C++ Compiler version, Windows only.\n - PyTorch: PyTorch version.\n - PyTorch compiling details: The output of \\\n ``torch.__config__.show()``.\n - TorchVision (optional): TorchVision version.\n - OpenCV: OpenCV version.\n - MMCV: MMCV version.\n - MMCV Compiler: The GCC version for compiling MMCV ops.\n - MMCV CUDA Compiler: The CUDA version for compiling MMCV ops.\n \"\"\"\n env_info = {}\n env_info['sys.platform'] = sys.platform\n env_info['Python'] = sys.version.replace('\\n', '')\n\n cuda_available = torch.cuda.is_available()\n env_info['CUDA available'] = cuda_available\n\n if cuda_available:\n devices = defaultdict(list)\n for k in range(torch.cuda.device_count()):\n devices[torch.cuda.get_device_name(k)].append(str(k))\n for name, device_ids in devices.items():\n env_info['GPU ' + ','.join(device_ids)] = name\n\n from mmcv.utils.parrots_wrapper import _get_cuda_home\n CUDA_HOME = _get_cuda_home()\n env_info['CUDA_HOME'] = CUDA_HOME\n\n if CUDA_HOME is not None and osp.isdir(CUDA_HOME):\n try:\n nvcc = osp.join(CUDA_HOME, 'bin/nvcc')\n nvcc = subprocess.check_output(f'\"{nvcc}\" -V', shell=True)\n nvcc = nvcc.decode('utf-8').strip()\n release = nvcc.rfind('Cuda compilation tools')\n build = nvcc.rfind('Build ')\n nvcc = nvcc[release:build].strip()\n except subprocess.SubprocessError:\n nvcc = 'Not Available'\n env_info['NVCC'] = nvcc\n\n try:\n # Check C++ Compiler.\n # For Unix-like, sysconfig has 'CC' variable like 'gcc -pthread ...',\n # indicating the compiler used, we use this to get the compiler name\n import sysconfig\n cc = sysconfig.get_config_var('CC')\n if cc:\n cc = osp.basename(cc.split()[0])\n cc_info = subprocess.check_output(f'{cc} --version', shell=True)\n env_info['GCC'] = cc_info.decode('utf-8').partition(\n '\\n')[0].strip()\n else:\n # on Windows, cl.exe is not in PATH. 
We need to find the path.\n # distutils.ccompiler.new_compiler() returns a msvccompiler\n # object and after initialization, path to cl.exe is found.\n import locale\n import os\n from distutils.ccompiler import new_compiler\n ccompiler = new_compiler()\n ccompiler.initialize()\n cc = subprocess.check_output(\n f'{ccompiler.cc}', stderr=subprocess.STDOUT, shell=True)\n encoding = os.device_encoding(\n sys.stdout.fileno()) or locale.getpreferredencoding()\n env_info['MSVC'] = cc.decode(encoding).partition('\\n')[0].strip()\n env_info['GCC'] = 'n/a'\n except subprocess.CalledProcessError:\n env_info['GCC'] = 'n/a'\n\n env_info['PyTorch'] = torch.__version__\n env_info['PyTorch compiling details'] = get_build_config()\n\n try:\n import torchvision\n env_info['TorchVision'] = torchvision.__version__\n except ModuleNotFoundError:\n pass\n\n env_info['OpenCV'] = cv2.__version__\n\n env_info['MMCV'] = mmcv.__version__\n\n try:\n from mmcv.ops import get_compiler_version, get_compiling_cuda_version\n except ModuleNotFoundError:\n env_info['MMCV Compiler'] = 'n/a'\n env_info['MMCV CUDA Compiler'] = 'n/a'\n else:\n env_info['MMCV Compiler'] = get_compiler_version()\n env_info['MMCV CUDA Compiler'] = get_compiling_cuda_version()\n\n return env_info\n", "path": "mmcv/utils/env.py"}]}
1,872
231
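The fix in the record above replaces a hard-coded `'utf-8'` with the console's device encoding, falling back to the locale's preferred encoding. Below is a minimal sketch of that fallback chain, detached from mmcv; the compiler banner is illustrative sample data (not captured `cl.exe` output), and the decode is strict like the patch, so the demo encodes the ASCII sample with the same locale encoding it decodes with.

```python
# Sketch of the encoding fallback from the mmcv patch above.
# Assumption: the banner text is illustrative, not real compiler output.
import locale
import os
import sys


def decode_tool_output(raw: bytes) -> str:
    """Decode subprocess output with the console's device encoding,
    falling back to locale.getpreferredencoding() instead of 'utf-8',
    which raised UnicodeDecodeError on non-UTF-8 Windows consoles."""
    try:
        encoding = os.device_encoding(sys.stdout.fileno())
    except (OSError, ValueError):  # stdout may be redirected or detached
        encoding = None
    encoding = encoding or locale.getpreferredencoding()
    return raw.decode(encoding).partition("\n")[0].strip()


if __name__ == "__main__":
    # ASCII sample encoded with the fallback encoding, so the strict
    # decode round-trips regardless of which branch is taken.
    sample = "Microsoft (R) C/C++ Optimizing Compiler\nBuild info".encode(
        locale.getpreferredencoding())
    print(decode_tool_output(sample))
```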
gh_patches_debug_33327
rasdani/github-patches
git_diff
comfyanonymous__ComfyUI-2207
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- HyperTile node is nondeterministic across executions and messes with global randomness The HyperTile node uses the random module and seeds the global random with its own counter variable. Unfortunately, this counter variable is retained across executions if the HyperTile parameters don't change, and so every execution will have different results. The effect on global random can be avoided just by using a `random.Random()` instance instead of a counter, but since ComfyUI doesn't provide any kind of after-exec function for nodes, there doesn't seem to be a way to reset it to its initial state after one prompt is executed. I suppose you could work around this by setting having IS_CHANGED return something so that the node gets always executed, thus reinitializing randomness, but that might cause any nodes that come after the HyperTile node to needlessly re-execute. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `comfy_extras/nodes_hypertile.py` Content: ``` 1 #Taken from: https://github.com/tfernd/HyperTile/ 2 3 import math 4 from einops import rearrange 5 import random 6 7 def random_divisor(value: int, min_value: int, /, max_options: int = 1, counter = 0) -> int: 8 min_value = min(min_value, value) 9 10 # All big divisors of value (inclusive) 11 divisors = [i for i in range(min_value, value + 1) if value % i == 0] 12 13 ns = [value // i for i in divisors[:max_options]] # has at least 1 element 14 15 random.seed(counter) 16 idx = random.randint(0, len(ns) - 1) 17 18 return ns[idx] 19 20 class HyperTile: 21 @classmethod 22 def INPUT_TYPES(s): 23 return {"required": { "model": ("MODEL",), 24 "tile_size": ("INT", {"default": 256, "min": 1, "max": 2048}), 25 "swap_size": ("INT", {"default": 2, "min": 1, "max": 128}), 26 "max_depth": ("INT", {"default": 0, "min": 0, "max": 10}), 27 "scale_depth": ("BOOLEAN", {"default": False}), 28 }} 29 RETURN_TYPES = ("MODEL",) 30 FUNCTION = "patch" 31 32 CATEGORY = "_for_testing" 33 34 def patch(self, model, tile_size, swap_size, max_depth, scale_depth): 35 model_channels = model.model.model_config.unet_config["model_channels"] 36 37 apply_to = set() 38 temp = model_channels 39 for x in range(max_depth + 1): 40 apply_to.add(temp) 41 temp *= 2 42 43 latent_tile_size = max(32, tile_size) // 8 44 self.temp = None 45 self.counter = 1 46 47 def hypertile_in(q, k, v, extra_options): 48 if q.shape[-1] in apply_to: 49 shape = extra_options["original_shape"] 50 aspect_ratio = shape[-1] / shape[-2] 51 52 hw = q.size(1) 53 h, w = round(math.sqrt(hw * aspect_ratio)), round(math.sqrt(hw / aspect_ratio)) 54 55 factor = 2**((q.shape[-1] // model_channels) - 1) if scale_depth else 1 56 nh = random_divisor(h, latent_tile_size * factor, swap_size, self.counter) 57 self.counter += 1 58 nw = random_divisor(w, latent_tile_size * factor, swap_size, self.counter) 59 self.counter += 1 60 61 if nh * nw > 1: 62 q = rearrange(q, "b (nh h nw w) c -> (b nh nw) (h w) c", h=h // nh, w=w // nw, nh=nh, nw=nw) 63 self.temp = (nh, nw, h, w) 64 return q, k, v 65 66 return q, k, v 67 def hypertile_out(out, extra_options): 68 if self.temp is not None: 69 nh, nw, h, w = self.temp 70 self.temp = None 71 out = rearrange(out, "(b nh nw) hw c -> b nh nw hw c", nh=nh, nw=nw) 72 out = rearrange(out, "b nh nw (h w) c -> b (nh h nw w) c", h=h // nh, w=w // nw) 73 return out 74 75 76 m = model.clone() 
77 m.set_model_attn1_patch(hypertile_in) 78 m.set_model_attn1_output_patch(hypertile_out) 79 return (m, ) 80 81 NODE_CLASS_MAPPINGS = { 82 "HyperTile": HyperTile, 83 } 84 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/comfy_extras/nodes_hypertile.py b/comfy_extras/nodes_hypertile.py --- a/comfy_extras/nodes_hypertile.py +++ b/comfy_extras/nodes_hypertile.py @@ -2,9 +2,10 @@ import math from einops import rearrange -import random +# Use torch rng for consistency across generations +from torch import randint -def random_divisor(value: int, min_value: int, /, max_options: int = 1, counter = 0) -> int: +def random_divisor(value: int, min_value: int, /, max_options: int = 1) -> int: min_value = min(min_value, value) # All big divisors of value (inclusive) @@ -12,8 +13,7 @@ ns = [value // i for i in divisors[:max_options]] # has at least 1 element - random.seed(counter) - idx = random.randint(0, len(ns) - 1) + idx = randint(low=0, high=len(ns) - 1, size=(1,)).item() return ns[idx] @@ -42,7 +42,6 @@ latent_tile_size = max(32, tile_size) // 8 self.temp = None - self.counter = 1 def hypertile_in(q, k, v, extra_options): if q.shape[-1] in apply_to: @@ -53,10 +52,8 @@ h, w = round(math.sqrt(hw * aspect_ratio)), round(math.sqrt(hw / aspect_ratio)) factor = 2**((q.shape[-1] // model_channels) - 1) if scale_depth else 1 - nh = random_divisor(h, latent_tile_size * factor, swap_size, self.counter) - self.counter += 1 - nw = random_divisor(w, latent_tile_size * factor, swap_size, self.counter) - self.counter += 1 + nh = random_divisor(h, latent_tile_size * factor, swap_size) + nw = random_divisor(w, latent_tile_size * factor, swap_size) if nh * nw > 1: q = rearrange(q, "b (nh h nw w) c -> (b nh nw) (h w) c", h=h // nh, w=w // nw, nh=nh, nw=nw)
{"golden_diff": "diff --git a/comfy_extras/nodes_hypertile.py b/comfy_extras/nodes_hypertile.py\n--- a/comfy_extras/nodes_hypertile.py\n+++ b/comfy_extras/nodes_hypertile.py\n@@ -2,9 +2,10 @@\n \n import math\n from einops import rearrange\n-import random\n+# Use torch rng for consistency across generations\n+from torch import randint\n \n-def random_divisor(value: int, min_value: int, /, max_options: int = 1, counter = 0) -> int:\n+def random_divisor(value: int, min_value: int, /, max_options: int = 1) -> int:\n min_value = min(min_value, value)\n \n # All big divisors of value (inclusive)\n@@ -12,8 +13,7 @@\n \n ns = [value // i for i in divisors[:max_options]] # has at least 1 element\n \n- random.seed(counter)\n- idx = random.randint(0, len(ns) - 1)\n+ idx = randint(low=0, high=len(ns) - 1, size=(1,)).item()\n \n return ns[idx]\n \n@@ -42,7 +42,6 @@\n \n latent_tile_size = max(32, tile_size) // 8\n self.temp = None\n- self.counter = 1\n \n def hypertile_in(q, k, v, extra_options):\n if q.shape[-1] in apply_to:\n@@ -53,10 +52,8 @@\n h, w = round(math.sqrt(hw * aspect_ratio)), round(math.sqrt(hw / aspect_ratio))\n \n factor = 2**((q.shape[-1] // model_channels) - 1) if scale_depth else 1\n- nh = random_divisor(h, latent_tile_size * factor, swap_size, self.counter)\n- self.counter += 1\n- nw = random_divisor(w, latent_tile_size * factor, swap_size, self.counter)\n- self.counter += 1\n+ nh = random_divisor(h, latent_tile_size * factor, swap_size)\n+ nw = random_divisor(w, latent_tile_size * factor, swap_size)\n \n if nh * nw > 1:\n q = rearrange(q, \"b (nh h nw w) c -> (b nh nw) (h w) c\", h=h // nh, w=w // nw, nh=nh, nw=nw)\n", "issue": "HyperTile node is nondeterministic across executions and messes with global randomness\nThe HyperTile node uses the random module and seeds the global random with its own counter variable.\r\n\r\nUnfortunately, this counter variable is retained across executions if the HyperTile parameters don't change, and so every execution will have different results.\r\n\r\nThe effect on global random can be avoided just by using a `random.Random()` instance instead of a counter, but since ComfyUI doesn't provide any kind of after-exec function for nodes, there doesn't seem to be a way to reset it to its initial state after one prompt is executed.\r\n\r\nI suppose you could work around this by setting having IS_CHANGED return something so that the node gets always executed, thus reinitializing randomness, but that might cause any nodes that come after the HyperTile node to needlessly re-execute.\r\n\r\n\n", "before_files": [{"content": "#Taken from: https://github.com/tfernd/HyperTile/\n\nimport math\nfrom einops import rearrange\nimport random\n\ndef random_divisor(value: int, min_value: int, /, max_options: int = 1, counter = 0) -> int:\n min_value = min(min_value, value)\n\n # All big divisors of value (inclusive)\n divisors = [i for i in range(min_value, value + 1) if value % i == 0]\n\n ns = [value // i for i in divisors[:max_options]] # has at least 1 element\n\n random.seed(counter)\n idx = random.randint(0, len(ns) - 1)\n\n return ns[idx]\n\nclass HyperTile:\n @classmethod\n def INPUT_TYPES(s):\n return {\"required\": { \"model\": (\"MODEL\",),\n \"tile_size\": (\"INT\", {\"default\": 256, \"min\": 1, \"max\": 2048}),\n \"swap_size\": (\"INT\", {\"default\": 2, \"min\": 1, \"max\": 128}),\n \"max_depth\": (\"INT\", {\"default\": 0, \"min\": 0, \"max\": 10}),\n \"scale_depth\": (\"BOOLEAN\", {\"default\": False}),\n }}\n RETURN_TYPES = (\"MODEL\",)\n FUNCTION = 
\"patch\"\n\n CATEGORY = \"_for_testing\"\n\n def patch(self, model, tile_size, swap_size, max_depth, scale_depth):\n model_channels = model.model.model_config.unet_config[\"model_channels\"]\n\n apply_to = set()\n temp = model_channels\n for x in range(max_depth + 1):\n apply_to.add(temp)\n temp *= 2\n\n latent_tile_size = max(32, tile_size) // 8\n self.temp = None\n self.counter = 1\n\n def hypertile_in(q, k, v, extra_options):\n if q.shape[-1] in apply_to:\n shape = extra_options[\"original_shape\"]\n aspect_ratio = shape[-1] / shape[-2]\n\n hw = q.size(1)\n h, w = round(math.sqrt(hw * aspect_ratio)), round(math.sqrt(hw / aspect_ratio))\n\n factor = 2**((q.shape[-1] // model_channels) - 1) if scale_depth else 1\n nh = random_divisor(h, latent_tile_size * factor, swap_size, self.counter)\n self.counter += 1\n nw = random_divisor(w, latent_tile_size * factor, swap_size, self.counter)\n self.counter += 1\n\n if nh * nw > 1:\n q = rearrange(q, \"b (nh h nw w) c -> (b nh nw) (h w) c\", h=h // nh, w=w // nw, nh=nh, nw=nw)\n self.temp = (nh, nw, h, w)\n return q, k, v\n\n return q, k, v\n def hypertile_out(out, extra_options):\n if self.temp is not None:\n nh, nw, h, w = self.temp\n self.temp = None\n out = rearrange(out, \"(b nh nw) hw c -> b nh nw hw c\", nh=nh, nw=nw)\n out = rearrange(out, \"b nh nw (h w) c -> b (nh h nw w) c\", h=h // nh, w=w // nw)\n return out\n\n\n m = model.clone()\n m.set_model_attn1_patch(hypertile_in)\n m.set_model_attn1_output_patch(hypertile_out)\n return (m, )\n\nNODE_CLASS_MAPPINGS = {\n \"HyperTile\": HyperTile,\n}\n", "path": "comfy_extras/nodes_hypertile.py"}], "after_files": [{"content": "#Taken from: https://github.com/tfernd/HyperTile/\n\nimport math\nfrom einops import rearrange\n# Use torch rng for consistency across generations\nfrom torch import randint\n\ndef random_divisor(value: int, min_value: int, /, max_options: int = 1) -> int:\n min_value = min(min_value, value)\n\n # All big divisors of value (inclusive)\n divisors = [i for i in range(min_value, value + 1) if value % i == 0]\n\n ns = [value // i for i in divisors[:max_options]] # has at least 1 element\n\n idx = randint(low=0, high=len(ns) - 1, size=(1,)).item()\n\n return ns[idx]\n\nclass HyperTile:\n @classmethod\n def INPUT_TYPES(s):\n return {\"required\": { \"model\": (\"MODEL\",),\n \"tile_size\": (\"INT\", {\"default\": 256, \"min\": 1, \"max\": 2048}),\n \"swap_size\": (\"INT\", {\"default\": 2, \"min\": 1, \"max\": 128}),\n \"max_depth\": (\"INT\", {\"default\": 0, \"min\": 0, \"max\": 10}),\n \"scale_depth\": (\"BOOLEAN\", {\"default\": False}),\n }}\n RETURN_TYPES = (\"MODEL\",)\n FUNCTION = \"patch\"\n\n CATEGORY = \"_for_testing\"\n\n def patch(self, model, tile_size, swap_size, max_depth, scale_depth):\n model_channels = model.model.model_config.unet_config[\"model_channels\"]\n\n apply_to = set()\n temp = model_channels\n for x in range(max_depth + 1):\n apply_to.add(temp)\n temp *= 2\n\n latent_tile_size = max(32, tile_size) // 8\n self.temp = None\n\n def hypertile_in(q, k, v, extra_options):\n if q.shape[-1] in apply_to:\n shape = extra_options[\"original_shape\"]\n aspect_ratio = shape[-1] / shape[-2]\n\n hw = q.size(1)\n h, w = round(math.sqrt(hw * aspect_ratio)), round(math.sqrt(hw / aspect_ratio))\n\n factor = 2**((q.shape[-1] // model_channels) - 1) if scale_depth else 1\n nh = random_divisor(h, latent_tile_size * factor, swap_size)\n nw = random_divisor(w, latent_tile_size * factor, swap_size)\n\n if nh * nw > 1:\n q = rearrange(q, \"b (nh h nw w) c -> (b nh nw) (h w) 
c\", h=h // nh, w=w // nw, nh=nh, nw=nw)\n self.temp = (nh, nw, h, w)\n return q, k, v\n\n return q, k, v\n def hypertile_out(out, extra_options):\n if self.temp is not None:\n nh, nw, h, w = self.temp\n self.temp = None\n out = rearrange(out, \"(b nh nw) hw c -> b nh nw hw c\", nh=nh, nw=nw)\n out = rearrange(out, \"b nh nw (h w) c -> b (nh h nw w) c\", h=h // nh, w=w // nw)\n return out\n\n\n m = model.clone()\n m.set_model_attn1_patch(hypertile_in)\n m.set_model_attn1_output_patch(hypertile_out)\n return (m, )\n\nNODE_CLASS_MAPPINGS = {\n \"HyperTile\": HyperTile,\n}\n", "path": "comfy_extras/nodes_hypertile.py"}]}
1,422
548
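The patch in the record above moves divisor sampling from Python's global `random` module onto torch's generator, so the node no longer reseeds global randomness and becomes reproducible under the seed ComfyUI already sets per prompt. A sketch of that change follows, assuming PyTorch is installed; one deliberate difference from the golden diff is called out in a comment: `torch.randint`'s upper bound is exclusive (unlike `random.randint`), so the sketch passes `high=len(ns)` to keep every divisor reachable.

```python
# Sketch of the rng change above: divisor indices come from torch's
# generator, leaving Python's global `random` state untouched.
import torch


def random_divisor(value: int, min_value: int, max_options: int = 1) -> int:
    min_value = min(min_value, value)
    # All big divisors of value (inclusive)
    divisors = [i for i in range(min_value, value + 1) if value % i == 0]
    ns = [value // i for i in divisors[:max_options]]  # at least 1 element
    # torch.randint's `high` is exclusive, unlike random.randint's
    # inclusive upper bound, so use len(ns) rather than len(ns) - 1.
    idx = torch.randint(low=0, high=len(ns), size=(1,)).item()
    return ns[idx]


if __name__ == "__main__":
    torch.manual_seed(0)
    first = [random_divisor(64, 8, max_options=4) for _ in range(5)]
    torch.manual_seed(0)
    second = [random_divisor(64, 8, max_options=4) for _ in range(5)]
    assert first == second  # same seed -> same divisor sequence
    print(first)
```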
gh_patches_debug_26976
rasdani/github-patches
git_diff
neptune-ai__neptune-client-197
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- experiment.get_system_properties() doesn't return "hostname" I think there is some regression. For recent experiments `experiment.get_properties()` return either an empty dictionary or `{'key1': 'value1', 'key2': '17', 'key3': 'other-value'}` (whatever that is) in case of the sandbox project. For older experiments, I still can get the properties. This is probably a backend issue but there is no better place to put it. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `neptune/internal/streams/channel_writer.py` Content: ``` 1 # 2 # Copyright (c) 2019, Neptune Labs Sp. z o.o. 3 # 4 # Licensed under the Apache License, Version 2.0 (the "License"); 5 # you may not use this file except in compliance with the License. 6 # You may obtain a copy of the License at 7 # 8 # http://www.apache.org/licenses/LICENSE-2.0 9 # 10 # Unless required by applicable law or agreed to in writing, software 11 # distributed under the License is distributed on an "AS IS" BASIS, 12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 # See the License for the specific language governing permissions and 14 # limitations under the License. 15 # 16 17 from __future__ import unicode_literals 18 19 import re 20 import time 21 22 from neptune.internal.channels.channels import ChannelNamespace, ChannelValue, ChannelType 23 24 25 class ChannelWriter(object): 26 __SPLIT_PATTERN = re.compile(r'[\n\r]{1,2}') 27 28 def __init__(self, experiment, channel_name, channel_namespace=ChannelNamespace.USER): 29 self.time_started_ms = time.time() * 1000 30 self._experiment = experiment 31 self._channel_name = channel_name 32 self._channel_namespace = channel_namespace 33 self._data = None 34 35 def write(self, data): 36 if self._data is None: 37 self._data = data 38 else: 39 self._data += data 40 lines = self.__SPLIT_PATTERN.split(self._data) 41 for line in lines[:-1]: 42 value = ChannelValue( 43 x=time.time() * 1000 - self.time_started_ms, 44 y=dict(text_value=str(line)), 45 ts=None 46 ) 47 # pylint: disable=protected-access 48 self._experiment._channels_values_sender.send( 49 channel_name=self._channel_name, 50 channel_type=ChannelType.TEXT.value, 51 channel_value=value, 52 channel_namespace=self._channel_namespace 53 ) 54 55 self._data = lines[-1] 56 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/neptune/internal/streams/channel_writer.py b/neptune/internal/streams/channel_writer.py --- a/neptune/internal/streams/channel_writer.py +++ b/neptune/internal/streams/channel_writer.py @@ -16,8 +16,8 @@ from __future__ import unicode_literals +from datetime import datetime import re -import time from neptune.internal.channels.channels import ChannelNamespace, ChannelValue, ChannelType @@ -26,7 +26,7 @@ __SPLIT_PATTERN = re.compile(r'[\n\r]{1,2}') def __init__(self, experiment, channel_name, channel_namespace=ChannelNamespace.USER): - self.time_started_ms = time.time() * 1000 + self._time_started = experiment.get_system_properties()['created'] self._experiment = experiment self._channel_name = channel_name self._channel_namespace = channel_namespace @@ -40,7 +40,7 @@ lines = self.__SPLIT_PATTERN.split(self._data) for line in lines[:-1]: value = ChannelValue( - x=time.time() * 1000 - self.time_started_ms, + x=(datetime.now(tz=self._time_started.tzinfo) - self._time_started).total_seconds() * 1000, y=dict(text_value=str(line)), ts=None )
{"golden_diff": "diff --git a/neptune/internal/streams/channel_writer.py b/neptune/internal/streams/channel_writer.py\n--- a/neptune/internal/streams/channel_writer.py\n+++ b/neptune/internal/streams/channel_writer.py\n@@ -16,8 +16,8 @@\n \n from __future__ import unicode_literals\n \n+from datetime import datetime\n import re\n-import time\n \n from neptune.internal.channels.channels import ChannelNamespace, ChannelValue, ChannelType\n \n@@ -26,7 +26,7 @@\n __SPLIT_PATTERN = re.compile(r'[\\n\\r]{1,2}')\n \n def __init__(self, experiment, channel_name, channel_namespace=ChannelNamespace.USER):\n- self.time_started_ms = time.time() * 1000\n+ self._time_started = experiment.get_system_properties()['created']\n self._experiment = experiment\n self._channel_name = channel_name\n self._channel_namespace = channel_namespace\n@@ -40,7 +40,7 @@\n lines = self.__SPLIT_PATTERN.split(self._data)\n for line in lines[:-1]:\n value = ChannelValue(\n- x=time.time() * 1000 - self.time_started_ms,\n+ x=(datetime.now(tz=self._time_started.tzinfo) - self._time_started).total_seconds() * 1000,\n y=dict(text_value=str(line)),\n ts=None\n )\n", "issue": "experiment.get_system_properties() doesn't return \"hostname\"\nI think there is some regression. For recent experiments `experiment.get_properties()` return either an empty dictionary or `{'key1': 'value1', 'key2': '17', 'key3': 'other-value'}` (whatever that is) in case of the sandbox project.\r\n\r\nFor older experiments, I still can get the properties. \r\n\r\nThis is probably a backend issue but there is no better place to put it.\n", "before_files": [{"content": "#\n# Copyright (c) 2019, Neptune Labs Sp. z o.o.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom __future__ import unicode_literals\n\nimport re\nimport time\n\nfrom neptune.internal.channels.channels import ChannelNamespace, ChannelValue, ChannelType\n\n\nclass ChannelWriter(object):\n __SPLIT_PATTERN = re.compile(r'[\\n\\r]{1,2}')\n\n def __init__(self, experiment, channel_name, channel_namespace=ChannelNamespace.USER):\n self.time_started_ms = time.time() * 1000\n self._experiment = experiment\n self._channel_name = channel_name\n self._channel_namespace = channel_namespace\n self._data = None\n\n def write(self, data):\n if self._data is None:\n self._data = data\n else:\n self._data += data\n lines = self.__SPLIT_PATTERN.split(self._data)\n for line in lines[:-1]:\n value = ChannelValue(\n x=time.time() * 1000 - self.time_started_ms,\n y=dict(text_value=str(line)),\n ts=None\n )\n # pylint: disable=protected-access\n self._experiment._channels_values_sender.send(\n channel_name=self._channel_name,\n channel_type=ChannelType.TEXT.value,\n channel_value=value,\n channel_namespace=self._channel_namespace\n )\n\n self._data = lines[-1]\n", "path": "neptune/internal/streams/channel_writer.py"}], "after_files": [{"content": "#\n# Copyright (c) 2019, Neptune Labs Sp. 
z o.o.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom __future__ import unicode_literals\n\nfrom datetime import datetime\nimport re\n\nfrom neptune.internal.channels.channels import ChannelNamespace, ChannelValue, ChannelType\n\n\nclass ChannelWriter(object):\n __SPLIT_PATTERN = re.compile(r'[\\n\\r]{1,2}')\n\n def __init__(self, experiment, channel_name, channel_namespace=ChannelNamespace.USER):\n self._time_started = experiment.get_system_properties()['created']\n self._experiment = experiment\n self._channel_name = channel_name\n self._channel_namespace = channel_namespace\n self._data = None\n\n def write(self, data):\n if self._data is None:\n self._data = data\n else:\n self._data += data\n lines = self.__SPLIT_PATTERN.split(self._data)\n for line in lines[:-1]:\n value = ChannelValue(\n x=(datetime.now(tz=self._time_started.tzinfo) - self._time_started).total_seconds() * 1000,\n y=dict(text_value=str(line)),\n ts=None\n )\n # pylint: disable=protected-access\n self._experiment._channels_values_sender.send(\n channel_name=self._channel_name,\n channel_type=ChannelType.TEXT.value,\n channel_value=value,\n channel_namespace=self._channel_namespace\n )\n\n self._data = lines[-1]\n", "path": "neptune/internal/streams/channel_writer.py"}]}
882
302
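The patch in the record above anchors each channel's x-value to the experiment's server-side creation timestamp instead of a locally captured `time.time()`. A minimal sketch of that elapsed-time computation follows; the `created` value here is fabricated for the demo, standing in for `experiment.get_system_properties()['created']`, which the patch treats as a timezone-aware datetime.

```python
# Sketch of the x-coordinate change above: elapsed milliseconds are
# measured from a timezone-aware creation timestamp, and datetime.now()
# is given the same tzinfo so aware/naive subtraction never raises.
from datetime import datetime, timedelta, timezone


def elapsed_ms(created: datetime) -> float:
    return (datetime.now(tz=created.tzinfo) - created).total_seconds() * 1000


if __name__ == "__main__":
    # Illustrative stand-in for the experiment's 'created' property.
    created = datetime.now(timezone.utc) - timedelta(seconds=2)
    print(f"{elapsed_ms(created):.0f} ms since experiment creation")
```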
gh_patches_debug_16917
rasdani/github-patches
git_diff
deeppavlov__DeepPavlov-100
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Files not found while running telegram bot I run telegram bot interface (copy-paste from readme) ``` python -m deeppavlov.deep interactbot deeppavlov/configs/go_bot/gobot_dstc2.json -t TELEGRAM_TOKEN ``` But the error is given: ``` File "/home/ubuntu/work/ipavlov/DeepPavlov/deeppavlov/core/common/file.py", line 22, in read_json with open(fpath) as fin: FileNotFoundError: [Errno 2] No such file or directory: '../telegram_utils/models_info.json' ``` All files have been downloaded and successfully used in training. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `telegram_utils/telegram_ui.py` Content: ``` 1 """ 2 Copyright 2017 Neural Networks and Deep Learning lab, MIPT 3 4 Licensed under the Apache License, Version 2.0 (the "License"); 5 you may not use this file except in compliance with the License. 6 You may obtain a copy of the License at 7 8 http://www.apache.org/licenses/LICENSE-2.0 9 10 Unless required by applicable law or agreed to in writing, software 11 distributed under the License is distributed on an "AS IS" BASIS, 12 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 See the License for the specific language governing permissions and 14 limitations under the License. 15 """ 16 import telebot 17 18 from deeppavlov.core.common.file import read_json 19 from deeppavlov.core.commands.infer import build_model_from_config 20 21 22 def init_bot_for_model(token, model): 23 bot = telebot.TeleBot(token) 24 25 model_name = type(model).__name__ 26 models_info = read_json('../telegram_utils/models_info.json') 27 model_info = models_info[model_name] if model_name in models_info else models_info['@default'] 28 29 @bot.message_handler(commands=['start']) 30 def send_start_message(message): 31 chat_id = message.chat.id 32 out_message = model_info['start_message'] 33 if hasattr(model, 'reset'): 34 model.reset() 35 bot.send_message(chat_id, out_message) 36 37 @bot.message_handler(commands=['help']) 38 def send_help_message(message): 39 chat_id = message.chat.id 40 out_message = model_info['help_message'] 41 bot.send_message(chat_id, out_message) 42 43 @bot.message_handler() 44 def handle_inference(message): 45 chat_id = message.chat.id 46 context = message.text 47 48 pred = model([context]) 49 reply_message = str(pred[0]) 50 bot.send_message(chat_id, reply_message) 51 52 bot.polling() 53 54 55 def interact_model_by_telegram(config_path, token): 56 config = read_json(config_path) 57 model = build_model_from_config(config) 58 init_bot_for_model(token, model) 59 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/telegram_utils/telegram_ui.py b/telegram_utils/telegram_ui.py --- a/telegram_utils/telegram_ui.py +++ b/telegram_utils/telegram_ui.py @@ -13,6 +13,8 @@ See the License for the specific language governing permissions and limitations under the License. """ +from pathlib import Path + import telebot from deeppavlov.core.common.file import read_json @@ -23,7 +25,8 @@ bot = telebot.TeleBot(token) model_name = type(model).__name__ - models_info = read_json('../telegram_utils/models_info.json') + config_path = Path(__file__).parent / 'models_info.json' + models_info = read_json(str(config_path)) model_info = models_info[model_name] if model_name in models_info else models_info['@default'] @bot.message_handler(commands=['start'])
{"golden_diff": "diff --git a/telegram_utils/telegram_ui.py b/telegram_utils/telegram_ui.py\n--- a/telegram_utils/telegram_ui.py\n+++ b/telegram_utils/telegram_ui.py\n@@ -13,6 +13,8 @@\n See the License for the specific language governing permissions and\n limitations under the License.\n \"\"\"\n+from pathlib import Path\n+\n import telebot\n \n from deeppavlov.core.common.file import read_json\n@@ -23,7 +25,8 @@\n bot = telebot.TeleBot(token)\n \n model_name = type(model).__name__\n- models_info = read_json('../telegram_utils/models_info.json')\n+ config_path = Path(__file__).parent / 'models_info.json'\n+ models_info = read_json(str(config_path))\n model_info = models_info[model_name] if model_name in models_info else models_info['@default']\n \n @bot.message_handler(commands=['start'])\n", "issue": "Files not found while running telegram bot \nI run telegram bot interface (copy-paste from readme)\r\n```\r\npython -m deeppavlov.deep interactbot deeppavlov/configs/go_bot/gobot_dstc2.json -t TELEGRAM_TOKEN\r\n```\r\n\r\nBut the error is given:\r\n\r\n``` \r\nFile \"/home/ubuntu/work/ipavlov/DeepPavlov/deeppavlov/core/common/file.py\", line 22, in read_json\r\n with open(fpath) as fin:\r\nFileNotFoundError: [Errno 2] No such file or directory: '../telegram_utils/models_info.json' \r\n```\r\n\r\nAll files have been downloaded and successfully used in training.\n", "before_files": [{"content": "\"\"\"\nCopyright 2017 Neural Networks and Deep Learning lab, MIPT\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\"\"\"\nimport telebot\n\nfrom deeppavlov.core.common.file import read_json\nfrom deeppavlov.core.commands.infer import build_model_from_config\n\n\ndef init_bot_for_model(token, model):\n bot = telebot.TeleBot(token)\n\n model_name = type(model).__name__\n models_info = read_json('../telegram_utils/models_info.json')\n model_info = models_info[model_name] if model_name in models_info else models_info['@default']\n\n @bot.message_handler(commands=['start'])\n def send_start_message(message):\n chat_id = message.chat.id\n out_message = model_info['start_message']\n if hasattr(model, 'reset'):\n model.reset()\n bot.send_message(chat_id, out_message)\n\n @bot.message_handler(commands=['help'])\n def send_help_message(message):\n chat_id = message.chat.id\n out_message = model_info['help_message']\n bot.send_message(chat_id, out_message)\n\n @bot.message_handler()\n def handle_inference(message):\n chat_id = message.chat.id\n context = message.text\n\n pred = model([context])\n reply_message = str(pred[0])\n bot.send_message(chat_id, reply_message)\n\n bot.polling()\n\n\ndef interact_model_by_telegram(config_path, token):\n config = read_json(config_path)\n model = build_model_from_config(config)\n init_bot_for_model(token, model)\n", "path": "telegram_utils/telegram_ui.py"}], "after_files": [{"content": "\"\"\"\nCopyright 2017 Neural Networks and Deep Learning lab, MIPT\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou 
may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\"\"\"\nfrom pathlib import Path\n\nimport telebot\n\nfrom deeppavlov.core.common.file import read_json\nfrom deeppavlov.core.commands.infer import build_model_from_config\n\n\ndef init_bot_for_model(token, model):\n bot = telebot.TeleBot(token)\n\n model_name = type(model).__name__\n config_path = Path(__file__).parent / 'models_info.json'\n models_info = read_json(str(config_path))\n model_info = models_info[model_name] if model_name in models_info else models_info['@default']\n\n @bot.message_handler(commands=['start'])\n def send_start_message(message):\n chat_id = message.chat.id\n out_message = model_info['start_message']\n if hasattr(model, 'reset'):\n model.reset()\n bot.send_message(chat_id, out_message)\n\n @bot.message_handler(commands=['help'])\n def send_help_message(message):\n chat_id = message.chat.id\n out_message = model_info['help_message']\n bot.send_message(chat_id, out_message)\n\n @bot.message_handler()\n def handle_inference(message):\n chat_id = message.chat.id\n context = message.text\n\n pred = model([context])\n reply_message = str(pred[0])\n bot.send_message(chat_id, reply_message)\n\n bot.polling()\n\n\ndef interact_model_by_telegram(config_path, token):\n config = read_json(config_path)\n model = build_model_from_config(config)\n init_bot_for_model(token, model)\n", "path": "telegram_utils/telegram_ui.py"}]}
936
196
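The patch in the record above resolves `models_info.json` relative to the module's own location via `__file__`, rather than a `../`-relative path that only works from one working directory. A sketch of that pattern follows; the fallback dictionary is invented for illustration so the demo runs even when the JSON file is absent.

```python
# Sketch of the path fix above: locate a bundled resource next to the
# module that ships it, instead of relying on the process's CWD.
import json
from pathlib import Path


def load_models_info() -> dict:
    config_path = Path(__file__).parent / "models_info.json"
    if config_path.is_file():
        return json.loads(config_path.read_text())
    # Hypothetical default, only so the sketch is runnable standalone.
    return {"@default": {"start_message": "Hi", "help_message": "Send text"}}


if __name__ == "__main__":
    print(load_models_info())
```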
gh_patches_debug_42952
rasdani/github-patches
git_diff
ansible__ansible-lint-2832
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- no-changelog: check_collection_changelog check the collection structure to confirm if there is a changelog file present at the root of the directory. rule should be optional and only enabled at users discretion. Error level should be Warn/Info (0) based off of code found [here ](https://github.com/ansible/galaxy-importer/blob/master/galaxy_importer/loaders/collection.py#L119) --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `src/ansiblelint/rules/galaxy.py` Content: ``` 1 """Implementation of GalaxyRule.""" 2 from __future__ import annotations 3 4 import sys 5 from functools import total_ordering 6 from typing import TYPE_CHECKING, Any 7 8 from ansiblelint.constants import LINE_NUMBER_KEY 9 from ansiblelint.errors import MatchError 10 from ansiblelint.rules import AnsibleLintRule 11 12 if TYPE_CHECKING: 13 from ansiblelint.file_utils import Lintable 14 15 16 class GalaxyRule(AnsibleLintRule): 17 """Rule for checking collection version is greater than 1.0.0.""" 18 19 id = "galaxy" 20 description = "Confirm via galaxy.yml file if collection version is greater than or equal to 1.0.0" 21 severity = "MEDIUM" 22 tags = ["metadata", "opt-in", "experimental"] 23 version_added = "v6.6.0 (last update)" 24 25 def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: 26 """Return matches found for a specific play (entry in playbook).""" 27 if file.kind != "galaxy": # type: ignore 28 return [] 29 if "version" not in data: 30 return [ 31 self.create_matcherror( 32 message="galaxy.yaml should have version tag.", 33 linenumber=data[LINE_NUMBER_KEY], 34 tag="galaxy[version-missing]", 35 filename=file, 36 ) 37 ] 38 version = data.get("version") 39 if Version(version) < Version("1.0.0"): 40 return [ 41 self.create_matcherror( 42 message="collection version should be greater than or equal to 1.0.0", 43 # pylint: disable=protected-access 44 linenumber=version._line_number, 45 tag="galaxy[version-incorrect]", 46 filename=file, 47 ) 48 ] 49 return [] 50 51 52 @total_ordering 53 class Version: 54 """Simple class to compare arbitrary versions.""" 55 56 def __init__(self, version_string: str): 57 """Construct a Version object.""" 58 self.components = version_string.split(".") 59 60 def __eq__(self, other: object) -> bool: 61 """Implement equality comparison.""" 62 other = _coerce(other) 63 if not isinstance(other, Version): 64 return NotImplemented 65 66 return self.components == other.components 67 68 def __lt__(self, other: Version) -> bool: 69 """Implement lower-than operation.""" 70 other = _coerce(other) 71 if not isinstance(other, Version): 72 return NotImplemented 73 74 return self.components < other.components 75 76 77 def _coerce(other: object) -> Version: 78 if isinstance(other, str): 79 other = Version(other) 80 if isinstance(other, (int, float)): 81 other = Version(str(other)) 82 if isinstance(other, Version): 83 return other 84 raise NotImplementedError(f"Unable to coerce object type {type(other)} to Version") 85 86 87 if "pytest" in sys.modules: # noqa: C901 88 89 from ansiblelint.rules import RulesCollection 90 from ansiblelint.runner import Runner 91 92 def test_galaxy_collection_version_positive() -> None: 93 """Positive test for collection version in galaxy.""" 94 collection = RulesCollection() 95 collection.register(GalaxyRule()) 96 success = 
"examples/collection/galaxy.yml" 97 good_runner = Runner(success, rules=collection) 98 assert [] == good_runner.run() 99 100 def test_galaxy_collection_version_negative() -> None: 101 """Negative test for collection version in galaxy.""" 102 collection = RulesCollection() 103 collection.register(GalaxyRule()) 104 failure = "examples/meta/galaxy.yml" 105 bad_runner = Runner(failure, rules=collection) 106 errs = bad_runner.run() 107 assert len(errs) == 1 108 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/src/ansiblelint/rules/galaxy.py b/src/ansiblelint/rules/galaxy.py --- a/src/ansiblelint/rules/galaxy.py +++ b/src/ansiblelint/rules/galaxy.py @@ -1,6 +1,7 @@ """Implementation of GalaxyRule.""" from __future__ import annotations +import os import sys from functools import total_ordering from typing import TYPE_CHECKING, Any @@ -14,10 +15,10 @@ class GalaxyRule(AnsibleLintRule): - """Rule for checking collection version is greater than 1.0.0.""" + """Rule for checking collection version is greater than 1.0.0 and checking for changelog.""" id = "galaxy" - description = "Confirm via galaxy.yml file if collection version is greater than or equal to 1.0.0" + description = "Confirm via galaxy.yml file if collection version is greater than or equal to 1.0.0 and check for changelog." severity = "MEDIUM" tags = ["metadata", "opt-in", "experimental"] version_added = "v6.6.0 (last update)" @@ -26,18 +27,21 @@ """Return matches found for a specific play (entry in playbook).""" if file.kind != "galaxy": # type: ignore return [] + + results = [] + if "version" not in data: - return [ + results.append( self.create_matcherror( message="galaxy.yaml should have version tag.", linenumber=data[LINE_NUMBER_KEY], tag="galaxy[version-missing]", filename=file, ) - ] + ) version = data.get("version") if Version(version) < Version("1.0.0"): - return [ + results.append( self.create_matcherror( message="collection version should be greater than or equal to 1.0.0", # pylint: disable=protected-access @@ -45,8 +49,33 @@ tag="galaxy[version-incorrect]", filename=file, ) - ] - return [] + ) + + # Changelog Check - building off Galaxy rule as there is no current way to check + # for a nonexistent file + + base_path = os.path.split(str(file.abspath))[0] + changelog_found = 0 + changelog_paths = [ + os.path.join(base_path, "changelogs", "changelog.yaml"), + os.path.join(base_path, "CHANGELOG.rst"), + os.path.join(base_path, "CHANGELOG.md"), + ] + + for path in changelog_paths: + if os.path.isfile(path): + changelog_found = 1 + + if not changelog_found: + results.append( + self.create_matcherror( + message="No changelog found. Please add a changelog file. Refer to the galaxy.md file for more info.", + tag="galaxy[no-changelog]", + filename=file, + ) + ) + + return results @total_ordering @@ -105,3 +134,18 @@ bad_runner = Runner(failure, rules=collection) errs = bad_runner.run() assert len(errs) == 1 + + def test_changelog_present() -> None: + """Positive test for finding a changelog.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + good_runner = Runner("examples/collection/galaxy.yml", rules=collection) + assert [] == good_runner.run() + + def test_changelog_missing() -> None: + """Negative test for finding a changelog.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + bad_runner = Runner("examples/no_changelog/galaxy.yml", rules=collection) + errs = bad_runner.run() + assert len(errs) == 1
{"golden_diff": "diff --git a/src/ansiblelint/rules/galaxy.py b/src/ansiblelint/rules/galaxy.py\n--- a/src/ansiblelint/rules/galaxy.py\n+++ b/src/ansiblelint/rules/galaxy.py\n@@ -1,6 +1,7 @@\n \"\"\"Implementation of GalaxyRule.\"\"\"\n from __future__ import annotations\n \n+import os\n import sys\n from functools import total_ordering\n from typing import TYPE_CHECKING, Any\n@@ -14,10 +15,10 @@\n \n \n class GalaxyRule(AnsibleLintRule):\n- \"\"\"Rule for checking collection version is greater than 1.0.0.\"\"\"\n+ \"\"\"Rule for checking collection version is greater than 1.0.0 and checking for changelog.\"\"\"\n \n id = \"galaxy\"\n- description = \"Confirm via galaxy.yml file if collection version is greater than or equal to 1.0.0\"\n+ description = \"Confirm via galaxy.yml file if collection version is greater than or equal to 1.0.0 and check for changelog.\"\n severity = \"MEDIUM\"\n tags = [\"metadata\", \"opt-in\", \"experimental\"]\n version_added = \"v6.6.0 (last update)\"\n@@ -26,18 +27,21 @@\n \"\"\"Return matches found for a specific play (entry in playbook).\"\"\"\n if file.kind != \"galaxy\": # type: ignore\n return []\n+\n+ results = []\n+\n if \"version\" not in data:\n- return [\n+ results.append(\n self.create_matcherror(\n message=\"galaxy.yaml should have version tag.\",\n linenumber=data[LINE_NUMBER_KEY],\n tag=\"galaxy[version-missing]\",\n filename=file,\n )\n- ]\n+ )\n version = data.get(\"version\")\n if Version(version) < Version(\"1.0.0\"):\n- return [\n+ results.append(\n self.create_matcherror(\n message=\"collection version should be greater than or equal to 1.0.0\",\n # pylint: disable=protected-access\n@@ -45,8 +49,33 @@\n tag=\"galaxy[version-incorrect]\",\n filename=file,\n )\n- ]\n- return []\n+ )\n+\n+ # Changelog Check - building off Galaxy rule as there is no current way to check\n+ # for a nonexistent file\n+\n+ base_path = os.path.split(str(file.abspath))[0]\n+ changelog_found = 0\n+ changelog_paths = [\n+ os.path.join(base_path, \"changelogs\", \"changelog.yaml\"),\n+ os.path.join(base_path, \"CHANGELOG.rst\"),\n+ os.path.join(base_path, \"CHANGELOG.md\"),\n+ ]\n+\n+ for path in changelog_paths:\n+ if os.path.isfile(path):\n+ changelog_found = 1\n+\n+ if not changelog_found:\n+ results.append(\n+ self.create_matcherror(\n+ message=\"No changelog found. Please add a changelog file. Refer to the galaxy.md file for more info.\",\n+ tag=\"galaxy[no-changelog]\",\n+ filename=file,\n+ )\n+ )\n+\n+ return results\n \n \n @total_ordering\n@@ -105,3 +134,18 @@\n bad_runner = Runner(failure, rules=collection)\n errs = bad_runner.run()\n assert len(errs) == 1\n+\n+ def test_changelog_present() -> None:\n+ \"\"\"Positive test for finding a changelog.\"\"\"\n+ collection = RulesCollection()\n+ collection.register(GalaxyRule())\n+ good_runner = Runner(\"examples/collection/galaxy.yml\", rules=collection)\n+ assert [] == good_runner.run()\n+\n+ def test_changelog_missing() -> None:\n+ \"\"\"Negative test for finding a changelog.\"\"\"\n+ collection = RulesCollection()\n+ collection.register(GalaxyRule())\n+ bad_runner = Runner(\"examples/no_changelog/galaxy.yml\", rules=collection)\n+ errs = bad_runner.run()\n+ assert len(errs) == 1\n", "issue": "no-changelog: check_collection_changelog\ncheck the collection structure to confirm if there is a changelog file present at the root of the directory. \r\n\r\nrule should be optional and only enabled at users discretion. 
Error level should be Warn/Info (0)\r\n\r\nbased off of code found [here ](https://github.com/ansible/galaxy-importer/blob/master/galaxy_importer/loaders/collection.py#L119)\n", "before_files": [{"content": "\"\"\"Implementation of GalaxyRule.\"\"\"\nfrom __future__ import annotations\n\nimport sys\nfrom functools import total_ordering\nfrom typing import TYPE_CHECKING, Any\n\nfrom ansiblelint.constants import LINE_NUMBER_KEY\nfrom ansiblelint.errors import MatchError\nfrom ansiblelint.rules import AnsibleLintRule\n\nif TYPE_CHECKING:\n from ansiblelint.file_utils import Lintable\n\n\nclass GalaxyRule(AnsibleLintRule):\n \"\"\"Rule for checking collection version is greater than 1.0.0.\"\"\"\n\n id = \"galaxy\"\n description = \"Confirm via galaxy.yml file if collection version is greater than or equal to 1.0.0\"\n severity = \"MEDIUM\"\n tags = [\"metadata\", \"opt-in\", \"experimental\"]\n version_added = \"v6.6.0 (last update)\"\n\n def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]:\n \"\"\"Return matches found for a specific play (entry in playbook).\"\"\"\n if file.kind != \"galaxy\": # type: ignore\n return []\n if \"version\" not in data:\n return [\n self.create_matcherror(\n message=\"galaxy.yaml should have version tag.\",\n linenumber=data[LINE_NUMBER_KEY],\n tag=\"galaxy[version-missing]\",\n filename=file,\n )\n ]\n version = data.get(\"version\")\n if Version(version) < Version(\"1.0.0\"):\n return [\n self.create_matcherror(\n message=\"collection version should be greater than or equal to 1.0.0\",\n # pylint: disable=protected-access\n linenumber=version._line_number,\n tag=\"galaxy[version-incorrect]\",\n filename=file,\n )\n ]\n return []\n\n\n@total_ordering\nclass Version:\n \"\"\"Simple class to compare arbitrary versions.\"\"\"\n\n def __init__(self, version_string: str):\n \"\"\"Construct a Version object.\"\"\"\n self.components = version_string.split(\".\")\n\n def __eq__(self, other: object) -> bool:\n \"\"\"Implement equality comparison.\"\"\"\n other = _coerce(other)\n if not isinstance(other, Version):\n return NotImplemented\n\n return self.components == other.components\n\n def __lt__(self, other: Version) -> bool:\n \"\"\"Implement lower-than operation.\"\"\"\n other = _coerce(other)\n if not isinstance(other, Version):\n return NotImplemented\n\n return self.components < other.components\n\n\ndef _coerce(other: object) -> Version:\n if isinstance(other, str):\n other = Version(other)\n if isinstance(other, (int, float)):\n other = Version(str(other))\n if isinstance(other, Version):\n return other\n raise NotImplementedError(f\"Unable to coerce object type {type(other)} to Version\")\n\n\nif \"pytest\" in sys.modules: # noqa: C901\n\n from ansiblelint.rules import RulesCollection\n from ansiblelint.runner import Runner\n\n def test_galaxy_collection_version_positive() -> None:\n \"\"\"Positive test for collection version in galaxy.\"\"\"\n collection = RulesCollection()\n collection.register(GalaxyRule())\n success = \"examples/collection/galaxy.yml\"\n good_runner = Runner(success, rules=collection)\n assert [] == good_runner.run()\n\n def test_galaxy_collection_version_negative() -> None:\n \"\"\"Negative test for collection version in galaxy.\"\"\"\n collection = RulesCollection()\n collection.register(GalaxyRule())\n failure = \"examples/meta/galaxy.yml\"\n bad_runner = Runner(failure, rules=collection)\n errs = bad_runner.run()\n assert len(errs) == 1\n", "path": "src/ansiblelint/rules/galaxy.py"}], "after_files": 
[{"content": "\"\"\"Implementation of GalaxyRule.\"\"\"\nfrom __future__ import annotations\n\nimport os\nimport sys\nfrom functools import total_ordering\nfrom typing import TYPE_CHECKING, Any\n\nfrom ansiblelint.constants import LINE_NUMBER_KEY\nfrom ansiblelint.errors import MatchError\nfrom ansiblelint.rules import AnsibleLintRule\n\nif TYPE_CHECKING:\n from ansiblelint.file_utils import Lintable\n\n\nclass GalaxyRule(AnsibleLintRule):\n \"\"\"Rule for checking collection version is greater than 1.0.0 and checking for changelog.\"\"\"\n\n id = \"galaxy\"\n description = \"Confirm via galaxy.yml file if collection version is greater than or equal to 1.0.0 and check for changelog.\"\n severity = \"MEDIUM\"\n tags = [\"metadata\", \"opt-in\", \"experimental\"]\n version_added = \"v6.6.0 (last update)\"\n\n def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]:\n \"\"\"Return matches found for a specific play (entry in playbook).\"\"\"\n if file.kind != \"galaxy\": # type: ignore\n return []\n\n results = []\n\n if \"version\" not in data:\n results.append(\n self.create_matcherror(\n message=\"galaxy.yaml should have version tag.\",\n linenumber=data[LINE_NUMBER_KEY],\n tag=\"galaxy[version-missing]\",\n filename=file,\n )\n )\n version = data.get(\"version\")\n if Version(version) < Version(\"1.0.0\"):\n results.append(\n self.create_matcherror(\n message=\"collection version should be greater than or equal to 1.0.0\",\n # pylint: disable=protected-access\n linenumber=version._line_number,\n tag=\"galaxy[version-incorrect]\",\n filename=file,\n )\n )\n\n # Changelog Check - building off Galaxy rule as there is no current way to check\n # for a nonexistent file\n\n base_path = os.path.split(str(file.abspath))[0]\n changelog_found = 0\n changelog_paths = [\n os.path.join(base_path, \"changelogs\", \"changelog.yaml\"),\n os.path.join(base_path, \"CHANGELOG.rst\"),\n os.path.join(base_path, \"CHANGELOG.md\"),\n ]\n\n for path in changelog_paths:\n if os.path.isfile(path):\n changelog_found = 1\n\n if not changelog_found:\n results.append(\n self.create_matcherror(\n message=\"No changelog found. Please add a changelog file. 
Refer to the galaxy.md file for more info.\",\n tag=\"galaxy[no-changelog]\",\n filename=file,\n )\n )\n\n return results\n\n\n@total_ordering\nclass Version:\n \"\"\"Simple class to compare arbitrary versions.\"\"\"\n\n def __init__(self, version_string: str):\n \"\"\"Construct a Version object.\"\"\"\n self.components = version_string.split(\".\")\n\n def __eq__(self, other: object) -> bool:\n \"\"\"Implement equality comparison.\"\"\"\n other = _coerce(other)\n if not isinstance(other, Version):\n return NotImplemented\n\n return self.components == other.components\n\n def __lt__(self, other: Version) -> bool:\n \"\"\"Implement lower-than operation.\"\"\"\n other = _coerce(other)\n if not isinstance(other, Version):\n return NotImplemented\n\n return self.components < other.components\n\n\ndef _coerce(other: object) -> Version:\n if isinstance(other, str):\n other = Version(other)\n if isinstance(other, (int, float)):\n other = Version(str(other))\n if isinstance(other, Version):\n return other\n raise NotImplementedError(f\"Unable to coerce object type {type(other)} to Version\")\n\n\nif \"pytest\" in sys.modules: # noqa: C901\n\n from ansiblelint.rules import RulesCollection\n from ansiblelint.runner import Runner\n\n def test_galaxy_collection_version_positive() -> None:\n \"\"\"Positive test for collection version in galaxy.\"\"\"\n collection = RulesCollection()\n collection.register(GalaxyRule())\n success = \"examples/collection/galaxy.yml\"\n good_runner = Runner(success, rules=collection)\n assert [] == good_runner.run()\n\n def test_galaxy_collection_version_negative() -> None:\n \"\"\"Negative test for collection version in galaxy.\"\"\"\n collection = RulesCollection()\n collection.register(GalaxyRule())\n failure = \"examples/meta/galaxy.yml\"\n bad_runner = Runner(failure, rules=collection)\n errs = bad_runner.run()\n assert len(errs) == 1\n\n def test_changelog_present() -> None:\n \"\"\"Positive test for finding a changelog.\"\"\"\n collection = RulesCollection()\n collection.register(GalaxyRule())\n good_runner = Runner(\"examples/collection/galaxy.yml\", rules=collection)\n assert [] == good_runner.run()\n\n def test_changelog_missing() -> None:\n \"\"\"Negative test for finding a changelog.\"\"\"\n collection = RulesCollection()\n collection.register(GalaxyRule())\n bad_runner = Runner(\"examples/no_changelog/galaxy.yml\", rules=collection)\n errs = bad_runner.run()\n assert len(errs) == 1\n", "path": "src/ansiblelint/rules/galaxy.py"}]}
1,351
888
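The changelog lookup this patch introduces is easy to exercise on its own. A minimal standalone sketch (the helper name and docstring are mine; the three candidate paths are the ones from the diff above):

```python
import os

def has_changelog(galaxy_yml_path: str) -> bool:
    """Return True if a changelog file sits at the collection root."""
    base_path = os.path.split(galaxy_yml_path)[0]
    candidates = [
        os.path.join(base_path, "changelogs", "changelog.yaml"),
        os.path.join(base_path, "CHANGELOG.rst"),
        os.path.join(base_path, "CHANGELOG.md"),
    ]
    # Any one of the three conventional locations satisfies the rule.
    return any(os.path.isfile(p) for p in candidates)
```

Note the rule is opt-in (`tags = ["metadata", "opt-in", "experimental"]`), so the missing-changelog match only fires when the user enables it, matching the issue's request that the check stay optional.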
gh_patches_debug_11363
rasdani/github-patches
git_diff
Cog-Creators__Red-DiscordBot-1221
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- [V3 Context] Handle DMs properly during send_interactive (*forwarded from @palmtree5*) ### Type: - Bug ### Brief description of the problem When a user in DM types `more` during a case of `ctx.send_interactive` (for example, a long response to `[p]debug`), an error occurs. ### Expected behavior When a user types `more`, the bot's message *"type `more` to continue"* should be deleted. ### Actual behavior ```py Exception in command 'debug' Traceback (most recent call last): File "/home/palm/redv3/lib/python3.5/site-packages/discord/ext/commands/core.py", line 62, in wrapped ret = yield from coro(*args, **kwargs) File "/home/palm/redv3/lib/python3.5/site-packages/redbot/core/dev_commands.py", line 124, in debug await ctx.send_interactive(self.get_pages(result), box_lang="py") File "/home/palm/redv3/lib/python3.5/site-packages/redbot/core/context.py", line 120, in send_interactive await self.channel.delete_messages((query, resp)) AttributeError: 'DMChannel' object has no attribute 'delete_messages' The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/home/palm/redv3/lib/python3.5/site-packages/discord/ext/commands/bot.py", line 886, in invoke yield from ctx.command.invoke(ctx) File "/home/palm/redv3/lib/python3.5/site-packages/discord/ext/commands/core.py", line 489, in invoke yield from injected(*ctx.args, **ctx.kwargs) File "/home/palm/redv3/lib/python3.5/site-packages/discord/ext/commands/core.py", line 71, in wrapped raise CommandInvokeError(e) from e discord.ext.commands.errors.CommandInvokeError: Command raised an exception: AttributeError: 'DMChannel' object has no attribute 'delete_messages' ``` ### Steps to reproduce 1. Open a DM with the bot 2. Do `[p]debug dir(bot)` 3. Type `more` when queried --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `redbot/core/context.py` Content: ``` 1 """ 2 The purpose of this module is to allow for Red to further customise the command 3 invocation context provided by discord.py. 4 """ 5 import asyncio 6 from typing import Iterable, List 7 8 import discord 9 from discord.ext import commands 10 11 from redbot.core.utils.chat_formatting import box 12 13 __all__ = ["RedContext"] 14 15 TICK = "\N{WHITE HEAVY CHECK MARK}" 16 17 18 class RedContext(commands.Context): 19 """Command invocation context for Red. 20 21 All context passed into commands will be of this type. 22 23 This class inherits from `commands.Context <discord.ext.commands.Context>`. 24 """ 25 26 async def send_help(self) -> List[discord.Message]: 27 """Send the command help message. 28 29 Returns 30 ------- 31 `list` of `discord.Message` 32 A list of help messages which were sent to the user. 33 34 """ 35 command = self.invoked_subcommand or self.command 36 embeds = await self.bot.formatter.format_help_for(self, command) 37 destination = self 38 ret = [] 39 for embed in embeds: 40 try: 41 m = await destination.send(embed=embed) 42 except discord.HTTPException: 43 destination = self.author 44 m = await destination.send(embed=embed) 45 ret.append(m) 46 47 return ret 48 49 async def tick(self) -> bool: 50 """Add a tick reaction to the command message. 51 52 Returns 53 ------- 54 bool 55 :code:`True` if adding the reaction succeeded. 
56 57 """ 58 try: 59 await self.message.add_reaction(TICK) 60 except discord.HTTPException: 61 return False 62 else: 63 return True 64 65 async def send_interactive(self, 66 messages: Iterable[str], 67 box_lang: str=None, 68 timeout: int=15) -> List[discord.Message]: 69 """Send multiple messages interactively. 70 71 The user will be prompted for whether or not they would like to view 72 the next message, one at a time. They will also be notified of how 73 many messages are remaining on each prompt. 74 75 Parameters 76 ---------- 77 messages : `iterable` of `str` 78 The messages to send. 79 box_lang : str 80 If specified, each message will be contained within a codeblock of 81 this language. 82 timeout : int 83 How long the user has to respond to the prompt before it times out. 84 After timing out, the bot deletes its prompt message. 85 86 """ 87 messages = tuple(messages) 88 ret = [] 89 90 more_check = lambda m: (m.author == self.author and 91 m.channel == self.channel and 92 m.content.lower() == "more") 93 94 for idx, page in enumerate(messages, 1): 95 if box_lang is None: 96 msg = await self.send(page) 97 else: 98 msg = await self.send(box(page, lang=box_lang)) 99 ret.append(msg) 100 n_remaining = len(messages) - idx 101 if n_remaining > 0: 102 if n_remaining == 1: 103 plural = "" 104 is_are = "is" 105 else: 106 plural = "s" 107 is_are = "are" 108 query = await self.send( 109 "There {} still {} message{} remaining. " 110 "Type `more` to continue." 111 "".format(is_are, n_remaining, plural)) 112 try: 113 resp = await self.bot.wait_for( 114 'message', check=more_check, timeout=timeout) 115 except asyncio.TimeoutError: 116 await query.delete() 117 break 118 else: 119 try: 120 await self.channel.delete_messages((query, resp)) 121 except discord.HTTPException: 122 # In case the bot can't delete other users' messages, 123 # or is not a bot account 124 await query.delete() 125 return ret 126 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/redbot/core/context.py b/redbot/core/context.py --- a/redbot/core/context.py +++ b/redbot/core/context.py @@ -118,8 +118,9 @@ else: try: await self.channel.delete_messages((query, resp)) - except discord.HTTPException: + except (discord.HTTPException, AttributeError): # In case the bot can't delete other users' messages, # or is not a bot account + # or chanel is a DM await query.delete() return ret
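The shipped fix simply widens the `except` to swallow the `AttributeError` raised in DMs. An equivalent, more explicit guard would branch on the channel type first; a sketch of that alternative (assuming discord.py's channel classes, not the patch that was merged):

```python
import discord

async def delete_prompt(channel, query, resp):
    # DMChannel has no delete_messages(), and bots cannot delete other
    # users' messages in DMs anyway, so only remove our own prompt there.
    if isinstance(channel, discord.DMChannel):
        await query.delete()
        return
    try:
        await channel.delete_messages((query, resp))
    except discord.HTTPException:
        # Missing permissions or a non-bot account: fall back to
        # deleting just our own message.
        await query.delete()
```

Catching `AttributeError`, as the patch does, keeps the call site smaller at the cost of masking any other attribute typo inside the `try` block.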
{"golden_diff": "diff --git a/redbot/core/context.py b/redbot/core/context.py\n--- a/redbot/core/context.py\n+++ b/redbot/core/context.py\n@@ -118,8 +118,9 @@\n else:\n try:\n await self.channel.delete_messages((query, resp))\n- except discord.HTTPException:\n+ except (discord.HTTPException, AttributeError):\n # In case the bot can't delete other users' messages,\n # or is not a bot account\n+ # or chanel is a DM\n await query.delete()\n return ret\n", "issue": "[V3 Context] Handle DMs properly during send_interactive\n(*forwarded from @palmtree5*)\r\n\r\n### Type:\r\n\r\n- Bug\r\n\r\n### Brief description of the problem\r\nWhen a user in DM types `more` during a case of `ctx.send_interactive` (for example, a long response to `[p]debug`), an error occurs.\r\n\r\n### Expected behavior\r\nWhen a user types `more`, the bot's message *\"type `more` to continue\"* should be deleted.\r\n\r\n### Actual behavior\r\n```py\r\nException in command 'debug'\r\nTraceback (most recent call last):\r\n File \"/home/palm/redv3/lib/python3.5/site-packages/discord/ext/commands/core.py\", line 62, in wrapped\r\n ret = yield from coro(*args, **kwargs)\r\n File \"/home/palm/redv3/lib/python3.5/site-packages/redbot/core/dev_commands.py\", line 124, in debug\r\n await ctx.send_interactive(self.get_pages(result), box_lang=\"py\")\r\n File \"/home/palm/redv3/lib/python3.5/site-packages/redbot/core/context.py\", line 120, in send_interactive\r\n await self.channel.delete_messages((query, resp))\r\nAttributeError: 'DMChannel' object has no attribute 'delete_messages'\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File \"/home/palm/redv3/lib/python3.5/site-packages/discord/ext/commands/bot.py\", line 886, in invoke\r\n yield from ctx.command.invoke(ctx)\r\n File \"/home/palm/redv3/lib/python3.5/site-packages/discord/ext/commands/core.py\", line 489, in invoke\r\n yield from injected(*ctx.args, **ctx.kwargs)\r\n File \"/home/palm/redv3/lib/python3.5/site-packages/discord/ext/commands/core.py\", line 71, in wrapped\r\n raise CommandInvokeError(e) from e\r\ndiscord.ext.commands.errors.CommandInvokeError: Command raised an exception: AttributeError: 'DMChannel' object has no attribute 'delete_messages'\r\n```\r\n\r\n### Steps to reproduce\r\n\r\n1. Open a DM with the bot\r\n2. Do `[p]debug dir(bot)`\r\n3. 
Type `more` when queried\r\n\n", "before_files": [{"content": "\"\"\"\nThe purpose of this module is to allow for Red to further customise the command\ninvocation context provided by discord.py.\n\"\"\"\nimport asyncio\nfrom typing import Iterable, List\n\nimport discord\nfrom discord.ext import commands\n\nfrom redbot.core.utils.chat_formatting import box\n\n__all__ = [\"RedContext\"]\n\nTICK = \"\\N{WHITE HEAVY CHECK MARK}\"\n\n\nclass RedContext(commands.Context):\n \"\"\"Command invocation context for Red.\n\n All context passed into commands will be of this type.\n\n This class inherits from `commands.Context <discord.ext.commands.Context>`.\n \"\"\"\n\n async def send_help(self) -> List[discord.Message]:\n \"\"\"Send the command help message.\n\n Returns\n -------\n `list` of `discord.Message`\n A list of help messages which were sent to the user.\n\n \"\"\"\n command = self.invoked_subcommand or self.command\n embeds = await self.bot.formatter.format_help_for(self, command)\n destination = self\n ret = []\n for embed in embeds:\n try:\n m = await destination.send(embed=embed)\n except discord.HTTPException:\n destination = self.author\n m = await destination.send(embed=embed)\n ret.append(m)\n\n return ret\n\n async def tick(self) -> bool:\n \"\"\"Add a tick reaction to the command message.\n\n Returns\n -------\n bool\n :code:`True` if adding the reaction succeeded.\n\n \"\"\"\n try:\n await self.message.add_reaction(TICK)\n except discord.HTTPException:\n return False\n else:\n return True\n\n async def send_interactive(self,\n messages: Iterable[str],\n box_lang: str=None,\n timeout: int=15) -> List[discord.Message]:\n \"\"\"Send multiple messages interactively.\n\n The user will be prompted for whether or not they would like to view\n the next message, one at a time. They will also be notified of how\n many messages are remaining on each prompt.\n\n Parameters\n ----------\n messages : `iterable` of `str`\n The messages to send.\n box_lang : str\n If specified, each message will be contained within a codeblock of\n this language.\n timeout : int\n How long the user has to respond to the prompt before it times out.\n After timing out, the bot deletes its prompt message.\n\n \"\"\"\n messages = tuple(messages)\n ret = []\n\n more_check = lambda m: (m.author == self.author and\n m.channel == self.channel and\n m.content.lower() == \"more\")\n\n for idx, page in enumerate(messages, 1):\n if box_lang is None:\n msg = await self.send(page)\n else:\n msg = await self.send(box(page, lang=box_lang))\n ret.append(msg)\n n_remaining = len(messages) - idx\n if n_remaining > 0:\n if n_remaining == 1:\n plural = \"\"\n is_are = \"is\"\n else:\n plural = \"s\"\n is_are = \"are\"\n query = await self.send(\n \"There {} still {} message{} remaining. 
\"\n \"Type `more` to continue.\"\n \"\".format(is_are, n_remaining, plural))\n try:\n resp = await self.bot.wait_for(\n 'message', check=more_check, timeout=timeout)\n except asyncio.TimeoutError:\n await query.delete()\n break\n else:\n try:\n await self.channel.delete_messages((query, resp))\n except discord.HTTPException:\n # In case the bot can't delete other users' messages,\n # or is not a bot account\n await query.delete()\n return ret\n", "path": "redbot/core/context.py"}], "after_files": [{"content": "\"\"\"\nThe purpose of this module is to allow for Red to further customise the command\ninvocation context provided by discord.py.\n\"\"\"\nimport asyncio\nfrom typing import Iterable, List\n\nimport discord\nfrom discord.ext import commands\n\nfrom redbot.core.utils.chat_formatting import box\n\n__all__ = [\"RedContext\"]\n\nTICK = \"\\N{WHITE HEAVY CHECK MARK}\"\n\n\nclass RedContext(commands.Context):\n \"\"\"Command invocation context for Red.\n\n All context passed into commands will be of this type.\n\n This class inherits from `commands.Context <discord.ext.commands.Context>`.\n \"\"\"\n\n async def send_help(self) -> List[discord.Message]:\n \"\"\"Send the command help message.\n\n Returns\n -------\n `list` of `discord.Message`\n A list of help messages which were sent to the user.\n\n \"\"\"\n command = self.invoked_subcommand or self.command\n embeds = await self.bot.formatter.format_help_for(self, command)\n destination = self\n ret = []\n for embed in embeds:\n try:\n m = await destination.send(embed=embed)\n except discord.HTTPException:\n destination = self.author\n m = await destination.send(embed=embed)\n ret.append(m)\n\n return ret\n\n async def tick(self) -> bool:\n \"\"\"Add a tick reaction to the command message.\n\n Returns\n -------\n bool\n :code:`True` if adding the reaction succeeded.\n\n \"\"\"\n try:\n await self.message.add_reaction(TICK)\n except discord.HTTPException:\n return False\n else:\n return True\n\n async def send_interactive(self,\n messages: Iterable[str],\n box_lang: str=None,\n timeout: int=15) -> List[discord.Message]:\n \"\"\"Send multiple messages interactively.\n\n The user will be prompted for whether or not they would like to view\n the next message, one at a time. They will also be notified of how\n many messages are remaining on each prompt.\n\n Parameters\n ----------\n messages : `iterable` of `str`\n The messages to send.\n box_lang : str\n If specified, each message will be contained within a codeblock of\n this language.\n timeout : int\n How long the user has to respond to the prompt before it times out.\n After timing out, the bot deletes its prompt message.\n\n \"\"\"\n messages = tuple(messages)\n ret = []\n\n more_check = lambda m: (m.author == self.author and\n m.channel == self.channel and\n m.content.lower() == \"more\")\n\n for idx, page in enumerate(messages, 1):\n if box_lang is None:\n msg = await self.send(page)\n else:\n msg = await self.send(box(page, lang=box_lang))\n ret.append(msg)\n n_remaining = len(messages) - idx\n if n_remaining > 0:\n if n_remaining == 1:\n plural = \"\"\n is_are = \"is\"\n else:\n plural = \"s\"\n is_are = \"are\"\n query = await self.send(\n \"There {} still {} message{} remaining. 
\"\n \"Type `more` to continue.\"\n \"\".format(is_are, n_remaining, plural))\n try:\n resp = await self.bot.wait_for(\n 'message', check=more_check, timeout=timeout)\n except asyncio.TimeoutError:\n await query.delete()\n break\n else:\n try:\n await self.channel.delete_messages((query, resp))\n except (discord.HTTPException, AttributeError):\n # In case the bot can't delete other users' messages,\n # or is not a bot account\n # or chanel is a DM\n await query.delete()\n return ret\n", "path": "redbot/core/context.py"}]}
1,809
122
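Aside from the DM bug, the `more_check` lambda in `send_interactive` is the standard `wait_for` predicate pattern; rewritten as a named factory it becomes testable without a running bot (a sketch, not part of the patch):

```python
def make_more_check(author, channel):
    """Predicate suitable for bot.wait_for('message', check=...)."""
    def check(m):
        return (m.author == author
                and m.channel == channel
                and m.content.lower() == "more")
    return check
```

The timeout branch in the original then distinguishes "user walked away" (delete the prompt and stop paging) from "user answered" (delete prompt and reply, send the next page).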
gh_patches_debug_4648
rasdani/github-patches
git_diff
kivy__kivy-2196
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- [examples] installation of latest kivy-examples package fails Looks like there is a simple syntax problem in an example... Beleave it is not py3 compatible :/ Package: kivy-examples (1.8.1-daily0+201405040547-2023-testing42~ubuntu14.04.1) ``` File "/usr/share/kivy-examples/widgets/compound_selection.py", line 17 print [x.text for x in self.selected_nodes] ^ SyntaxError: invalid syntax ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `examples/widgets/compound_selection.py` Content: ``` 1 from kivy.uix.gridlayout import GridLayout 2 from kivy.uix.button import Button 3 from kivy.uix.behaviors import CompoundSelectionBehavior 4 from kivy.app import runTouchApp 5 from kivy.core.window import Window 6 7 8 class SelectableGrid(CompoundSelectionBehavior, GridLayout): 9 10 def __init__(self, **kwargs): 11 super(SelectableGrid, self).__init__(**kwargs) 12 keyboard = Window.request_keyboard(None, self) 13 keyboard.bind(on_key_down=self.select_with_key_down, 14 on_key_up=self.select_with_key_up) 15 16 def print_selection(*l): 17 print(x.text for x in self.selected_nodes) 18 self.bind(selected_nodes=print_selection) 19 20 def goto_node(self, key, last_node, last_node_idx): 21 ''' This function is used to go to the node by typing the number 22 of the text of the button. 23 ''' 24 node, idx = super(SelectableGrid, self).goto_node(key, last_node, 25 last_node_idx) 26 if node == last_node: 27 children = self.children 28 for i in range(len(children)): 29 if children[i].text == key: 30 return children[i], i 31 return node, idx 32 33 def select_node(self, node): 34 node.background_color = (1, 0, 0, 1) 35 return super(SelectableGrid, self).select_node(node) 36 37 def deselect_node(self, node): 38 node.background_color = (1, 1, 1, 1) 39 super(SelectableGrid, self).deselect_node(node) 40 41 def do_touch(self, instance, touch): 42 if ('button' in touch.profile and touch.button in 43 ('scrollup', 'scrolldown', 'scrollleft', 'scrollright')) or\ 44 instance.collide_point(*touch.pos): 45 self.select_with_touch(instance, touch) 46 else: 47 return False 48 return True 49 50 51 root = SelectableGrid(cols=5, up_count=5, multiselect=True, scroll_count=1) 52 for i in range(40): 53 c = Button(text=str(i)) 54 c.bind(on_touch_down=root.do_touch) 55 root.add_widget(c) 56 57 runTouchApp(root) 58 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/examples/widgets/compound_selection.py b/examples/widgets/compound_selection.py --- a/examples/widgets/compound_selection.py +++ b/examples/widgets/compound_selection.py @@ -14,7 +14,7 @@ on_key_up=self.select_with_key_up) def print_selection(*l): - print(x.text for x in self.selected_nodes) + print([x.text for x in self.selected_nodes]) self.bind(selected_nodes=print_selection) def goto_node(self, key, last_node, last_node_idx):
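Worth spelling out why the brackets matter: the broken line was already valid Python 3, it just passed a generator object to `print()` instead of the values. A two-line demonstration:

```python
words = ["a", "b", "c"]
print(w.upper() for w in words)    # <generator object <genexpr> at 0x...>
print([w.upper() for w in words])  # ['A', 'B', 'C']
```

So the porting fix is not only `print x` to `print(x)`; a comprehension that relied on the old statement form needs its square brackets restored inside the call, which is exactly what the one-line diff above does.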
{"golden_diff": "diff --git a/examples/widgets/compound_selection.py b/examples/widgets/compound_selection.py\n--- a/examples/widgets/compound_selection.py\n+++ b/examples/widgets/compound_selection.py\n@@ -14,7 +14,7 @@\n on_key_up=self.select_with_key_up)\n \n def print_selection(*l):\n- print(x.text for x in self.selected_nodes)\n+ print([x.text for x in self.selected_nodes])\n self.bind(selected_nodes=print_selection)\n \n def goto_node(self, key, last_node, last_node_idx):\n", "issue": "[examples] installation of latest kivy-examples package fails\nLooks like there is a simple syntax problem in an example...\nBeleave it is not py3 compatible :/\n\nPackage: kivy-examples (1.8.1-daily0+201405040547-2023-testing42~ubuntu14.04.1)\n\n```\n File \"/usr/share/kivy-examples/widgets/compound_selection.py\", line 17\n print [x.text for x in self.selected_nodes]\n ^\nSyntaxError: invalid syntax\n```\n\n", "before_files": [{"content": "from kivy.uix.gridlayout import GridLayout\nfrom kivy.uix.button import Button\nfrom kivy.uix.behaviors import CompoundSelectionBehavior\nfrom kivy.app import runTouchApp\nfrom kivy.core.window import Window\n\n\nclass SelectableGrid(CompoundSelectionBehavior, GridLayout):\n\n def __init__(self, **kwargs):\n super(SelectableGrid, self).__init__(**kwargs)\n keyboard = Window.request_keyboard(None, self)\n keyboard.bind(on_key_down=self.select_with_key_down,\n on_key_up=self.select_with_key_up)\n\n def print_selection(*l):\n print(x.text for x in self.selected_nodes)\n self.bind(selected_nodes=print_selection)\n\n def goto_node(self, key, last_node, last_node_idx):\n ''' This function is used to go to the node by typing the number\n of the text of the button.\n '''\n node, idx = super(SelectableGrid, self).goto_node(key, last_node,\n last_node_idx)\n if node == last_node:\n children = self.children\n for i in range(len(children)):\n if children[i].text == key:\n return children[i], i\n return node, idx\n\n def select_node(self, node):\n node.background_color = (1, 0, 0, 1)\n return super(SelectableGrid, self).select_node(node)\n\n def deselect_node(self, node):\n node.background_color = (1, 1, 1, 1)\n super(SelectableGrid, self).deselect_node(node)\n\n def do_touch(self, instance, touch):\n if ('button' in touch.profile and touch.button in\n ('scrollup', 'scrolldown', 'scrollleft', 'scrollright')) or\\\n instance.collide_point(*touch.pos):\n self.select_with_touch(instance, touch)\n else:\n return False\n return True\n\n\nroot = SelectableGrid(cols=5, up_count=5, multiselect=True, scroll_count=1)\nfor i in range(40):\n c = Button(text=str(i))\n c.bind(on_touch_down=root.do_touch)\n root.add_widget(c)\n\nrunTouchApp(root)\n", "path": "examples/widgets/compound_selection.py"}], "after_files": [{"content": "from kivy.uix.gridlayout import GridLayout\nfrom kivy.uix.button import Button\nfrom kivy.uix.behaviors import CompoundSelectionBehavior\nfrom kivy.app import runTouchApp\nfrom kivy.core.window import Window\n\n\nclass SelectableGrid(CompoundSelectionBehavior, GridLayout):\n\n def __init__(self, **kwargs):\n super(SelectableGrid, self).__init__(**kwargs)\n keyboard = Window.request_keyboard(None, self)\n keyboard.bind(on_key_down=self.select_with_key_down,\n on_key_up=self.select_with_key_up)\n\n def print_selection(*l):\n print([x.text for x in self.selected_nodes])\n self.bind(selected_nodes=print_selection)\n\n def goto_node(self, key, last_node, last_node_idx):\n ''' This function is used to go to the node by typing the number\n of the text of the button.\n '''\n 
node, idx = super(SelectableGrid, self).goto_node(key, last_node,\n last_node_idx)\n if node == last_node:\n children = self.children\n for i in range(len(children)):\n if children[i].text == key:\n return children[i], i\n return node, idx\n\n def select_node(self, node):\n node.background_color = (1, 0, 0, 1)\n return super(SelectableGrid, self).select_node(node)\n\n def deselect_node(self, node):\n node.background_color = (1, 1, 1, 1)\n super(SelectableGrid, self).deselect_node(node)\n\n def do_touch(self, instance, touch):\n if ('button' in touch.profile and touch.button in\n ('scrollup', 'scrolldown', 'scrollleft', 'scrollright')) or\\\n instance.collide_point(*touch.pos):\n self.select_with_touch(instance, touch)\n else:\n return False\n return True\n\n\nroot = SelectableGrid(cols=5, up_count=5, multiselect=True, scroll_count=1)\nfor i in range(40):\n c = Button(text=str(i))\n c.bind(on_touch_down=root.do_touch)\n root.add_widget(c)\n\nrunTouchApp(root)\n", "path": "examples/widgets/compound_selection.py"}]}
961
116
gh_patches_debug_19248
rasdani/github-patches
git_diff
Kinto__kinto-1765
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Exposed providers in openid capabilities are not filtered by `multiauth.policies` For example, with: ```ini kinto.includes = kinto.plugins.openid multiauth.policies = google multiauth.policy.google.use = kinto.plugins.openid.OpenIDConnectPolicy multiauth.policy.google.issuer_url = https://accounts.google.com multiauth.policy.google.client_id = a multiauth.policy.google.client_secret = b multiauth.policy.fxa.use = kinto.plugins.openid.OpenIDConnectPolicy multiauth.policy.fxa.issuer_url = https://accounts.mozilla.org multiauth.policy.fxa.client_id = a multiauth.policy.fxa.client_secret = b ``` `fxa` provider should not be shown in capability. See misbehaving code (OMG a `print()` statement!) https://github.com/Kinto/kinto/blob/bf60689ae016a31b8c5bb1955397be46d7b4acca/kinto/plugins/openid/__init__.py#L89-L95 --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `kinto/plugins/openid/__init__.py` Content: ``` 1 import re 2 3 import requests 4 from pyramid import authentication as base_auth 5 from pyramid.interfaces import IAuthenticationPolicy 6 from zope.interface import implementer 7 8 from kinto.core import logger 9 from kinto.core import utils as core_utils 10 from kinto.core.openapi import OpenAPI 11 12 from .utils import fetch_openid_config 13 14 15 @implementer(IAuthenticationPolicy) 16 class OpenIDConnectPolicy(base_auth.CallbackAuthenticationPolicy): 17 def __init__(self, issuer, client_id, realm='Realm', **kwargs): 18 self.realm = realm 19 self.issuer = issuer 20 self.client_id = client_id 21 self.client_secret = kwargs.get('client_secret', '') 22 self.header_type = kwargs.get('header_type', 'Bearer') 23 self.userid_field = kwargs.get('userid_field', 'sub') 24 self.verification_ttl = int(kwargs.get('verification_ttl_seconds', 86400)) 25 26 # Fetch OpenID config (at instantiation, ie. startup) 27 self.oid_config = fetch_openid_config(issuer) 28 29 self._jwt_keys = None 30 31 def unauthenticated_userid(self, request): 32 """Return the userid or ``None`` if token could not be verified. 33 """ 34 settings = request.registry.settings 35 hmac_secret = settings['userid_hmac_secret'] 36 37 authorization = request.headers.get('Authorization', '') 38 try: 39 authmeth, access_token = authorization.split(' ', 1) 40 except ValueError: 41 return None 42 43 if authmeth.lower() != self.header_type.lower(): 44 return None 45 46 # XXX JWT Access token 47 # https://auth0.com/docs/tokens/access-token#access-token-format 48 49 # Check cache if these tokens were already verified. 50 hmac_tokens = core_utils.hmac_digest(hmac_secret, access_token) 51 cache_key = 'openid:verify:{}'.format(hmac_tokens) 52 payload = request.registry.cache.get(cache_key) 53 if payload is None: 54 # This can take some time. 55 payload = self._verify_token(access_token) 56 if payload is None: 57 return None 58 # Save for next time / refresh ttl. 59 request.registry.cache.set(cache_key, payload, ttl=self.verification_ttl) 60 # Extract meaningful field from userinfo (eg. email or sub) 61 return payload.get(self.userid_field) 62 63 def forget(self, request): 64 """A no-op. Credentials are sent on every request. 65 Return WWW-Authenticate Realm header for Bearer token. 
66 """ 67 return [('WWW-Authenticate', '%s realm="%s"' % (self.header_type, self.realm))] 68 69 def _verify_token(self, access_token): 70 uri = self.oid_config['userinfo_endpoint'] 71 # Opaque access token string. Fetch user info from profile. 72 try: 73 resp = requests.get(uri, headers={'Authorization': 'Bearer ' + access_token}) 74 resp.raise_for_status() 75 userprofile = resp.json() 76 return userprofile 77 78 except (requests.exceptions.HTTPError, ValueError, KeyError) as e: 79 logger.debug('Unable to fetch user profile from %s (%s)' % (uri, e)) 80 return None 81 82 83 def includeme(config): 84 # Activate end-points. 85 config.scan('kinto.plugins.openid.views') 86 87 settings = config.get_settings() 88 89 openid_policies = [] 90 for k, v in settings.items(): 91 m = re.match('multiauth\.policy\.(.*)\.use', k) 92 if m: 93 if v.endswith('OpenIDConnectPolicy'): 94 openid_policies.append(m.group(1)) 95 96 if len(openid_policies) == 0: 97 # Do not add the capability if no policy is configured. 98 return 99 100 providers_infos = [] 101 for name in openid_policies: 102 issuer = settings['multiauth.policy.%s.issuer' % name] 103 openid_config = fetch_openid_config(issuer) 104 105 client_id = settings['multiauth.policy.%s.client_id' % name] 106 header_type = settings.get('multiauth.policy.%s.header_type', 'Bearer') 107 108 providers_infos.append({ 109 'name': name, 110 'issuer': openid_config['issuer'], 111 'auth_path': '/openid/%s/login' % name, 112 'client_id': client_id, 113 'header_type': header_type, 114 'userinfo_endpoint': openid_config['userinfo_endpoint'], 115 }) 116 117 OpenAPI.expose_authentication_method(name, { 118 'type': 'oauth2', 119 'authorizationUrl': openid_config['authorization_endpoint'], 120 }) 121 122 config.add_api_capability( 123 'openid', 124 description='OpenID connect support.', 125 url='http://kinto.readthedocs.io/en/stable/api/1.x/authentication.html', 126 providers=providers_infos) 127 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/kinto/plugins/openid/__init__.py b/kinto/plugins/openid/__init__.py --- a/kinto/plugins/openid/__init__.py +++ b/kinto/plugins/openid/__init__.py @@ -1,7 +1,7 @@ -import re import requests from pyramid import authentication as base_auth +from pyramid.settings import aslist from pyramid.interfaces import IAuthenticationPolicy from zope.interface import implementer @@ -87,11 +87,10 @@ settings = config.get_settings() openid_policies = [] - for k, v in settings.items(): - m = re.match('multiauth\.policy\.(.*)\.use', k) - if m: - if v.endswith('OpenIDConnectPolicy'): - openid_policies.append(m.group(1)) + for policy in aslist(settings['multiauth.policies']): + v = settings.get('multiauth.policy.%s.use' % policy, '') + if v.endswith('OpenIDConnectPolicy'): + openid_policies.append(policy) if len(openid_policies) == 0: # Do not add the capability if no policy is configured.
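The core of the fix, isolated: iterate only over the names listed in `multiauth.policies` rather than regex-scanning every settings key. A dependency-free sketch (plain `str.split()` stands in for `pyramid.settings.aslist`, which likewise splits on whitespace):

```python
def openid_policy_names(settings: dict) -> list:
    names = []
    for policy in settings.get("multiauth.policies", "").split():
        use = settings.get("multiauth.policy.%s.use" % policy, "")
        if use.endswith("OpenIDConnectPolicy"):
            names.append(policy)
    return names

settings = {
    "multiauth.policies": "google",
    "multiauth.policy.google.use": "kinto.plugins.openid.OpenIDConnectPolicy",
    # fxa is configured but not activated, exactly the situation in the issue:
    "multiauth.policy.fxa.use": "kinto.plugins.openid.OpenIDConnectPolicy",
}
assert openid_policy_names(settings) == ["google"]
```

This also makes the `re` import unnecessary, which the patch removes as well.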
{"golden_diff": "diff --git a/kinto/plugins/openid/__init__.py b/kinto/plugins/openid/__init__.py\n--- a/kinto/plugins/openid/__init__.py\n+++ b/kinto/plugins/openid/__init__.py\n@@ -1,7 +1,7 @@\n-import re\n \n import requests\n from pyramid import authentication as base_auth\n+from pyramid.settings import aslist\n from pyramid.interfaces import IAuthenticationPolicy\n from zope.interface import implementer\n \n@@ -87,11 +87,10 @@\n settings = config.get_settings()\n \n openid_policies = []\n- for k, v in settings.items():\n- m = re.match('multiauth\\.policy\\.(.*)\\.use', k)\n- if m:\n- if v.endswith('OpenIDConnectPolicy'):\n- openid_policies.append(m.group(1))\n+ for policy in aslist(settings['multiauth.policies']):\n+ v = settings.get('multiauth.policy.%s.use' % policy, '')\n+ if v.endswith('OpenIDConnectPolicy'):\n+ openid_policies.append(policy)\n \n if len(openid_policies) == 0:\n # Do not add the capability if no policy is configured.\n", "issue": "Exposed providers in openid capabilities are not filtered by `multiauth.policies`\nFor example, with:\r\n\r\n```ini\r\n\r\nkinto.includes = kinto.plugins.openid\r\n\r\nmultiauth.policies = google\r\n\r\nmultiauth.policy.google.use = kinto.plugins.openid.OpenIDConnectPolicy\r\nmultiauth.policy.google.issuer_url = https://accounts.google.com\r\nmultiauth.policy.google.client_id = a\r\nmultiauth.policy.google.client_secret = b\r\n\r\nmultiauth.policy.fxa.use = kinto.plugins.openid.OpenIDConnectPolicy\r\nmultiauth.policy.fxa.issuer_url = https://accounts.mozilla.org\r\nmultiauth.policy.fxa.client_id = a\r\nmultiauth.policy.fxa.client_secret = b\r\n```\r\n\r\n`fxa` provider should not be shown in capability.\r\n\r\nSee misbehaving code (OMG a `print()` statement!)\r\n\r\nhttps://github.com/Kinto/kinto/blob/bf60689ae016a31b8c5bb1955397be46d7b4acca/kinto/plugins/openid/__init__.py#L89-L95\n", "before_files": [{"content": "import re\n\nimport requests\nfrom pyramid import authentication as base_auth\nfrom pyramid.interfaces import IAuthenticationPolicy\nfrom zope.interface import implementer\n\nfrom kinto.core import logger\nfrom kinto.core import utils as core_utils\nfrom kinto.core.openapi import OpenAPI\n\nfrom .utils import fetch_openid_config\n\n\n@implementer(IAuthenticationPolicy)\nclass OpenIDConnectPolicy(base_auth.CallbackAuthenticationPolicy):\n def __init__(self, issuer, client_id, realm='Realm', **kwargs):\n self.realm = realm\n self.issuer = issuer\n self.client_id = client_id\n self.client_secret = kwargs.get('client_secret', '')\n self.header_type = kwargs.get('header_type', 'Bearer')\n self.userid_field = kwargs.get('userid_field', 'sub')\n self.verification_ttl = int(kwargs.get('verification_ttl_seconds', 86400))\n\n # Fetch OpenID config (at instantiation, ie. 
startup)\n self.oid_config = fetch_openid_config(issuer)\n\n self._jwt_keys = None\n\n def unauthenticated_userid(self, request):\n \"\"\"Return the userid or ``None`` if token could not be verified.\n \"\"\"\n settings = request.registry.settings\n hmac_secret = settings['userid_hmac_secret']\n\n authorization = request.headers.get('Authorization', '')\n try:\n authmeth, access_token = authorization.split(' ', 1)\n except ValueError:\n return None\n\n if authmeth.lower() != self.header_type.lower():\n return None\n\n # XXX JWT Access token\n # https://auth0.com/docs/tokens/access-token#access-token-format\n\n # Check cache if these tokens were already verified.\n hmac_tokens = core_utils.hmac_digest(hmac_secret, access_token)\n cache_key = 'openid:verify:{}'.format(hmac_tokens)\n payload = request.registry.cache.get(cache_key)\n if payload is None:\n # This can take some time.\n payload = self._verify_token(access_token)\n if payload is None:\n return None\n # Save for next time / refresh ttl.\n request.registry.cache.set(cache_key, payload, ttl=self.verification_ttl)\n # Extract meaningful field from userinfo (eg. email or sub)\n return payload.get(self.userid_field)\n\n def forget(self, request):\n \"\"\"A no-op. Credentials are sent on every request.\n Return WWW-Authenticate Realm header for Bearer token.\n \"\"\"\n return [('WWW-Authenticate', '%s realm=\"%s\"' % (self.header_type, self.realm))]\n\n def _verify_token(self, access_token):\n uri = self.oid_config['userinfo_endpoint']\n # Opaque access token string. Fetch user info from profile.\n try:\n resp = requests.get(uri, headers={'Authorization': 'Bearer ' + access_token})\n resp.raise_for_status()\n userprofile = resp.json()\n return userprofile\n\n except (requests.exceptions.HTTPError, ValueError, KeyError) as e:\n logger.debug('Unable to fetch user profile from %s (%s)' % (uri, e))\n return None\n\n\ndef includeme(config):\n # Activate end-points.\n config.scan('kinto.plugins.openid.views')\n\n settings = config.get_settings()\n\n openid_policies = []\n for k, v in settings.items():\n m = re.match('multiauth\\.policy\\.(.*)\\.use', k)\n if m:\n if v.endswith('OpenIDConnectPolicy'):\n openid_policies.append(m.group(1))\n\n if len(openid_policies) == 0:\n # Do not add the capability if no policy is configured.\n return\n\n providers_infos = []\n for name in openid_policies:\n issuer = settings['multiauth.policy.%s.issuer' % name]\n openid_config = fetch_openid_config(issuer)\n\n client_id = settings['multiauth.policy.%s.client_id' % name]\n header_type = settings.get('multiauth.policy.%s.header_type', 'Bearer')\n\n providers_infos.append({\n 'name': name,\n 'issuer': openid_config['issuer'],\n 'auth_path': '/openid/%s/login' % name,\n 'client_id': client_id,\n 'header_type': header_type,\n 'userinfo_endpoint': openid_config['userinfo_endpoint'],\n })\n\n OpenAPI.expose_authentication_method(name, {\n 'type': 'oauth2',\n 'authorizationUrl': openid_config['authorization_endpoint'],\n })\n\n config.add_api_capability(\n 'openid',\n description='OpenID connect support.',\n url='http://kinto.readthedocs.io/en/stable/api/1.x/authentication.html',\n providers=providers_infos)\n", "path": "kinto/plugins/openid/__init__.py"}], "after_files": [{"content": "\nimport requests\nfrom pyramid import authentication as base_auth\nfrom pyramid.settings import aslist\nfrom pyramid.interfaces import IAuthenticationPolicy\nfrom zope.interface import implementer\n\nfrom kinto.core import logger\nfrom kinto.core import utils as core_utils\nfrom 
kinto.core.openapi import OpenAPI\n\nfrom .utils import fetch_openid_config\n\n\n@implementer(IAuthenticationPolicy)\nclass OpenIDConnectPolicy(base_auth.CallbackAuthenticationPolicy):\n def __init__(self, issuer, client_id, realm='Realm', **kwargs):\n self.realm = realm\n self.issuer = issuer\n self.client_id = client_id\n self.client_secret = kwargs.get('client_secret', '')\n self.header_type = kwargs.get('header_type', 'Bearer')\n self.userid_field = kwargs.get('userid_field', 'sub')\n self.verification_ttl = int(kwargs.get('verification_ttl_seconds', 86400))\n\n # Fetch OpenID config (at instantiation, ie. startup)\n self.oid_config = fetch_openid_config(issuer)\n\n self._jwt_keys = None\n\n def unauthenticated_userid(self, request):\n \"\"\"Return the userid or ``None`` if token could not be verified.\n \"\"\"\n settings = request.registry.settings\n hmac_secret = settings['userid_hmac_secret']\n\n authorization = request.headers.get('Authorization', '')\n try:\n authmeth, access_token = authorization.split(' ', 1)\n except ValueError:\n return None\n\n if authmeth.lower() != self.header_type.lower():\n return None\n\n # XXX JWT Access token\n # https://auth0.com/docs/tokens/access-token#access-token-format\n\n # Check cache if these tokens were already verified.\n hmac_tokens = core_utils.hmac_digest(hmac_secret, access_token)\n cache_key = 'openid:verify:{}'.format(hmac_tokens)\n payload = request.registry.cache.get(cache_key)\n if payload is None:\n # This can take some time.\n payload = self._verify_token(access_token)\n if payload is None:\n return None\n # Save for next time / refresh ttl.\n request.registry.cache.set(cache_key, payload, ttl=self.verification_ttl)\n # Extract meaningful field from userinfo (eg. email or sub)\n return payload.get(self.userid_field)\n\n def forget(self, request):\n \"\"\"A no-op. Credentials are sent on every request.\n Return WWW-Authenticate Realm header for Bearer token.\n \"\"\"\n return [('WWW-Authenticate', '%s realm=\"%s\"' % (self.header_type, self.realm))]\n\n def _verify_token(self, access_token):\n uri = self.oid_config['userinfo_endpoint']\n # Opaque access token string. 
Fetch user info from profile.\n try:\n resp = requests.get(uri, headers={'Authorization': 'Bearer ' + access_token})\n resp.raise_for_status()\n userprofile = resp.json()\n return userprofile\n\n except (requests.exceptions.HTTPError, ValueError, KeyError) as e:\n logger.debug('Unable to fetch user profile from %s (%s)' % (uri, e))\n return None\n\n\ndef includeme(config):\n # Activate end-points.\n config.scan('kinto.plugins.openid.views')\n\n settings = config.get_settings()\n\n openid_policies = []\n for policy in aslist(settings['multiauth.policies']):\n v = settings.get('multiauth.policy.%s.use' % policy, '')\n if v.endswith('OpenIDConnectPolicy'):\n openid_policies.append(policy)\n\n if len(openid_policies) == 0:\n # Do not add the capability if no policy is configured.\n return\n\n providers_infos = []\n for name in openid_policies:\n issuer = settings['multiauth.policy.%s.issuer' % name]\n openid_config = fetch_openid_config(issuer)\n\n client_id = settings['multiauth.policy.%s.client_id' % name]\n header_type = settings.get('multiauth.policy.%s.header_type', 'Bearer')\n\n providers_infos.append({\n 'name': name,\n 'issuer': openid_config['issuer'],\n 'auth_path': '/openid/%s/login' % name,\n 'client_id': client_id,\n 'header_type': header_type,\n 'userinfo_endpoint': openid_config['userinfo_endpoint'],\n })\n\n OpenAPI.expose_authentication_method(name, {\n 'type': 'oauth2',\n 'authorizationUrl': openid_config['authorization_endpoint'],\n })\n\n config.add_api_capability(\n 'openid',\n description='OpenID connect support.',\n url='http://kinto.readthedocs.io/en/stable/api/1.x/authentication.html',\n providers=providers_infos)\n", "path": "kinto/plugins/openid/__init__.py"}]}
1,776
260
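One other detail of this file worth noting: verified tokens are cached under `hmac_digest(secret, access_token)`, so the raw bearer token never becomes a cache key. The same idea with the standard library (sha256 is my assumption; the file delegates to `core_utils.hmac_digest`):

```python
import hashlib
import hmac

def verify_cache_key(secret: str, access_token: str) -> str:
    digest = hmac.new(secret.encode("utf-8"),
                      access_token.encode("utf-8"),
                      hashlib.sha256).hexdigest()
    return "openid:verify:{}".format(digest)
```

Entries are then stored with `ttl=self.verification_ttl`, which bounds how long a revoked token keeps passing verification.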
gh_patches_debug_24461
rasdani/github-patches
git_diff
pytorch__TensorRT-2375
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- 🐛 [Bug] Issue in `ConstantFolder` where certain operators no longer exist in latest nightly ## Bug Description [See here](https://github.com/pytorch/pytorch/blob/4b881b0da390c1290bb12850ef9daad6f6eb2cb6/torch/_inductor/constant_folding.py#L53-L63) ## To Reproduce See recent PR ## Expected behavior These tests should not fail. ## Environment > Build information about Torch-TensorRT can be found by turning on debug messages - Torch-TensorRT Version (e.g. 1.0.0): https://github.com/pytorch/TensorRT/commit/0ef47c78ce94f610ee2a3a5a62e6fb450d4a8b34 - PyTorch Version (e.g. 1.0): `2.2.0.dev20231009+cu121` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py` Content: ``` 1 import logging 2 from typing import Sequence 3 4 import torch 5 from torch_tensorrt._utils import sanitized_torch_version 6 from torch_tensorrt.dynamo.lowering.passes.pass_utils import ( 7 clean_up_graph_after_modifications, 8 ) 9 10 from packaging import version 11 12 # Modify import location of utilities based on Torch version 13 if version.parse(sanitized_torch_version()) < version.parse("2.1.1"): 14 from torch._inductor.freezing import ConstantFolder, replace_node_with_constant 15 else: 16 from torch._inductor.constant_folding import ( 17 ConstantFolder, 18 replace_node_with_constant, 19 ) 20 21 logger = logging.getLogger(__name__) 22 23 24 @torch.utils._python_dispatch._disable_current_modes() # type: ignore 25 def constant_fold( 26 gm: torch.fx.GraphModule, sample_inputs: Sequence[torch.Tensor] 27 ) -> torch.fx.GraphModule: 28 """Adapted from: 29 https://github.com/pytorch/pytorch/blob/3a79621c9dce17f77fbddc06aab21f6bc477f313/torch/_inductor/freezing.py#L178-L197 30 31 Folds constants in the graph module, not skipping constructors 32 33 Modifies the graph in-place and replaces node with constants 34 """ 35 cf = ConstantFolder(gm, skip_constructors=False) 36 cf.run() 37 38 for node, constant in cf.node_replacements.items(): 39 replace_node_with_constant(gm, node, constant) 40 41 erased_params = [] 42 for node in gm.graph.nodes: 43 # If get_attr node has no users, mark it for deletion 44 if node.op == "get_attr" and len(node.users) == 0: 45 # If the node's parameter is not a parameter of any other node, remove it 46 if not any( 47 other.target == node.target for other in gm.graph.nodes if other != node 48 ): 49 delattr(gm, node.target) 50 erased_params.append(node) 51 52 # Remove unused nodes from the graph 53 for node in erased_params: 54 gm.graph.erase_node(node) 55 56 gm = clean_up_graph_after_modifications(gm) 57 58 logger.debug(f"Graph after constant folding:\n{gm.graph}") 59 60 return gm 61 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py b/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py --- a/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py +++ b/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py @@ -1,5 +1,5 @@ import logging -from typing import Sequence +from typing import Any, Sequence import torch from torch_tensorrt._utils import sanitized_torch_version @@ -32,7 +32,7 @@ Modifies the graph in-place and replaces node with constants """ - cf = ConstantFolder(gm, skip_constructors=False) + cf = _TorchTensorRTConstantFolder(gm, skip_constructors=False) cf.run() for node, constant in cf.node_replacements.items(): @@ -58,3 +58,14 @@ logger.debug(f"Graph after constant folding:\n{gm.graph}") return gm + + +# TODO: Delete this class when the following code is fixed in nightly: +# https://github.com/pytorch/pytorch/blob/4b881b0da390c1290bb12850ef9daad6f6eb2cb6/torch/_inductor/constant_folding.py#L53-L63 +class _TorchTensorRTConstantFolder(ConstantFolder): # type: ignore[misc] + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + # TODO: Update this function when quantization is added + def is_impure(self, node: torch.fx.node.Node) -> bool: + return False
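The shape of the workaround, stripped of the Torch specifics: subclass the upstream class and neutralize the predicate that touches the removed operators. Placeholder names throughout:

```python
class UpstreamFolder:
    """Stand-in for torch._inductor's ConstantFolder."""
    def is_impure(self, node) -> bool:
        # The upstream version consults aten ops that may not exist in a
        # given nightly, which is what breaks here.
        raise AttributeError("operator no longer exists in this build")

class PatchedFolder(UpstreamFolder):
    def is_impure(self, node) -> bool:
        # Declare every node pure so folding never hits the missing ops.
        return False

assert PatchedFolder().is_impure(object()) is False
```

As the TODOs in the patch say, this is a stopgap until the upstream check is fixed in nightly, and it will need revisiting once quantized ops must be treated as impure again.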
{"golden_diff": "diff --git a/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py b/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py\n--- a/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py\n+++ b/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py\n@@ -1,5 +1,5 @@\n import logging\n-from typing import Sequence\n+from typing import Any, Sequence\n \n import torch\n from torch_tensorrt._utils import sanitized_torch_version\n@@ -32,7 +32,7 @@\n \n Modifies the graph in-place and replaces node with constants\n \"\"\"\n- cf = ConstantFolder(gm, skip_constructors=False)\n+ cf = _TorchTensorRTConstantFolder(gm, skip_constructors=False)\n cf.run()\n \n for node, constant in cf.node_replacements.items():\n@@ -58,3 +58,14 @@\n logger.debug(f\"Graph after constant folding:\\n{gm.graph}\")\n \n return gm\n+\n+\n+# TODO: Delete this class when the following code is fixed in nightly:\n+# https://github.com/pytorch/pytorch/blob/4b881b0da390c1290bb12850ef9daad6f6eb2cb6/torch/_inductor/constant_folding.py#L53-L63\n+class _TorchTensorRTConstantFolder(ConstantFolder): # type: ignore[misc]\n+ def __init__(self, *args: Any, **kwargs: Any) -> None:\n+ super().__init__(*args, **kwargs)\n+\n+ # TODO: Update this function when quantization is added\n+ def is_impure(self, node: torch.fx.node.Node) -> bool:\n+ return False\n", "issue": "\ud83d\udc1b [Bug] Issue in `ConstantFolder` where certain operators no longer exist in latest nightly\n## Bug Description\r\n\r\n[See here](https://github.com/pytorch/pytorch/blob/4b881b0da390c1290bb12850ef9daad6f6eb2cb6/torch/_inductor/constant_folding.py#L53-L63)\r\n\r\n## To Reproduce\r\n\r\nSee recent PR \r\n\r\n## Expected behavior\r\n\r\nThese tests should not fail.\r\n\r\n## Environment\r\n\r\n> Build information about Torch-TensorRT can be found by turning on debug messages\r\n\r\n - Torch-TensorRT Version (e.g. 1.0.0): https://github.com/pytorch/TensorRT/commit/0ef47c78ce94f610ee2a3a5a62e6fb450d4a8b34\r\n - PyTorch Version (e.g. 
1.0): `2.2.0.dev20231009+cu121`\r\n\n", "before_files": [{"content": "import logging\nfrom typing import Sequence\n\nimport torch\nfrom torch_tensorrt._utils import sanitized_torch_version\nfrom torch_tensorrt.dynamo.lowering.passes.pass_utils import (\n clean_up_graph_after_modifications,\n)\n\nfrom packaging import version\n\n# Modify import location of utilities based on Torch version\nif version.parse(sanitized_torch_version()) < version.parse(\"2.1.1\"):\n from torch._inductor.freezing import ConstantFolder, replace_node_with_constant\nelse:\n from torch._inductor.constant_folding import (\n ConstantFolder,\n replace_node_with_constant,\n )\n\nlogger = logging.getLogger(__name__)\n\n\[email protected]._python_dispatch._disable_current_modes() # type: ignore\ndef constant_fold(\n gm: torch.fx.GraphModule, sample_inputs: Sequence[torch.Tensor]\n) -> torch.fx.GraphModule:\n \"\"\"Adapted from:\n https://github.com/pytorch/pytorch/blob/3a79621c9dce17f77fbddc06aab21f6bc477f313/torch/_inductor/freezing.py#L178-L197\n\n Folds constants in the graph module, not skipping constructors\n\n Modifies the graph in-place and replaces node with constants\n \"\"\"\n cf = ConstantFolder(gm, skip_constructors=False)\n cf.run()\n\n for node, constant in cf.node_replacements.items():\n replace_node_with_constant(gm, node, constant)\n\n erased_params = []\n for node in gm.graph.nodes:\n # If get_attr node has no users, mark it for deletion\n if node.op == \"get_attr\" and len(node.users) == 0:\n # If the node's parameter is not a parameter of any other node, remove it\n if not any(\n other.target == node.target for other in gm.graph.nodes if other != node\n ):\n delattr(gm, node.target)\n erased_params.append(node)\n\n # Remove unused nodes from the graph\n for node in erased_params:\n gm.graph.erase_node(node)\n\n gm = clean_up_graph_after_modifications(gm)\n\n logger.debug(f\"Graph after constant folding:\\n{gm.graph}\")\n\n return gm\n", "path": "py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py"}], "after_files": [{"content": "import logging\nfrom typing import Any, Sequence\n\nimport torch\nfrom torch_tensorrt._utils import sanitized_torch_version\nfrom torch_tensorrt.dynamo.lowering.passes.pass_utils import (\n clean_up_graph_after_modifications,\n)\n\nfrom packaging import version\n\n# Modify import location of utilities based on Torch version\nif version.parse(sanitized_torch_version()) < version.parse(\"2.1.1\"):\n from torch._inductor.freezing import ConstantFolder, replace_node_with_constant\nelse:\n from torch._inductor.constant_folding import (\n ConstantFolder,\n replace_node_with_constant,\n )\n\nlogger = logging.getLogger(__name__)\n\n\[email protected]._python_dispatch._disable_current_modes() # type: ignore\ndef constant_fold(\n gm: torch.fx.GraphModule, sample_inputs: Sequence[torch.Tensor]\n) -> torch.fx.GraphModule:\n \"\"\"Adapted from:\n https://github.com/pytorch/pytorch/blob/3a79621c9dce17f77fbddc06aab21f6bc477f313/torch/_inductor/freezing.py#L178-L197\n\n Folds constants in the graph module, not skipping constructors\n\n Modifies the graph in-place and replaces node with constants\n \"\"\"\n cf = _TorchTensorRTConstantFolder(gm, skip_constructors=False)\n cf.run()\n\n for node, constant in cf.node_replacements.items():\n replace_node_with_constant(gm, node, constant)\n\n erased_params = []\n for node in gm.graph.nodes:\n # If get_attr node has no users, mark it for deletion\n if node.op == \"get_attr\" and len(node.users) == 0:\n # If the node's parameter is not a 
parameter of any other node, remove it\n if not any(\n other.target == node.target for other in gm.graph.nodes if other != node\n ):\n delattr(gm, node.target)\n erased_params.append(node)\n\n # Remove unused nodes from the graph\n for node in erased_params:\n gm.graph.erase_node(node)\n\n gm = clean_up_graph_after_modifications(gm)\n\n logger.debug(f\"Graph after constant folding:\\n{gm.graph}\")\n\n return gm\n\n\n# TODO: Delete this class when the following code is fixed in nightly:\n# https://github.com/pytorch/pytorch/blob/4b881b0da390c1290bb12850ef9daad6f6eb2cb6/torch/_inductor/constant_folding.py#L53-L63\nclass _TorchTensorRTConstantFolder(ConstantFolder): # type: ignore[misc]\n def __init__(self, *args: Any, **kwargs: Any) -> None:\n super().__init__(*args, **kwargs)\n\n # TODO: Update this function when quantization is added\n def is_impure(self, node: torch.fx.node.Node) -> bool:\n return False\n", "path": "py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py"}]}
1,101
397
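The torch_tensorrt patch above works around an upstream purity check by subclassing the constant folder and forcing `is_impure` to report `False`. A minimal runnable sketch of that override pattern — `ConstantFolder` here is a local stand-in, since the real `torch._inductor` internals are assumed rather than imported:

```python
from typing import Any, Dict

class ConstantFolder:
    """Local stand-in for torch._inductor.constant_folding.ConstantFolder."""

    def __init__(self, nodes: Dict[str, Any], skip_constructors: bool = True) -> None:
        self.nodes = nodes
        self.node_replacements: Dict[str, Any] = {}

    def is_impure(self, node: Any) -> bool:
        return True  # upstream conservatively refuses to fold "impure" nodes

    def run(self) -> None:
        # Fold every node the purity check lets through.
        for name, value in self.nodes.items():
            if not self.is_impure(value):
                self.node_replacements[name] = value

class _PatchedConstantFolder(ConstantFolder):
    """Mirrors _TorchTensorRTConstantFolder: treat every node as foldable."""

    def is_impure(self, node: Any) -> bool:
        return False

cf = _PatchedConstantFolder({"two_plus_two": 4}, skip_constructors=False)
cf.run()
assert cf.node_replacements == {"two_plus_two": 4}  # the base class would fold nothing
```

The subclass-and-override shape keeps the workaround deletable: once the upstream check is fixed, only the subclass definition has to go — which is exactly what the record's TODO comment anticipates.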
gh_patches_debug_26365
rasdani/github-patches
git_diff
cisagov__manage.get.gov-1396
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Update error message on check availability section in the request form ### Issue description To keep language consistent, we should update the error message in the check availability section in the request form from: This > That domain isn't available. Try entering another one. Contact us if you need help coming up with a domain. To this > That domain isn’t available. [Read more about choosing your .gov domain.](https://beta.get.gov/domains/choosing/) Note that the hyperlink in the new message should open in a new tab so as to not take the user our of the request flow ### Acceptance criteria - [ ] error message is updated - [ ] link opens in new tab ### Additional context _No response_ ### Links to other issues _No response_ --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `src/api/views.py` Content: ``` 1 """Internal API views""" 2 from django.apps import apps 3 from django.views.decorators.http import require_http_methods 4 from django.http import JsonResponse 5 6 import requests 7 8 from login_required import login_not_required 9 10 from cachetools.func import ttl_cache 11 12 13 DOMAIN_FILE_URL = "https://raw.githubusercontent.com/cisagov/dotgov-data/main/current-full.csv" 14 15 16 DOMAIN_API_MESSAGES = { 17 "required": "Enter the .gov domain you want. Don’t include “www” or “.gov.”" 18 " For example, if you want www.city.gov, you would enter “city”" 19 " (without the quotes).", 20 "extra_dots": "Enter the .gov domain you want without any periods.", 21 "unavailable": "That domain isn’t available. Try entering another one." 22 " Contact us if you need help coming up with a domain.", 23 "invalid": "Enter a domain using only letters, numbers, or hyphens (though we don't recommend using hyphens).", 24 "success": "That domain is available!", 25 "error": "Error finding domain availability.", 26 } 27 28 29 # this file doesn't change that often, nor is it that big, so cache the result 30 # in memory for ten minutes 31 @ttl_cache(ttl=600) 32 def _domains(): 33 """Return a list of the current .gov domains. 34 35 Fetch a file from DOMAIN_FILE_URL, parse the CSV for the domain, 36 lowercase everything and return the list. 37 """ 38 DraftDomain = apps.get_model("registrar.DraftDomain") 39 # 5 second timeout 40 file_contents = requests.get(DOMAIN_FILE_URL, timeout=5).text 41 domains = set() 42 # skip the first line 43 for line in file_contents.splitlines()[1:]: 44 # get the domain before the first comma 45 domain = line.split(",", 1)[0] 46 # sanity-check the string we got from the file here 47 if DraftDomain.string_could_be_domain(domain): 48 # lowercase everything when we put it in domains 49 domains.add(domain.lower()) 50 return domains 51 52 53 def check_domain_available(domain): 54 """Return true if the given domain is available. 55 56 The given domain is lowercased to match against the domains list. If the 57 given domain doesn't end with .gov, ".gov" is added when looking for 58 a match. 
59 """ 60 Domain = apps.get_model("registrar.Domain") 61 try: 62 if domain.endswith(".gov"): 63 return Domain.available(domain) 64 else: 65 # domain search string doesn't end with .gov, add it on here 66 return Domain.available(domain + ".gov") 67 except Exception: 68 return False 69 70 71 @require_http_methods(["GET"]) 72 @login_not_required 73 def available(request, domain=""): 74 """Is a given domain available or not. 75 76 Response is a JSON dictionary with the key "available" and value true or 77 false. 78 """ 79 DraftDomain = apps.get_model("registrar.DraftDomain") 80 # validate that the given domain could be a domain name and fail early if 81 # not. 82 if not (DraftDomain.string_could_be_domain(domain) or DraftDomain.string_could_be_domain(domain + ".gov")): 83 return JsonResponse({"available": False, "message": DOMAIN_API_MESSAGES["invalid"]}) 84 # a domain is available if it is NOT in the list of current domains 85 try: 86 if check_domain_available(domain): 87 return JsonResponse({"available": True, "message": DOMAIN_API_MESSAGES["success"]}) 88 else: 89 return JsonResponse({"available": False, "message": DOMAIN_API_MESSAGES["unavailable"]}) 90 except Exception: 91 return JsonResponse({"available": False, "message": DOMAIN_API_MESSAGES["error"]}) 92 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/src/api/views.py b/src/api/views.py --- a/src/api/views.py +++ b/src/api/views.py @@ -2,6 +2,9 @@ from django.apps import apps from django.views.decorators.http import require_http_methods from django.http import JsonResponse +from django.utils.safestring import mark_safe + +from registrar.templatetags.url_helpers import public_site_url import requests @@ -18,8 +21,13 @@ " For example, if you want www.city.gov, you would enter “city”" " (without the quotes).", "extra_dots": "Enter the .gov domain you want without any periods.", - "unavailable": "That domain isn’t available. Try entering another one." - " Contact us if you need help coming up with a domain.", + # message below is considered safe; no user input can be inserted into the message + # body; public_site_url() function reads from local app settings and therefore safe + "unavailable": mark_safe( # nosec + "That domain isn’t available. " + "<a class='usa-link' href='{}' target='_blank'>" + "Read more about choosing your .gov domain.</a>".format(public_site_url("domains/choosing")) + ), "invalid": "Enter a domain using only letters, numbers, or hyphens (though we don't recommend using hyphens).", "success": "That domain is available!", "error": "Error finding domain availability.",
{"golden_diff": "diff --git a/src/api/views.py b/src/api/views.py\n--- a/src/api/views.py\n+++ b/src/api/views.py\n@@ -2,6 +2,9 @@\n from django.apps import apps\n from django.views.decorators.http import require_http_methods\n from django.http import JsonResponse\n+from django.utils.safestring import mark_safe\n+\n+from registrar.templatetags.url_helpers import public_site_url\n \n import requests\n \n@@ -18,8 +21,13 @@\n \" For example, if you want www.city.gov, you would enter \u201ccity\u201d\"\n \" (without the quotes).\",\n \"extra_dots\": \"Enter the .gov domain you want without any periods.\",\n- \"unavailable\": \"That domain isn\u2019t available. Try entering another one.\"\n- \" Contact us if you need help coming up with a domain.\",\n+ # message below is considered safe; no user input can be inserted into the message\n+ # body; public_site_url() function reads from local app settings and therefore safe\n+ \"unavailable\": mark_safe( # nosec\n+ \"That domain isn\u2019t available. \"\n+ \"<a class='usa-link' href='{}' target='_blank'>\"\n+ \"Read more about choosing your .gov domain.</a>\".format(public_site_url(\"domains/choosing\"))\n+ ),\n \"invalid\": \"Enter a domain using only letters, numbers, or hyphens (though we don't recommend using hyphens).\",\n \"success\": \"That domain is available!\",\n \"error\": \"Error finding domain availability.\",\n", "issue": "Update error message on check availability section in the request form\n### Issue description\n\nTo keep language consistent, we should update the error message in the check availability section in the request form from:\r\n\r\nThis\r\n\r\n> That domain isn't available. Try entering another one. Contact us if you need help coming up with a domain. \r\n\r\n\r\nTo this\r\n\r\n> That domain isn\u2019t available. [Read more about choosing your .gov domain.](https://beta.get.gov/domains/choosing/) \r\n\r\n\r\nNote that the hyperlink in the new message should open in a new tab so as to not take the user our of the request flow\n\n### Acceptance criteria\n\n- [ ] error message is updated\r\n- [ ] link opens in new tab\n\n### Additional context\n\n_No response_\n\n### Links to other issues\n\n_No response_\n", "before_files": [{"content": "\"\"\"Internal API views\"\"\"\nfrom django.apps import apps\nfrom django.views.decorators.http import require_http_methods\nfrom django.http import JsonResponse\n\nimport requests\n\nfrom login_required import login_not_required\n\nfrom cachetools.func import ttl_cache\n\n\nDOMAIN_FILE_URL = \"https://raw.githubusercontent.com/cisagov/dotgov-data/main/current-full.csv\"\n\n\nDOMAIN_API_MESSAGES = {\n \"required\": \"Enter the .gov domain you want. Don\u2019t include \u201cwww\u201d or \u201c.gov.\u201d\"\n \" For example, if you want www.city.gov, you would enter \u201ccity\u201d\"\n \" (without the quotes).\",\n \"extra_dots\": \"Enter the .gov domain you want without any periods.\",\n \"unavailable\": \"That domain isn\u2019t available. 
Try entering another one.\"\n \" Contact us if you need help coming up with a domain.\",\n \"invalid\": \"Enter a domain using only letters, numbers, or hyphens (though we don't recommend using hyphens).\",\n \"success\": \"That domain is available!\",\n \"error\": \"Error finding domain availability.\",\n}\n\n\n# this file doesn't change that often, nor is it that big, so cache the result\n# in memory for ten minutes\n@ttl_cache(ttl=600)\ndef _domains():\n \"\"\"Return a list of the current .gov domains.\n\n Fetch a file from DOMAIN_FILE_URL, parse the CSV for the domain,\n lowercase everything and return the list.\n \"\"\"\n DraftDomain = apps.get_model(\"registrar.DraftDomain\")\n # 5 second timeout\n file_contents = requests.get(DOMAIN_FILE_URL, timeout=5).text\n domains = set()\n # skip the first line\n for line in file_contents.splitlines()[1:]:\n # get the domain before the first comma\n domain = line.split(\",\", 1)[0]\n # sanity-check the string we got from the file here\n if DraftDomain.string_could_be_domain(domain):\n # lowercase everything when we put it in domains\n domains.add(domain.lower())\n return domains\n\n\ndef check_domain_available(domain):\n \"\"\"Return true if the given domain is available.\n\n The given domain is lowercased to match against the domains list. If the\n given domain doesn't end with .gov, \".gov\" is added when looking for\n a match.\n \"\"\"\n Domain = apps.get_model(\"registrar.Domain\")\n try:\n if domain.endswith(\".gov\"):\n return Domain.available(domain)\n else:\n # domain search string doesn't end with .gov, add it on here\n return Domain.available(domain + \".gov\")\n except Exception:\n return False\n\n\n@require_http_methods([\"GET\"])\n@login_not_required\ndef available(request, domain=\"\"):\n \"\"\"Is a given domain available or not.\n\n Response is a JSON dictionary with the key \"available\" and value true or\n false.\n \"\"\"\n DraftDomain = apps.get_model(\"registrar.DraftDomain\")\n # validate that the given domain could be a domain name and fail early if\n # not.\n if not (DraftDomain.string_could_be_domain(domain) or DraftDomain.string_could_be_domain(domain + \".gov\")):\n return JsonResponse({\"available\": False, \"message\": DOMAIN_API_MESSAGES[\"invalid\"]})\n # a domain is available if it is NOT in the list of current domains\n try:\n if check_domain_available(domain):\n return JsonResponse({\"available\": True, \"message\": DOMAIN_API_MESSAGES[\"success\"]})\n else:\n return JsonResponse({\"available\": False, \"message\": DOMAIN_API_MESSAGES[\"unavailable\"]})\n except Exception:\n return JsonResponse({\"available\": False, \"message\": DOMAIN_API_MESSAGES[\"error\"]})\n", "path": "src/api/views.py"}], "after_files": [{"content": "\"\"\"Internal API views\"\"\"\nfrom django.apps import apps\nfrom django.views.decorators.http import require_http_methods\nfrom django.http import JsonResponse\nfrom django.utils.safestring import mark_safe\n\nfrom registrar.templatetags.url_helpers import public_site_url\n\nimport requests\n\nfrom login_required import login_not_required\n\nfrom cachetools.func import ttl_cache\n\n\nDOMAIN_FILE_URL = \"https://raw.githubusercontent.com/cisagov/dotgov-data/main/current-full.csv\"\n\n\nDOMAIN_API_MESSAGES = {\n \"required\": \"Enter the .gov domain you want. 
Don\u2019t include \u201cwww\u201d or \u201c.gov.\u201d\"\n \" For example, if you want www.city.gov, you would enter \u201ccity\u201d\"\n \" (without the quotes).\",\n \"extra_dots\": \"Enter the .gov domain you want without any periods.\",\n # message below is considered safe; no user input can be inserted into the message\n # body; public_site_url() function reads from local app settings and therefore safe\n \"unavailable\": mark_safe( # nosec\n \"That domain isn\u2019t available. \"\n \"<a class='usa-link' href='{}' target='_blank'>\"\n \"Read more about choosing your .gov domain.</a>\".format(public_site_url(\"domains/choosing\"))\n ),\n \"invalid\": \"Enter a domain using only letters, numbers, or hyphens (though we don't recommend using hyphens).\",\n \"success\": \"That domain is available!\",\n \"error\": \"Error finding domain availability.\",\n}\n\n\n# this file doesn't change that often, nor is it that big, so cache the result\n# in memory for ten minutes\n@ttl_cache(ttl=600)\ndef _domains():\n \"\"\"Return a list of the current .gov domains.\n\n Fetch a file from DOMAIN_FILE_URL, parse the CSV for the domain,\n lowercase everything and return the list.\n \"\"\"\n DraftDomain = apps.get_model(\"registrar.DraftDomain\")\n # 5 second timeout\n file_contents = requests.get(DOMAIN_FILE_URL, timeout=5).text\n domains = set()\n # skip the first line\n for line in file_contents.splitlines()[1:]:\n # get the domain before the first comma\n domain = line.split(\",\", 1)[0]\n # sanity-check the string we got from the file here\n if DraftDomain.string_could_be_domain(domain):\n # lowercase everything when we put it in domains\n domains.add(domain.lower())\n return domains\n\n\ndef check_domain_available(domain):\n \"\"\"Return true if the given domain is available.\n\n The given domain is lowercased to match against the domains list. If the\n given domain doesn't end with .gov, \".gov\" is added when looking for\n a match.\n \"\"\"\n Domain = apps.get_model(\"registrar.Domain\")\n try:\n if domain.endswith(\".gov\"):\n return Domain.available(domain)\n else:\n # domain search string doesn't end with .gov, add it on here\n return Domain.available(domain + \".gov\")\n except Exception:\n return False\n\n\n@require_http_methods([\"GET\"])\n@login_not_required\ndef available(request, domain=\"\"):\n \"\"\"Is a given domain available or not.\n\n Response is a JSON dictionary with the key \"available\" and value true or\n false.\n \"\"\"\n DraftDomain = apps.get_model(\"registrar.DraftDomain\")\n # validate that the given domain could be a domain name and fail early if\n # not.\n if not (DraftDomain.string_could_be_domain(domain) or DraftDomain.string_could_be_domain(domain + \".gov\")):\n return JsonResponse({\"available\": False, \"message\": DOMAIN_API_MESSAGES[\"invalid\"]})\n # a domain is available if it is NOT in the list of current domains\n try:\n if check_domain_available(domain):\n return JsonResponse({\"available\": True, \"message\": DOMAIN_API_MESSAGES[\"success\"]})\n else:\n return JsonResponse({\"available\": False, \"message\": DOMAIN_API_MESSAGES[\"unavailable\"]})\n except Exception:\n return JsonResponse({\"available\": False, \"message\": DOMAIN_API_MESSAGES[\"error\"]})\n", "path": "src/api/views.py"}]}
1,391
338
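The merged message above pairs `mark_safe` with `str.format` and a `# nosec` comment explaining why no user input can reach the markup. A hedged alternative using Django's `format_html`, which escapes its arguments so that justification becomes unnecessary — the helper name and the `rel='noopener'` attribute are illustrative additions, not part of the record:

```python
from django.utils.html import format_html

def unavailable_message(choosing_url: str):
    # format_html() escapes interpolated values, so even an unexpected URL
    # value cannot break out of the anchor markup.
    return format_html(
        "That domain isn’t available. "
        "<a class='usa-link' href='{}' target='_blank' rel='noopener'>"
        "Read more about choosing your .gov domain.</a>",
        choosing_url,
    )

print(unavailable_message("https://beta.get.gov/domains/choosing/"))
```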
gh_patches_debug_34044
rasdani/github-patches
git_diff
TileDB-Inc__TileDB-Py-246
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- likely race condition in DenseArray.__new__ Test program below exposes (what I believe is a) race in class handling in DenseArray. The test program is completely artificial - it just spins up 5 threads, each of which open an S3 object and ask its shape. Without a sleep to sequence their execution, it fails inside the DenseArray.__new__ method. Run with the sleep statement in worker(), I see: ``` python cctest.py starting... 0 starting... 1 working... 0 ending... 0 (1305994, 972) starting... 2 working... 1 ending... 1 (1305994, 972) starting... 3 working... 2 ending... 2 (1305994, 972) starting... 4 working... 3 ending... 3 (1305994, 972) working... 4 ending... 4 (1305994, 972) all done [(1305994, 972), (1305994, 972), (1305994, 972), (1305994, 972), (1305994, 972)] ``` Run with the sleep in worker removed/commented out: ``` $ python cctest.py starting... 0 starting... 1 starting... 2 working... 0 ending... 0 (1305994, 972) starting... 3 working... 2 ending... 2 (1305994, 972) starting... 4 working... 3 ending... 3 (1305994, 972) working... 4 ending... 4 (1305994, 972) Traceback (most recent call last): File "cctest.py", line 33, in <module> main() File "cctest.py", line 27, in main res = [f.result() for f in concurrent.futures.as_completed(futures)] File "cctest.py", line 27, in <listcomp> res = [f.result() for f in concurrent.futures.as_completed(futures)] File "/usr/lib64/python3.7/concurrent/futures/_base.py", line 428, in result return self.__get_result() File "/usr/lib64/python3.7/concurrent/futures/_base.py", line 384, in __get_result raise self._exception File "/usr/lib64/python3.7/concurrent/futures/thread.py", line 57, in run result = self.fn(*self.args, **self.kwargs) File "cctest.py", line 11, in worker with tiledb.DenseArray(uri) as A: File "/home/ec2-user/venv/lib64/python3.7/site-packages/tiledb/array.py", line 10, in __new__ DenseArray.__bases__ = DenseArray.__bases__ + (cloudarray.CloudArray,) TypeError: duplicate base class CloudArray ``` Test case: ``` import tiledb import time import numpy as np import concurrent.futures def worker(n, uri): print('starting...', n) # time.sleep(n) with tiledb.DenseArray(uri) as A: print('working...', n) res = A.shape print('ending...', n, res) return res def main(): executor_cls = concurrent.futures.ThreadPoolExecutor uri = "s3://tiledb-eval/10x_mouse_13MM_processed-Xr3zstd.cxg/X" with executor_cls(max_workers=2) as executor: futures = [executor.submit(worker, n, uri) for n in range(0, 5)] res = [f.result() for f in concurrent.futures.as_completed(futures)] print('all done', res) if __name__ == '__main__': main() ``` --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. 
--- BEGIN FILES --- Path: `tiledb/array.py` Content: ``` 1 from .libtiledb import DenseArrayImpl, SparseArrayImpl 2 3 class DenseArray(DenseArrayImpl): 4 _mixin_init = False 5 6 def __new__(cls, *args, **kwargs): 7 if not cls._mixin_init: 8 try: 9 from tiledb.cloud import cloudarray 10 DenseArray.__bases__ = DenseArray.__bases__ + (cloudarray.CloudArray,) 11 except ImportError: 12 pass 13 finally: 14 cls._mixin_init = True 15 16 obj = super(DenseArray, cls).__new__(cls, *args, **kwargs) 17 return obj 18 19 class SparseArray(SparseArrayImpl): 20 _mixin_init = False 21 22 def __new__(cls, *args, **kwargs): 23 if not cls._mixin_init: 24 try: 25 from tiledb.cloud import cloudarray 26 SparseArray.__bases__ = SparseArray.__bases__ + (cloudarray.CloudArray,) 27 except ImportError: 28 pass 29 finally: 30 cls._mixin_init = True 31 32 obj = super(SparseArray, cls).__new__(cls, *args, **kwargs) 33 return obj 34 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/tiledb/array.py b/tiledb/array.py --- a/tiledb/array.py +++ b/tiledb/array.py @@ -1,17 +1,29 @@ from .libtiledb import DenseArrayImpl, SparseArrayImpl +# Extensible (pure Python) array class definitions inheriting from the +# Cython implemention. The cloudarray mix-in adds optional functionality +# for registering arrays and executing functions on the + +# NOTE: the mixin import must be inside the __new__ initializer because it +# needs to be deferred. tiledb.cloud is not yet known to the importer +# when this code is imported. +# TODO: might be possible to work-around/simplify by using +# import meta-hooks instead. + class DenseArray(DenseArrayImpl): _mixin_init = False def __new__(cls, *args, **kwargs): if not cls._mixin_init: + # must set before importing, because import is not thread-safe + # https://github.com/TileDB-Inc/TileDB-Py/issues/244 + cls._mixin_init = True try: from tiledb.cloud import cloudarray DenseArray.__bases__ = DenseArray.__bases__ + (cloudarray.CloudArray,) + DenseArray.__doc__ = DenseArrayImpl.__doc__ except ImportError: pass - finally: - cls._mixin_init = True obj = super(DenseArray, cls).__new__(cls, *args, **kwargs) return obj @@ -21,13 +33,13 @@ def __new__(cls, *args, **kwargs): if not cls._mixin_init: + cls._mixin_init = True try: from tiledb.cloud import cloudarray SparseArray.__bases__ = SparseArray.__bases__ + (cloudarray.CloudArray,) + SparseArray.__doc__ = DenseArrayImpl.__doc__ except ImportError: pass - finally: - cls._mixin_init = True obj = super(SparseArray, cls).__new__(cls, *args, **kwargs) return obj
{"golden_diff": "diff --git a/tiledb/array.py b/tiledb/array.py\n--- a/tiledb/array.py\n+++ b/tiledb/array.py\n@@ -1,17 +1,29 @@\n from .libtiledb import DenseArrayImpl, SparseArrayImpl\n \n+# Extensible (pure Python) array class definitions inheriting from the\n+# Cython implemention. The cloudarray mix-in adds optional functionality\n+# for registering arrays and executing functions on the\n+\n+# NOTE: the mixin import must be inside the __new__ initializer because it\n+# needs to be deferred. tiledb.cloud is not yet known to the importer\n+# when this code is imported.\n+# TODO: might be possible to work-around/simplify by using\n+# import meta-hooks instead.\n+\n class DenseArray(DenseArrayImpl):\n _mixin_init = False\n \n def __new__(cls, *args, **kwargs):\n if not cls._mixin_init:\n+ # must set before importing, because import is not thread-safe\n+ # https://github.com/TileDB-Inc/TileDB-Py/issues/244\n+ cls._mixin_init = True\n try:\n from tiledb.cloud import cloudarray\n DenseArray.__bases__ = DenseArray.__bases__ + (cloudarray.CloudArray,)\n+ DenseArray.__doc__ = DenseArrayImpl.__doc__\n except ImportError:\n pass\n- finally:\n- cls._mixin_init = True\n \n obj = super(DenseArray, cls).__new__(cls, *args, **kwargs)\n return obj\n@@ -21,13 +33,13 @@\n \n def __new__(cls, *args, **kwargs):\n if not cls._mixin_init:\n+ cls._mixin_init = True\n try:\n from tiledb.cloud import cloudarray\n SparseArray.__bases__ = SparseArray.__bases__ + (cloudarray.CloudArray,)\n+ SparseArray.__doc__ = DenseArrayImpl.__doc__\n except ImportError:\n pass\n- finally:\n- cls._mixin_init = True\n \n obj = super(SparseArray, cls).__new__(cls, *args, **kwargs)\n return obj\n", "issue": "likely race condition in DenseArray.__new__\n\r\nTest program below exposes (what I believe is a) race in class handling in DenseArray. The test program is completely artificial - it just spins up 5 threads, each of which open an S3 object and ask its shape. Without a sleep to sequence their execution, it fails inside the DenseArray.__new__ method.\r\n\r\nRun with the sleep statement in worker(), I see:\r\n```\r\npython cctest.py\r\nstarting... 0\r\nstarting... 1\r\nworking... 0\r\nending... 0 (1305994, 972)\r\nstarting... 2\r\nworking... 1\r\nending... 1 (1305994, 972)\r\nstarting... 3\r\nworking... 2\r\nending... 2 (1305994, 972)\r\nstarting... 4\r\nworking... 3\r\nending... 3 (1305994, 972)\r\nworking... 4\r\nending... 4 (1305994, 972)\r\nall done [(1305994, 972), (1305994, 972), (1305994, 972), (1305994, 972), (1305994, 972)]\r\n```\r\n\r\nRun with the sleep in worker removed/commented out:\r\n```\r\n$ python cctest.py\r\nstarting... 0\r\nstarting... 1\r\nstarting... 2\r\nworking... 0\r\nending... 0 (1305994, 972)\r\nstarting... 3\r\nworking... 2\r\nending... 2 (1305994, 972)\r\nstarting... 4\r\nworking... 3\r\nending... 3 (1305994, 972)\r\nworking... 4\r\nending... 
4 (1305994, 972)\r\nTraceback (most recent call last):\r\n File \"cctest.py\", line 33, in <module>\r\n main()\r\n File \"cctest.py\", line 27, in main\r\n res = [f.result() for f in concurrent.futures.as_completed(futures)]\r\n File \"cctest.py\", line 27, in <listcomp>\r\n res = [f.result() for f in concurrent.futures.as_completed(futures)]\r\n File \"/usr/lib64/python3.7/concurrent/futures/_base.py\", line 428, in result\r\n return self.__get_result()\r\n File \"/usr/lib64/python3.7/concurrent/futures/_base.py\", line 384, in __get_result\r\n raise self._exception\r\n File \"/usr/lib64/python3.7/concurrent/futures/thread.py\", line 57, in run\r\n result = self.fn(*self.args, **self.kwargs)\r\n File \"cctest.py\", line 11, in worker\r\n with tiledb.DenseArray(uri) as A:\r\n File \"/home/ec2-user/venv/lib64/python3.7/site-packages/tiledb/array.py\", line 10, in __new__\r\n DenseArray.__bases__ = DenseArray.__bases__ + (cloudarray.CloudArray,)\r\nTypeError: duplicate base class CloudArray\r\n```\r\n\r\nTest case:\r\n```\r\nimport tiledb\r\nimport time\r\nimport numpy as np\r\nimport concurrent.futures\r\n\r\n\r\ndef worker(n, uri):\r\n print('starting...', n)\r\n # time.sleep(n)\r\n\r\n with tiledb.DenseArray(uri) as A:\r\n print('working...', n)\r\n res = A.shape\r\n\r\n print('ending...', n, res)\r\n return res\r\n\r\n\r\ndef main():\r\n executor_cls = concurrent.futures.ThreadPoolExecutor\r\n\r\n uri = \"s3://tiledb-eval/10x_mouse_13MM_processed-Xr3zstd.cxg/X\"\r\n\r\n with executor_cls(max_workers=2) as executor:\r\n futures = [executor.submit(worker, n, uri) for n in range(0, 5)]\r\n res = [f.result() for f in concurrent.futures.as_completed(futures)]\r\n\r\n print('all done', res)\r\n\r\n\r\nif __name__ == '__main__':\r\n main()\r\n```\n", "before_files": [{"content": "from .libtiledb import DenseArrayImpl, SparseArrayImpl\n\nclass DenseArray(DenseArrayImpl):\n _mixin_init = False\n\n def __new__(cls, *args, **kwargs):\n if not cls._mixin_init:\n try:\n from tiledb.cloud import cloudarray\n DenseArray.__bases__ = DenseArray.__bases__ + (cloudarray.CloudArray,)\n except ImportError:\n pass\n finally:\n cls._mixin_init = True\n\n obj = super(DenseArray, cls).__new__(cls, *args, **kwargs)\n return obj\n\nclass SparseArray(SparseArrayImpl):\n _mixin_init = False\n\n def __new__(cls, *args, **kwargs):\n if not cls._mixin_init:\n try:\n from tiledb.cloud import cloudarray\n SparseArray.__bases__ = SparseArray.__bases__ + (cloudarray.CloudArray,)\n except ImportError:\n pass\n finally:\n cls._mixin_init = True\n\n obj = super(SparseArray, cls).__new__(cls, *args, **kwargs)\n return obj\n", "path": "tiledb/array.py"}], "after_files": [{"content": "from .libtiledb import DenseArrayImpl, SparseArrayImpl\n\n# Extensible (pure Python) array class definitions inheriting from the\n# Cython implemention. The cloudarray mix-in adds optional functionality\n# for registering arrays and executing functions on the\n\n# NOTE: the mixin import must be inside the __new__ initializer because it\n# needs to be deferred. 
tiledb.cloud is not yet known to the importer\n# when this code is imported.\n# TODO: might be possible to work-around/simplify by using\n# import meta-hooks instead.\n\nclass DenseArray(DenseArrayImpl):\n _mixin_init = False\n\n def __new__(cls, *args, **kwargs):\n if not cls._mixin_init:\n # must set before importing, because import is not thread-safe\n # https://github.com/TileDB-Inc/TileDB-Py/issues/244\n cls._mixin_init = True\n try:\n from tiledb.cloud import cloudarray\n DenseArray.__bases__ = DenseArray.__bases__ + (cloudarray.CloudArray,)\n DenseArray.__doc__ = DenseArrayImpl.__doc__\n except ImportError:\n pass\n\n obj = super(DenseArray, cls).__new__(cls, *args, **kwargs)\n return obj\n\nclass SparseArray(SparseArrayImpl):\n _mixin_init = False\n\n def __new__(cls, *args, **kwargs):\n if not cls._mixin_init:\n cls._mixin_init = True\n try:\n from tiledb.cloud import cloudarray\n SparseArray.__bases__ = SparseArray.__bases__ + (cloudarray.CloudArray,)\n SparseArray.__doc__ = DenseArrayImpl.__doc__\n except ImportError:\n pass\n\n obj = super(SparseArray, cls).__new__(cls, *args, **kwargs)\n return obj\n", "path": "tiledb/array.py"}]}
1,496
477
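The merged TileDB fix narrows the race by flipping `_mixin_init` before the import, but two threads can still pass the initial check together. A stricter sketch of the same one-time mixin injection using double-checked locking — the lock is an addition the record's patch does not use, and `DenseArrayImpl` stands in for the Cython base class:

```python
import threading

class DenseArrayImpl:
    """Stand-in for tiledb.libtiledb.DenseArrayImpl."""

class DenseArray(DenseArrayImpl):
    _mixin_init = False
    _mixin_lock = threading.Lock()

    def __new__(cls, *args, **kwargs):
        if not cls._mixin_init:              # unlocked fast path for the common case
            with cls._mixin_lock:
                if not cls._mixin_init:      # re-check once serialized
                    cls._mixin_init = True
                    try:
                        from tiledb.cloud import cloudarray  # optional extra
                        cls.__bases__ = cls.__bases__ + (cloudarray.CloudArray,)
                    except ImportError:
                        pass                 # cloud features simply stay off
        return super().__new__(cls)

arrays = [DenseArray() for _ in range(5)]    # safe even when raced from threads
```

With the re-check under the lock, the `__bases__` mutation runs exactly once, so the `TypeError: duplicate base class CloudArray` from the issue's traceback cannot occur.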
gh_patches_debug_13110
rasdani/github-patches
git_diff
Mailu__Mailu-1863
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Radicale password check fails (length limited?) So far, the webdav radicale is protected with basic auth (apparently htaccess or similar). If user password > 8 then it fails to connect. we should remove this limitation to let use stong passwords --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `core/admin/mailu/internal/views/auth.py` Content: ``` 1 from mailu import models, utils 2 from mailu.internal import internal, nginx 3 from flask import current_app as app 4 5 import flask 6 import flask_login 7 import base64 8 import ipaddress 9 10 11 @internal.route("/auth/email") 12 def nginx_authentication(): 13 """ Main authentication endpoint for Nginx email server 14 """ 15 limiter = utils.limiter.get_limiter(app.config["AUTH_RATELIMIT"], "auth-ip") 16 client_ip = flask.request.headers["Client-Ip"] 17 if not limiter.test(client_ip): 18 response = flask.Response() 19 response.headers['Auth-Status'] = 'Authentication rate limit from one source exceeded' 20 response.headers['Auth-Error-Code'] = '451 4.3.2' 21 if int(flask.request.headers['Auth-Login-Attempt']) < 10: 22 response.headers['Auth-Wait'] = '3' 23 return response 24 headers = nginx.handle_authentication(flask.request.headers) 25 response = flask.Response() 26 for key, value in headers.items(): 27 response.headers[key] = str(value) 28 if ("Auth-Status" not in headers) or (headers["Auth-Status"] != "OK"): 29 limit_subnet = str(app.config["AUTH_RATELIMIT_SUBNET"]) != 'False' 30 subnet = ipaddress.ip_network(app.config["SUBNET"]) 31 if limit_subnet or ipaddress.ip_address(client_ip) not in subnet: 32 limiter.hit(flask.request.headers["Client-Ip"]) 33 return response 34 35 36 @internal.route("/auth/admin") 37 def admin_authentication(): 38 """ Fails if the user is not an authenticated admin. 39 """ 40 if (not flask_login.current_user.is_anonymous 41 and flask_login.current_user.global_admin 42 and flask_login.current_user.enabled): 43 return "" 44 return flask.abort(403) 45 46 47 @internal.route("/auth/basic") 48 def basic_authentication(): 49 """ Tries to authenticate using the Authorization header. 50 """ 51 authorization = flask.request.headers.get("Authorization") 52 if authorization and authorization.startswith("Basic "): 53 encoded = authorization.replace("Basic ", "") 54 user_email, password = base64.b64decode(encoded).split(b":") 55 user = models.User.query.get(user_email.decode("utf8")) 56 if user and user.enabled and user.check_password(password.decode("utf8")): 57 response = flask.Response() 58 response.headers["X-User"] = user.email 59 return response 60 response = flask.Response(status=401) 61 response.headers["WWW-Authenticate"] = 'Basic realm="Login Required"' 62 return response 63 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. 
Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/core/admin/mailu/internal/views/auth.py b/core/admin/mailu/internal/views/auth.py --- a/core/admin/mailu/internal/views/auth.py +++ b/core/admin/mailu/internal/views/auth.py @@ -51,7 +51,7 @@ authorization = flask.request.headers.get("Authorization") if authorization and authorization.startswith("Basic "): encoded = authorization.replace("Basic ", "") - user_email, password = base64.b64decode(encoded).split(b":") + user_email, password = base64.b64decode(encoded).split(b":", 1) user = models.User.query.get(user_email.decode("utf8")) if user and user.enabled and user.check_password(password.decode("utf8")): response = flask.Response()
{"golden_diff": "diff --git a/core/admin/mailu/internal/views/auth.py b/core/admin/mailu/internal/views/auth.py\n--- a/core/admin/mailu/internal/views/auth.py\n+++ b/core/admin/mailu/internal/views/auth.py\n@@ -51,7 +51,7 @@\n authorization = flask.request.headers.get(\"Authorization\")\n if authorization and authorization.startswith(\"Basic \"):\n encoded = authorization.replace(\"Basic \", \"\")\n- user_email, password = base64.b64decode(encoded).split(b\":\")\n+ user_email, password = base64.b64decode(encoded).split(b\":\", 1)\n user = models.User.query.get(user_email.decode(\"utf8\"))\n if user and user.enabled and user.check_password(password.decode(\"utf8\")):\n response = flask.Response()\n", "issue": "Radicale password check fails (length limited?)\nSo far, the webdav radicale is protected with basic auth (apparently htaccess or similar).\r\nIf user password > 8 then it fails to connect. \r\nwe should remove this limitation to let use stong passwords\n", "before_files": [{"content": "from mailu import models, utils\nfrom mailu.internal import internal, nginx\nfrom flask import current_app as app\n\nimport flask\nimport flask_login\nimport base64\nimport ipaddress\n\n\[email protected](\"/auth/email\")\ndef nginx_authentication():\n \"\"\" Main authentication endpoint for Nginx email server\n \"\"\"\n limiter = utils.limiter.get_limiter(app.config[\"AUTH_RATELIMIT\"], \"auth-ip\")\n client_ip = flask.request.headers[\"Client-Ip\"]\n if not limiter.test(client_ip):\n response = flask.Response()\n response.headers['Auth-Status'] = 'Authentication rate limit from one source exceeded'\n response.headers['Auth-Error-Code'] = '451 4.3.2'\n if int(flask.request.headers['Auth-Login-Attempt']) < 10:\n response.headers['Auth-Wait'] = '3'\n return response\n headers = nginx.handle_authentication(flask.request.headers)\n response = flask.Response()\n for key, value in headers.items():\n response.headers[key] = str(value)\n if (\"Auth-Status\" not in headers) or (headers[\"Auth-Status\"] != \"OK\"):\n limit_subnet = str(app.config[\"AUTH_RATELIMIT_SUBNET\"]) != 'False'\n subnet = ipaddress.ip_network(app.config[\"SUBNET\"])\n if limit_subnet or ipaddress.ip_address(client_ip) not in subnet:\n limiter.hit(flask.request.headers[\"Client-Ip\"])\n return response\n\n\[email protected](\"/auth/admin\")\ndef admin_authentication():\n \"\"\" Fails if the user is not an authenticated admin.\n \"\"\"\n if (not flask_login.current_user.is_anonymous\n and flask_login.current_user.global_admin\n and flask_login.current_user.enabled):\n return \"\"\n return flask.abort(403)\n\n\[email protected](\"/auth/basic\")\ndef basic_authentication():\n \"\"\" Tries to authenticate using the Authorization header.\n \"\"\"\n authorization = flask.request.headers.get(\"Authorization\")\n if authorization and authorization.startswith(\"Basic \"):\n encoded = authorization.replace(\"Basic \", \"\")\n user_email, password = base64.b64decode(encoded).split(b\":\")\n user = models.User.query.get(user_email.decode(\"utf8\"))\n if user and user.enabled and user.check_password(password.decode(\"utf8\")):\n response = flask.Response()\n response.headers[\"X-User\"] = user.email\n return response\n response = flask.Response(status=401)\n response.headers[\"WWW-Authenticate\"] = 'Basic realm=\"Login Required\"'\n return response\n", "path": "core/admin/mailu/internal/views/auth.py"}], "after_files": [{"content": "from mailu import models, utils\nfrom mailu.internal import internal, nginx\nfrom flask import current_app as 
app\n\nimport flask\nimport flask_login\nimport base64\nimport ipaddress\n\n\[email protected](\"/auth/email\")\ndef nginx_authentication():\n \"\"\" Main authentication endpoint for Nginx email server\n \"\"\"\n limiter = utils.limiter.get_limiter(app.config[\"AUTH_RATELIMIT\"], \"auth-ip\")\n client_ip = flask.request.headers[\"Client-Ip\"]\n if not limiter.test(client_ip):\n response = flask.Response()\n response.headers['Auth-Status'] = 'Authentication rate limit from one source exceeded'\n response.headers['Auth-Error-Code'] = '451 4.3.2'\n if int(flask.request.headers['Auth-Login-Attempt']) < 10:\n response.headers['Auth-Wait'] = '3'\n return response\n headers = nginx.handle_authentication(flask.request.headers)\n response = flask.Response()\n for key, value in headers.items():\n response.headers[key] = str(value)\n if (\"Auth-Status\" not in headers) or (headers[\"Auth-Status\"] != \"OK\"):\n limit_subnet = str(app.config[\"AUTH_RATELIMIT_SUBNET\"]) != 'False'\n subnet = ipaddress.ip_network(app.config[\"SUBNET\"])\n if limit_subnet or ipaddress.ip_address(client_ip) not in subnet:\n limiter.hit(flask.request.headers[\"Client-Ip\"])\n return response\n\n\[email protected](\"/auth/admin\")\ndef admin_authentication():\n \"\"\" Fails if the user is not an authenticated admin.\n \"\"\"\n if (not flask_login.current_user.is_anonymous\n and flask_login.current_user.global_admin\n and flask_login.current_user.enabled):\n return \"\"\n return flask.abort(403)\n\n\[email protected](\"/auth/basic\")\ndef basic_authentication():\n \"\"\" Tries to authenticate using the Authorization header.\n \"\"\"\n authorization = flask.request.headers.get(\"Authorization\")\n if authorization and authorization.startswith(\"Basic \"):\n encoded = authorization.replace(\"Basic \", \"\")\n user_email, password = base64.b64decode(encoded).split(b\":\", 1)\n user = models.User.query.get(user_email.decode(\"utf8\"))\n if user and user.enabled and user.check_password(password.decode(\"utf8\")):\n response = flask.Response()\n response.headers[\"X-User\"] = user.email\n return response\n response = flask.Response(status=401)\n response.headers[\"WWW-Authenticate\"] = 'Basic realm=\"Login Required\"'\n return response\n", "path": "core/admin/mailu/internal/views/auth.py"}]}
978
165
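The one-character Mailu fix above caps the split at the first colon, because RFC 7617 forbids ':' in the user-id but allows it in the password — which is why certain long passwords failed, not their length itself. A standalone sketch of the same rule, using `bytes.partition` as an equivalent of `split(b':', 1)` (the helper name is illustrative):

```python
import base64

def decode_basic_credentials(header: str):
    """Split 'Basic <b64>' into (user, password) per RFC 7617:
    only the first ':' separates the two fields."""
    encoded = header.removeprefix("Basic ")
    user, _, password = base64.b64decode(encoded).partition(b":")
    return user.decode("utf8"), password.decode("utf8")

token = base64.b64encode(b"alice:pa:ss:word").decode()
assert decode_basic_credentials(f"Basic {token}") == ("alice", "pa:ss:word")

# The pre-patch two-way unpack over split(b":") would instead raise:
# ValueError: too many values to unpack (expected 2)
```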
gh_patches_debug_1340
rasdani/github-patches
git_diff
nilearn__nilearn-3337
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Spelling Error <!--Describe your proposed enhancement in detail.--> I think the authors meant to describe ADHD but have written ADHD as AHDH. It is just a simple spelling or typographic error. <!--List any pages that would be impacted by the enhancement.--> ### Affected pages 1. https://nilearn.github.io/dev/auto_examples/04_glm_first_level/plot_adhd_dmn.html#sphx-glr-auto-examples-04-glm-first-level-plot-adhd-dmn-py 2. https://nilearn.github.io/dev/glm/first_level_model.html#fitting-a-first-level-model --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `examples/04_glm_first_level/plot_adhd_dmn.py` Content: ``` 1 """Default Mode Network extraction of AHDH dataset 2 =============================================== 3 4 This example shows a full step-by-step workflow of fitting a GLM to data 5 extracted from a seed on the Posterior Cingulate Cortex and saving the results. 6 7 More specifically: 8 9 1. A sequence of fMRI volumes are loaded. 10 2. A design matrix with the Posterior Cingulate Cortex seed is defined. 11 3. A GLM is applied to the dataset (effect/covariance, then contrast estimation). 12 4. The Default Mode Network is displayed. 13 14 .. include:: ../../../examples/masker_note.rst 15 16 """ 17 import numpy as np 18 19 from nilearn import datasets, plotting 20 from nilearn.maskers import NiftiSpheresMasker 21 22 from nilearn.glm.first_level import FirstLevelModel 23 from nilearn.glm.first_level import make_first_level_design_matrix 24 25 ######################################################################### 26 # Prepare data and analysis parameters 27 # ------------------------------------- 28 # Prepare the data. 29 adhd_dataset = datasets.fetch_adhd(n_subjects=1) 30 31 # Prepare timing 32 t_r = 2. 33 slice_time_ref = 0. 34 n_scans = 176 35 36 # Prepare seed 37 pcc_coords = (0, -53, 26) 38 39 ######################################################################### 40 # Estimate contrasts 41 # ------------------ 42 # Specify the contrasts. 43 seed_masker = NiftiSpheresMasker([pcc_coords], radius=10, detrend=True, 44 standardize=True, low_pass=0.1, 45 high_pass=0.01, t_r=2., 46 memory='nilearn_cache', 47 memory_level=1, verbose=0) 48 seed_time_series = seed_masker.fit_transform(adhd_dataset.func[0]) 49 frametimes = np.linspace(0, (n_scans - 1) * t_r, n_scans) 50 design_matrix = make_first_level_design_matrix(frametimes, hrf_model='spm', 51 add_regs=seed_time_series, 52 add_reg_names=["pcc_seed"]) 53 dmn_contrast = np.array([1] + [0] * (design_matrix.shape[1] - 1)) 54 contrasts = {'seed_based_glm': dmn_contrast} 55 56 ######################################################################### 57 # Perform first level analysis 58 # ---------------------------- 59 # Setup and fit GLM. 60 first_level_model = FirstLevelModel(t_r=t_r, slice_time_ref=slice_time_ref) 61 first_level_model = first_level_model.fit(run_imgs=adhd_dataset.func[0], 62 design_matrices=design_matrix) 63 64 ######################################################################### 65 # Estimate the contrast. 
66 print('Contrast seed_based_glm computed.') 67 z_map = first_level_model.compute_contrast(contrasts['seed_based_glm'], 68 output_type='z_score') 69 70 # Saving snapshots of the contrasts 71 filename = 'dmn_z_map.png' 72 display = plotting.plot_stat_map(z_map, threshold=3.0, title='Seed based GLM', 73 cut_coords=pcc_coords) 74 display.add_markers(marker_coords=[pcc_coords], marker_color='g', 75 marker_size=300) 76 display.savefig(filename) 77 print("Save z-map in '{0}'.".format(filename)) 78 79 ########################################################################### 80 # Generating a report 81 # ------------------- 82 # It can be useful to quickly generate a 83 # portable, ready-to-view report with most of the pertinent information. 84 # This is easy to do if you have a fitted model and the list of contrasts, 85 # which we do here. 86 87 from nilearn.reporting import make_glm_report 88 89 report = make_glm_report(first_level_model, 90 contrasts=contrasts, 91 title='ADHD DMN Report', 92 cluster_threshold=15, 93 min_distance=8., 94 plot_type='glass', 95 ) 96 97 ######################################################################### 98 # We have several ways to access the report: 99 100 # report # This report can be viewed in a notebook 101 # report.save_as_html('report.html') 102 # report.open_in_browser() 103 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/examples/04_glm_first_level/plot_adhd_dmn.py b/examples/04_glm_first_level/plot_adhd_dmn.py --- a/examples/04_glm_first_level/plot_adhd_dmn.py +++ b/examples/04_glm_first_level/plot_adhd_dmn.py @@ -1,4 +1,4 @@ -"""Default Mode Network extraction of AHDH dataset +"""Default Mode Network extraction of ADHD dataset =============================================== This example shows a full step-by-step workflow of fitting a GLM to data
{"golden_diff": "diff --git a/examples/04_glm_first_level/plot_adhd_dmn.py b/examples/04_glm_first_level/plot_adhd_dmn.py\n--- a/examples/04_glm_first_level/plot_adhd_dmn.py\n+++ b/examples/04_glm_first_level/plot_adhd_dmn.py\n@@ -1,4 +1,4 @@\n-\"\"\"Default Mode Network extraction of AHDH dataset\n+\"\"\"Default Mode Network extraction of ADHD dataset\n ===============================================\n \n This example shows a full step-by-step workflow of fitting a GLM to data\n", "issue": "Spelling Error\n<!--Describe your proposed enhancement in detail.-->\r\nI think the authors meant to describe ADHD but have written ADHD as AHDH. It is just a simple spelling or typographic error.\r\n<!--List any pages that would be impacted by the enhancement.-->\r\n### Affected pages\r\n1. https://nilearn.github.io/dev/auto_examples/04_glm_first_level/plot_adhd_dmn.html#sphx-glr-auto-examples-04-glm-first-level-plot-adhd-dmn-py\r\n 2. https://nilearn.github.io/dev/glm/first_level_model.html#fitting-a-first-level-model\n", "before_files": [{"content": "\"\"\"Default Mode Network extraction of AHDH dataset\n===============================================\n\nThis example shows a full step-by-step workflow of fitting a GLM to data\nextracted from a seed on the Posterior Cingulate Cortex and saving the results.\n\nMore specifically:\n\n1. A sequence of fMRI volumes are loaded.\n2. A design matrix with the Posterior Cingulate Cortex seed is defined.\n3. A GLM is applied to the dataset (effect/covariance, then contrast estimation).\n4. The Default Mode Network is displayed.\n\n.. include:: ../../../examples/masker_note.rst\n\n\"\"\"\nimport numpy as np\n\nfrom nilearn import datasets, plotting\nfrom nilearn.maskers import NiftiSpheresMasker\n\nfrom nilearn.glm.first_level import FirstLevelModel\nfrom nilearn.glm.first_level import make_first_level_design_matrix\n\n#########################################################################\n# Prepare data and analysis parameters\n# -------------------------------------\n# Prepare the data.\nadhd_dataset = datasets.fetch_adhd(n_subjects=1)\n\n# Prepare timing\nt_r = 2.\nslice_time_ref = 0.\nn_scans = 176\n\n# Prepare seed\npcc_coords = (0, -53, 26)\n\n#########################################################################\n# Estimate contrasts\n# ------------------\n# Specify the contrasts.\nseed_masker = NiftiSpheresMasker([pcc_coords], radius=10, detrend=True,\n standardize=True, low_pass=0.1,\n high_pass=0.01, t_r=2.,\n memory='nilearn_cache',\n memory_level=1, verbose=0)\nseed_time_series = seed_masker.fit_transform(adhd_dataset.func[0])\nframetimes = np.linspace(0, (n_scans - 1) * t_r, n_scans)\ndesign_matrix = make_first_level_design_matrix(frametimes, hrf_model='spm',\n add_regs=seed_time_series,\n add_reg_names=[\"pcc_seed\"])\ndmn_contrast = np.array([1] + [0] * (design_matrix.shape[1] - 1))\ncontrasts = {'seed_based_glm': dmn_contrast}\n\n#########################################################################\n# Perform first level analysis\n# ----------------------------\n# Setup and fit GLM.\nfirst_level_model = FirstLevelModel(t_r=t_r, slice_time_ref=slice_time_ref)\nfirst_level_model = first_level_model.fit(run_imgs=adhd_dataset.func[0],\n design_matrices=design_matrix)\n\n#########################################################################\n# Estimate the contrast.\nprint('Contrast seed_based_glm computed.')\nz_map = first_level_model.compute_contrast(contrasts['seed_based_glm'],\n output_type='z_score')\n\n# Saving snapshots of the 
contrasts\nfilename = 'dmn_z_map.png'\ndisplay = plotting.plot_stat_map(z_map, threshold=3.0, title='Seed based GLM',\n cut_coords=pcc_coords)\ndisplay.add_markers(marker_coords=[pcc_coords], marker_color='g',\n marker_size=300)\ndisplay.savefig(filename)\nprint(\"Save z-map in '{0}'.\".format(filename))\n\n###########################################################################\n# Generating a report\n# -------------------\n# It can be useful to quickly generate a\n# portable, ready-to-view report with most of the pertinent information.\n# This is easy to do if you have a fitted model and the list of contrasts,\n# which we do here.\n\nfrom nilearn.reporting import make_glm_report\n\nreport = make_glm_report(first_level_model,\n contrasts=contrasts,\n title='ADHD DMN Report',\n cluster_threshold=15,\n min_distance=8.,\n plot_type='glass',\n )\n\n#########################################################################\n# We have several ways to access the report:\n\n# report # This report can be viewed in a notebook\n# report.save_as_html('report.html')\n# report.open_in_browser()\n", "path": "examples/04_glm_first_level/plot_adhd_dmn.py"}], "after_files": [{"content": "\"\"\"Default Mode Network extraction of ADHD dataset\n===============================================\n\nThis example shows a full step-by-step workflow of fitting a GLM to data\nextracted from a seed on the Posterior Cingulate Cortex and saving the results.\n\nMore specifically:\n\n1. A sequence of fMRI volumes are loaded.\n2. A design matrix with the Posterior Cingulate Cortex seed is defined.\n3. A GLM is applied to the dataset (effect/covariance, then contrast estimation).\n4. The Default Mode Network is displayed.\n\n.. include:: ../../../examples/masker_note.rst\n\n\"\"\"\nimport numpy as np\n\nfrom nilearn import datasets, plotting\nfrom nilearn.maskers import NiftiSpheresMasker\n\nfrom nilearn.glm.first_level import FirstLevelModel\nfrom nilearn.glm.first_level import make_first_level_design_matrix\n\n#########################################################################\n# Prepare data and analysis parameters\n# -------------------------------------\n# Prepare the data.\nadhd_dataset = datasets.fetch_adhd(n_subjects=1)\n\n# Prepare timing\nt_r = 2.\nslice_time_ref = 0.\nn_scans = 176\n\n# Prepare seed\npcc_coords = (0, -53, 26)\n\n#########################################################################\n# Estimate contrasts\n# ------------------\n# Specify the contrasts.\nseed_masker = NiftiSpheresMasker([pcc_coords], radius=10, detrend=True,\n standardize=True, low_pass=0.1,\n high_pass=0.01, t_r=2.,\n memory='nilearn_cache',\n memory_level=1, verbose=0)\nseed_time_series = seed_masker.fit_transform(adhd_dataset.func[0])\nframetimes = np.linspace(0, (n_scans - 1) * t_r, n_scans)\ndesign_matrix = make_first_level_design_matrix(frametimes, hrf_model='spm',\n add_regs=seed_time_series,\n add_reg_names=[\"pcc_seed\"])\ndmn_contrast = np.array([1] + [0] * (design_matrix.shape[1] - 1))\ncontrasts = {'seed_based_glm': dmn_contrast}\n\n#########################################################################\n# Perform first level analysis\n# ----------------------------\n# Setup and fit GLM.\nfirst_level_model = FirstLevelModel(t_r=t_r, slice_time_ref=slice_time_ref)\nfirst_level_model = first_level_model.fit(run_imgs=adhd_dataset.func[0],\n design_matrices=design_matrix)\n\n#########################################################################\n# Estimate the contrast.\nprint('Contrast seed_based_glm 
computed.')\nz_map = first_level_model.compute_contrast(contrasts['seed_based_glm'],\n output_type='z_score')\n\n# Saving snapshots of the contrasts\nfilename = 'dmn_z_map.png'\ndisplay = plotting.plot_stat_map(z_map, threshold=3.0, title='Seed based GLM',\n cut_coords=pcc_coords)\ndisplay.add_markers(marker_coords=[pcc_coords], marker_color='g',\n marker_size=300)\ndisplay.savefig(filename)\nprint(\"Save z-map in '{0}'.\".format(filename))\n\n###########################################################################\n# Generating a report\n# -------------------\n# It can be useful to quickly generate a\n# portable, ready-to-view report with most of the pertinent information.\n# This is easy to do if you have a fitted model and the list of contrasts,\n# which we do here.\n\nfrom nilearn.reporting import make_glm_report\n\nreport = make_glm_report(first_level_model,\n contrasts=contrasts,\n title='ADHD DMN Report',\n cluster_threshold=15,\n min_distance=8.,\n plot_type='glass',\n )\n\n#########################################################################\n# We have several ways to access the report:\n\n# report # This report can be viewed in a notebook\n# report.save_as_html('report.html')\n# report.open_in_browser()\n", "path": "examples/04_glm_first_level/plot_adhd_dmn.py"}]}
1,424
124
gh_patches_debug_850
rasdani/github-patches
git_diff
dotkom__onlineweb4-1359
We are currently solving the following issue within our repository. Here is the issue text: --- BEGIN ISSUE --- Option to post video in article Make it possible to post video in article from dashboard. --- END ISSUE --- Below are some code segments, each from a relevant file. One or more of these files may contain bugs. --- BEGIN FILES --- Path: `apps/article/dashboard/forms.py` Content: ``` 1 # -*- encoding: utf-8 -*- 2 from django import forms 3 4 from apps.article.models import Article 5 from apps.dashboard.widgets import DatetimePickerInput, multiple_widget_generator 6 from apps.gallery.widgets import SingleImageInput 7 8 from taggit.forms import TagWidget 9 10 11 class ArticleForm(forms.ModelForm): 12 13 class Meta(object): 14 """ 15 Add fields that should have DTP activated in the datetimepicker_fields list 16 """ 17 18 model = Article 19 fields = [ 20 'heading', 21 'ingress_short', 22 'ingress', 23 'content', 24 'image', 25 'published_date', 26 'authors', 27 'tags', 28 'featured' 29 ] 30 31 # Fields should be a mapping between field name and an attribute dictionary 32 img_fields = [('image', {'id': 'responsive-image-id'})] 33 dtp_fields = [('published_date', {})] 34 widgetlist = [ 35 (DatetimePickerInput, dtp_fields), 36 (SingleImageInput, img_fields) 37 ] 38 39 # Multiple widget generator merges results from regular widget_generator into a single widget dict 40 widgets = multiple_widget_generator(widgetlist) 41 widgets.update({'tags': TagWidget(attrs={'placeholder': 'Eksempel: åre, online, kjelleren'})}) 42 labels = { 43 'tags': u'Tags' 44 } 45 ``` --- END FILES --- Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks. Here is an example: ```diff diff --git a/examples/server_async.py b/examples/server_async.py --- a/examples/server_async.py +++ b/examples/server_async.py @@ -313,4 +313,4 @@ if __name__ == "__main__": - asyncio.run(run_async_server("."), debug=True) + asyncio.run(run_async_server(), debug=True) diff --git a/examples/server_sync.py b/examples/server_sync.py --- a/examples/server_sync.py +++ b/examples/server_sync.py @@ -313,5 +313,5 @@ if __name__ == "__main__": - server = run_sync_server(".") + server = run_sync_server() server.shutdown() ```
diff --git a/apps/article/dashboard/forms.py b/apps/article/dashboard/forms.py --- a/apps/article/dashboard/forms.py +++ b/apps/article/dashboard/forms.py @@ -22,6 +22,7 @@ 'ingress', 'content', 'image', + 'video', 'published_date', 'authors', 'tags',
{"golden_diff": "diff --git a/apps/article/dashboard/forms.py b/apps/article/dashboard/forms.py\n--- a/apps/article/dashboard/forms.py\n+++ b/apps/article/dashboard/forms.py\n@@ -22,6 +22,7 @@\n 'ingress',\n 'content',\n 'image',\n+ 'video',\n 'published_date',\n 'authors',\n 'tags',\n", "issue": "Option to post video in article\nMake it possible to post video in article from dashboard.\n\n", "before_files": [{"content": "# -*- encoding: utf-8 -*-\nfrom django import forms\n\nfrom apps.article.models import Article\nfrom apps.dashboard.widgets import DatetimePickerInput, multiple_widget_generator\nfrom apps.gallery.widgets import SingleImageInput\n\nfrom taggit.forms import TagWidget\n\n\nclass ArticleForm(forms.ModelForm):\n\n class Meta(object):\n \"\"\"\n Add fields that should have DTP activated in the datetimepicker_fields list\n \"\"\"\n\n model = Article\n fields = [\n 'heading',\n 'ingress_short',\n 'ingress',\n 'content',\n 'image',\n 'published_date',\n 'authors',\n 'tags',\n 'featured'\n ]\n\n # Fields should be a mapping between field name and an attribute dictionary\n img_fields = [('image', {'id': 'responsive-image-id'})]\n dtp_fields = [('published_date', {})]\n widgetlist = [\n (DatetimePickerInput, dtp_fields),\n (SingleImageInput, img_fields)\n ]\n\n # Multiple widget generator merges results from regular widget_generator into a single widget dict\n widgets = multiple_widget_generator(widgetlist)\n widgets.update({'tags': TagWidget(attrs={'placeholder': 'Eksempel: \u00e5re, online, kjelleren'})})\n labels = {\n 'tags': u'Tags'\n }\n", "path": "apps/article/dashboard/forms.py"}], "after_files": [{"content": "# -*- encoding: utf-8 -*-\nfrom django import forms\n\nfrom apps.article.models import Article\nfrom apps.dashboard.widgets import DatetimePickerInput, multiple_widget_generator\nfrom apps.gallery.widgets import SingleImageInput\n\nfrom taggit.forms import TagWidget\n\n\nclass ArticleForm(forms.ModelForm):\n\n class Meta(object):\n \"\"\"\n Add fields that should have DTP activated in the datetimepicker_fields list\n \"\"\"\n\n model = Article\n fields = [\n 'heading',\n 'ingress_short',\n 'ingress',\n 'content',\n 'image',\n 'video',\n 'published_date',\n 'authors',\n 'tags',\n 'featured'\n ]\n\n # Fields should be a mapping between field name and an attribute dictionary\n img_fields = [('image', {'id': 'responsive-image-id'})]\n dtp_fields = [('published_date', {})]\n widgetlist = [\n (DatetimePickerInput, dtp_fields),\n (SingleImageInput, img_fields)\n ]\n\n # Multiple widget generator merges results from regular widget_generator into a single widget dict\n widgets = multiple_widget_generator(widgetlist)\n widgets.update({'tags': TagWidget(attrs={'placeholder': 'Eksempel: \u00e5re, online, kjelleren'})})\n labels = {\n 'tags': u'Tags'\n }\n", "path": "apps/article/dashboard/forms.py"}]}
635
75